Commit

Fix tests
Laerte committed Feb 28, 2024
1 parent 63acd07 commit 26a16f2
Showing 15 changed files with 117 additions and 114 deletions.
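Note: the hunks below apply two mechanical refactors across the test suite: dict(...) keyword-argument calls become dict literals, and list comprehensions that merely rewrap an iterable become list(...) calls. A minimal sketch of the two equivalences, using illustrative values rather than anything taken from the diff:

    # dict() keyword-call form vs. the equivalent dict literal
    assert dict(name="Maria", age="22") == {"name": "Maria", "age": "22"}

    # A comprehension that only rewraps an iterable is equivalent to list()
    def extract_links():  # stand-in generator; the tests call lx.extract_links(response)
        yield "http://example.com/sample1.html"
        yield "http://example.com/sample2.html"

    assert [link for link in extract_links()] == list(extract_links())

Both forms build the same objects; only the spelling changes.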
10 changes: 5 additions & 5 deletions tests/test_crawl.py
@@ -76,11 +76,11 @@ def test_randomized_delay(self):

@defer.inlineCallbacks
def _test_delay(self, total, delay, randomize=False):
crawl_kwargs = dict(
maxlatency=delay * 2,
mockserver=self.mockserver,
total=total,
)
crawl_kwargs = {
"maxlatency": delay * 2,
"mockserver": self.mockserver,
"total": total,
}
tolerance = 1 - (0.6 if randomize else 0.2)

settings = {"DOWNLOAD_DELAY": delay, "RANDOMIZE_DOWNLOAD_DELAY": randomize}
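A worked example of the tolerance expression in the hunk above, under illustrative values (the 0.6/0.2 slack factors come from the test itself):

    randomize = True
    tolerance = 1 - (0.6 if randomize else 0.2)  # ~0.4 when randomized, ~0.8 otherwise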
2 changes: 1 addition & 1 deletion tests/test_downloadermiddleware_cookies.py
@@ -320,7 +320,7 @@ def test_local_domain(self):

@pytest.mark.xfail(reason="Cookie header is not currently being processed")
def test_keep_cookie_from_default_request_headers_middleware(self):
DEFAULT_REQUEST_HEADERS = dict(Cookie="default=value; asdf=qwerty")
DEFAULT_REQUEST_HEADERS = {"Cookie": "default=value; asdf=qwerty"}
mw_default_headers = DefaultHeadersMiddleware(DEFAULT_REQUEST_HEADERS.items())
# overwrite with values from 'cookies' request argument
req1 = Request("http://example.org", cookies={"default": "something"})
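A side benefit of the literal form used in the hunk above: dict literals accept keys that are not valid Python identifiers, which the dict(...) keyword syntax cannot express. A small sketch with illustrative header names:

    # Only the literal form can spell this key; dict(Content-Type=...) is a SyntaxError.
    headers = {"Content-Type": "text/html", "X-Requested-With": "XMLHttpRequest"}

    # Identifier-like keys such as "Cookie" work either way:
    assert dict(Cookie="default=value; asdf=qwerty") == {"Cookie": "default=value; asdf=qwerty"}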
4 changes: 2 additions & 2 deletions tests/test_downloadermiddleware_httpauth.py
@@ -59,7 +59,7 @@ def test_auth_subdomain(self):
self.assertEqual(req.headers["Authorization"], basic_auth_header("foo", "bar"))

def test_auth_already_set(self):
req = Request("http://example.com/", headers=dict(Authorization="Digest 123"))
req = Request("http://example.com/", headers={"Authorization": "Digest 123"})
assert self.mw.process_request(req, self.spider) is None
self.assertEqual(req.headers["Authorization"], b"Digest 123")

@@ -79,6 +79,6 @@ def test_auth(self):
self.assertEqual(req.headers["Authorization"], basic_auth_header("foo", "bar"))

def test_auth_already_set(self):
req = Request("http://example.com/", headers=dict(Authorization="Digest 123"))
req = Request("http://example.com/", headers={"Authorization": "Digest 123"})
assert self.mw.process_request(req, self.spider) is None
self.assertEqual(req.headers["Authorization"], b"Digest 123")
24 changes: 12 additions & 12 deletions tests/test_exporters.py
@@ -152,7 +152,7 @@ def test_invalid_option(self):

def test_nested_item(self):
i1 = self.item_class(name="Joseph", age="22")
i2 = dict(name="Maria", age=i1)
i2 = {"name": "Maria", "age": i1}
i3 = self.item_class(name="Jesus", age=i2)
ie = self._get_exporter()
exported = ie.export_item(i3)
@@ -185,7 +185,7 @@ def test_export_list(self):

def test_export_item_dict_list(self):
i1 = self.item_class(name="Joseph", age="22")
i2 = dict(name="Maria", age=[i1])
i2 = {"name": "Maria", "age": [i1]}
i3 = self.item_class(name="Jesus", age=[i2])
ie = self._get_exporter()
exported = ie.export_item(i3)
@@ -373,7 +373,7 @@ class TestItem2(Item):

def test_join_multivalue_not_strings(self):
self.assertExportResult(
item=dict(name="John", friends=[4, 8]),
item={"name": "John", "friends": [4, 8]},
include_headers_line=False,
expected='"[4, 8]",John\r\n',
)
@@ -388,14 +388,14 @@ def test_nonstring_types_item(self):
def test_errors_default(self):
with self.assertRaises(UnicodeEncodeError):
self.assertExportResult(
item=dict(text="W\u0275\u200Brd"),
item={"text": "W\u0275\u200Brd"},
expected=None,
encoding="windows-1251",
)

def test_errors_xmlcharrefreplace(self):
self.assertExportResult(
item=dict(text="W\u0275\u200Brd"),
item={"text": "W\u0275\u200Brd"},
include_headers_line=False,
expected="Wɵ​rd\r\n",
encoding="windows-1251",
@@ -455,8 +455,8 @@ def test_multivalued_fields(self):
)

def test_nested_item(self):
i1 = dict(name="foo\xa3hoo", age="22")
i2 = dict(name="bar", age=i1)
i1 = {"name": "foo\xa3hoo", "age": "22"}
i2 = {"name": "bar", "age": i1}
i3 = self.item_class(name="buz", age=i2)

self.assertExportResult(
@@ -478,8 +478,8 @@ def test_nested_item(self):
)

def test_nested_list_item(self):
i1 = dict(name="foo")
i2 = dict(name="bar", v2={"egg": ["spam"]})
i1 = {"name": "foo"}
i2 = {"name": "bar", "v2": {"egg": ["spam"]}}
i3 = self.item_class(name="buz", age=[i1, i2])

self.assertExportResult(
@@ -534,7 +534,7 @@ def _check_output(self):

def test_nested_item(self):
i1 = self.item_class(name="Joseph", age="22")
i2 = dict(name="Maria", age=i1)
i2 = {"name": "Maria", "age": i1}
i3 = self.item_class(name="Jesus", age=i2)
self.ie.start_exporting()
self.ie.export_item(i3)
@@ -622,9 +622,9 @@ def test_nested_item(self):
self.assertEqual(exported, [expected])

def test_nested_dict_item(self):
i1 = dict(name="Joseph\xa3", age="22")
i1 = {"name": "Joseph\xa3", "age": "22"}
i2 = self.item_class(name="Maria", age=i1)
i3 = dict(name="Jesus", age=i2)
i3 = {"name": "Jesus", "age": i2}
self.ie.start_exporting()
self.ie.export_item(i3)
self.ie.finish_exporting()
38 changes: 19 additions & 19 deletions tests/test_linkextractors.py
@@ -37,7 +37,7 @@ def test_extract_all_links(self):
page4_url = "http://example.com/page%204.html"

self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -55,7 +55,7 @@ def test_extract_all_links(self):
def test_extract_filter_allow(self):
lx = self.extractor_cls(allow=("sample",))
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -70,7 +70,7 @@ def test_extract_filter_allow(self):
def test_extract_filter_allow_with_duplicates(self):
lx = self.extractor_cls(allow=("sample",), unique=False)
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -93,7 +93,7 @@ def test_extract_filter_allow_with_duplicates(self):
def test_extract_filter_allow_with_duplicates_canonicalize(self):
lx = self.extractor_cls(allow=("sample",), unique=False, canonicalize=True)
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -116,7 +116,7 @@ def test_extract_filter_allow_with_duplicates_canonicalize(self):
def test_extract_filter_allow_no_duplicates_canonicalize(self):
lx = self.extractor_cls(allow=("sample",), unique=True, canonicalize=True)
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -127,7 +127,7 @@ def test_extract_filter_allow_no_duplicates_canonicalize(self):
def test_extract_filter_allow_and_deny(self):
lx = self.extractor_cls(allow=("sample",), deny=("3",))
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -137,7 +137,7 @@ def test_extract_filter_allow_and_deny(self):
def test_extract_filter_allowed_domains(self):
lx = self.extractor_cls(allow_domains=("google.com",))
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://www.google.com/something", text=""),
],
@@ -148,7 +148,7 @@ def test_extraction_using_single_values(self):

lx = self.extractor_cls(allow="sample")
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -162,7 +162,7 @@ def test_extraction_using_single_values(self):

lx = self.extractor_cls(allow="sample", deny="3")
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -171,15 +171,15 @@ def test_extraction_using_single_values(self):

lx = self.extractor_cls(allow_domains="google.com")
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://www.google.com/something", text=""),
],
)

lx = self.extractor_cls(deny_domains="example.com")
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://www.google.com/something", text=""),
],
@@ -265,7 +265,7 @@ def test_matches(self):
def test_restrict_xpaths(self):
lx = self.extractor_cls(restrict_xpaths=('//div[@id="subwrapper"]',))
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -337,7 +337,7 @@ def test_restrict_css_and_restrict_xpaths_together(self):
restrict_css=("#subwrapper + a",),
)
self.assertEqual(
[link for link in lx.extract_links(self.response)],
list(lx.extract_links(self.response)),
[
Link(url="http://example.com/sample1.html", text=""),
Link(url="http://example.com/sample2.html", text="sample 2"),
@@ -705,7 +705,7 @@ def test_link_wrong_href(self):
response = HtmlResponse("http://example.org/index.html", body=html)
lx = self.extractor_cls()
self.assertEqual(
[link for link in lx.extract_links(response)],
list(lx.extract_links(response)),
[
Link(
url="http://example.org/item1.html",
@@ -758,7 +758,7 @@ def test_link_wrong_href(self):
response = HtmlResponse("http://example.org/index.html", body=html)
lx = self.extractor_cls()
self.assertEqual(
[link for link in lx.extract_links(response)],
list(lx.extract_links(response)),
[
Link(
url="http://example.org/item1.html", text="Item 1", nofollow=False
@@ -779,7 +779,7 @@ def test_link_restrict_text(self):
# Simple text inclusion test
lx = self.extractor_cls(restrict_text="dog")
self.assertEqual(
[link for link in lx.extract_links(response)],
list(lx.extract_links(response)),
[
Link(
url="http://example.org/item2.html",
@@ -791,7 +791,7 @@ def test_link_restrict_text(self):
# Unique regex test
lx = self.extractor_cls(restrict_text=r"of.*dog")
self.assertEqual(
[link for link in lx.extract_links(response)],
list(lx.extract_links(response)),
[
Link(
url="http://example.org/item2.html",
@@ -803,7 +803,7 @@ def test_link_restrict_text(self):
# Multiple regex test
lx = self.extractor_cls(restrict_text=[r"of.*dog", r"of.*cat"])
self.assertEqual(
[link for link in lx.extract_links(response)],
list(lx.extract_links(response)),
[
Link(
url="http://example.org/item1.html",
@@ -834,7 +834,7 @@ def test_skip_bad_links(self):
response = HtmlResponse("http://example.org/index.html", body=html)
lx = self.extractor_cls()
self.assertEqual(
[link for link in lx.extract_links(response)],
list(lx.extract_links(response)),
[
Link(
url="http://example.org/item2.html",
20 changes: 10 additions & 10 deletions tests/test_loader_deprecated.py
@@ -565,37 +565,37 @@ class NoInputReprocessingFromDictTest(unittest.TestCase):
"""

def test_avoid_reprocessing_with_initial_values_single(self):
il = NoInputReprocessingDictLoader(item=dict(title="foo"))
il = NoInputReprocessingDictLoader(item={"title": "foo"})
il_loaded = il.load_item()
self.assertEqual(il_loaded, dict(title="foo"))
self.assertEqual(il_loaded, {"title": "foo"})
self.assertEqual(
NoInputReprocessingDictLoader(item=il_loaded).load_item(), dict(title="foo")
NoInputReprocessingDictLoader(item=il_loaded).load_item(), {"title": "foo"}
)

def test_avoid_reprocessing_with_initial_values_list(self):
il = NoInputReprocessingDictLoader(item=dict(title=["foo", "bar"]))
il = NoInputReprocessingDictLoader(item={"title": ["foo", "bar"]})
il_loaded = il.load_item()
self.assertEqual(il_loaded, dict(title="foo"))
self.assertEqual(il_loaded, {"title": "foo"})
self.assertEqual(
NoInputReprocessingDictLoader(item=il_loaded).load_item(), dict(title="foo")
NoInputReprocessingDictLoader(item=il_loaded).load_item(), {"title": "foo"}
)

def test_avoid_reprocessing_without_initial_values_single(self):
il = NoInputReprocessingDictLoader()
il.add_value("title", "foo")
il_loaded = il.load_item()
self.assertEqual(il_loaded, dict(title="FOO"))
self.assertEqual(il_loaded, {"title": "FOO"})
self.assertEqual(
NoInputReprocessingDictLoader(item=il_loaded).load_item(), dict(title="FOO")
NoInputReprocessingDictLoader(item=il_loaded).load_item(), {"title": "FOO"}
)

def test_avoid_reprocessing_without_initial_values_list(self):
il = NoInputReprocessingDictLoader()
il.add_value("title", ["foo", "bar"])
il_loaded = il.load_item()
self.assertEqual(il_loaded, dict(title="FOO"))
self.assertEqual(il_loaded, {"title": "FOO"})
self.assertEqual(
NoInputReprocessingDictLoader(item=il_loaded).load_item(), dict(title="FOO")
NoInputReprocessingDictLoader(item=il_loaded).load_item(), {"title": "FOO"}
)


2 changes: 1 addition & 1 deletion tests/test_mail.py
@@ -91,7 +91,7 @@ def test_send_attach(self):
self.assertEqual(attach.get_payload(decode=True), b"content")

def _catch_mail_sent(self, **kwargs):
self.catched_msg = dict(**kwargs)
self.catched_msg = {**kwargs}

def test_send_utf8(self):
subject = "sübjèçt"
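The _catch_mail_sent change above swaps dict(**kwargs) for {**kwargs}; both build a new dict holding a shallow copy of the keyword arguments, so the captured message is unchanged in behavior. A minimal sketch with made-up values:

    def catch_mail_sent(**kwargs):  # free-function stand-in for the test helper
        return {**kwargs}  # same result as dict(**kwargs): a fresh, shallow-copied dict

    msg = catch_mail_sent(to=["someone@example.com"], subject="hello", body="body")
    assert msg == {"to": ["someone@example.com"], "subject": "hello", "body": "body"}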
2 changes: 1 addition & 1 deletion tests/test_pipeline_crawl.py
@@ -140,7 +140,7 @@ def _assert_files_download_failure(self, crawler, items, code, logs):
self.assertEqual(logs.count(file_dl_failure), 3)

# check that no files were written to the media store
self.assertEqual([x for x in self.tmpmediastore.iterdir()], [])
self.assertEqual(list(self.tmpmediastore.iterdir()), [])

@defer.inlineCallbacks
def test_download_media(self):
2 changes: 1 addition & 1 deletion tests/test_pipeline_files.py
@@ -221,7 +221,7 @@ def file_path(self, request, response=None, info=None, item=None):
file_path = CustomFilesPipeline.from_settings(
Settings({"FILES_STORE": self.tempdir})
).file_path
item = dict(path="path-to-store-file")
item = {"path": "path-to-store-file"}
request = Request("http://example.com")
self.assertEqual(file_path(request, item=item), "full/path-to-store-file")
