Skip to content

Commit

Permalink
Update tests
Browse files Browse the repository at this point in the history
  • Loading branch information
aguilerapy committed Nov 4, 2020
1 parent ceef349 commit f209d93
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 6 deletions.
1 change: 0 additions & 1 deletion tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ def response_fixture(meta=None, **kwargs):
meta = {'file_name': 'test'}
request = Request('http://example.com', meta=meta)
kwargs.setdefault('status', 200)
kwargs.setdefault('body', b'{"links": {"next": "http://example.com/next"}}')
return TextResponse(request.url, encoding='utf-8', request=request, **kwargs)


Expand Down
22 changes: 17 additions & 5 deletions tests/test_links_spider.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
from datetime import date

import pytest
from scrapy.http import Request
from scrapy.utils.trackref import NoneType

from kingfisher_scrapy.base_spider import LinksSpider
from kingfisher_scrapy.exceptions import MissingNextLinkError
Expand All @@ -11,17 +14,26 @@ def test_next_link():
spider = spider_with_crawler(spider_class=LinksSpider)
spider.next_page_formatter = lambda url: 'next.json'

request = spider.next_link(response_fixture())
request = spider.next_link(response_fixture(body=b'{"links": {"next": "http://example.com/next"}}'))

assert type(request) is Request
assert request.url == 'http://example.com/next'
assert request.meta == {'file_name': 'next.json'}


def test_next_link_condition():
    """next_link() returns None (rather than raising MissingNextLinkError) for an
    empty "next" link when the spider has from_date/until_date set."""
    spider = spider_with_crawler(spider_class=LinksSpider)
    spider.from_date = spider.until_date = date(2002, 12, 31)

    # Use a bytes body for consistency with the other response_fixture() calls.
    request = spider.next_link(response_fixture(body=b'{"links": {"next": ""}}'))

    # Idiomatic None check: identity comparison, no scrapy NoneType import needed.
    assert request is None


def test_parse_404():
spider = spider_with_crawler(spider_class=LinksSpider)

generator = spider.parse(response_fixture(status=404))
generator = spider.parse(response_fixture(status=404, body=b'{"links": {"next": "http://example.com/next"}}'))
item = next(generator)

assert type(item) is FileError
Expand All @@ -39,16 +51,17 @@ def test_parse_200():
spider = spider_with_crawler(spider_class=LinksSpider)
spider.data_type = 'release_package'
spider.next_page_formatter = lambda url: 'next.json'
body = b'{"links": {"next": "http://example.com/next"}}'

generator = spider.parse(response_fixture())
generator = spider.parse(response_fixture(body=body))
item = next(generator)
request = next(generator)

assert type(item) is File
assert item == {
'file_name': 'test',
'url': 'http://example.com',
'data': b'{"links": {"next": "http://example.com/next"}}',
'data': body,
'data_type': 'release_package',
'encoding': 'utf-8',
'post_to_api': True,
Expand All @@ -64,7 +77,6 @@ def test_parse_200():

def test_next_link_not_found():
spider = spider_with_crawler(spider_class=LinksSpider)
spider.next_page_formatter = lambda url: 'next.json'
body = '{"links": {"next": ""}}'

with pytest.raises(MissingNextLinkError) as e:
Expand Down

0 comments on commit f209d93

Please sign in to comment.