Skip to content

Commit

Permalink
Merge f0e6006 into d5a7252
Browse files Browse the repository at this point in the history
  • Loading branch information
yolile committed Jun 23, 2020
2 parents d5a7252 + f0e6006 commit d154a75
Show file tree
Hide file tree
Showing 2 changed files with 59 additions and 1 deletion.
16 changes: 16 additions & 0 deletions kingfisher_scrapy/pipelines.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,28 @@
# https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# https://docs.scrapy.org/en/latest/topics/signals.html#item-signals
from kingfisher_scrapy.items import File, FileItem


class Validate:
    """Item pipeline that validates items and warns about duplicates.

    Tracks the keys of every ``File`` and ``FileItem`` seen so far and logs
    a warning when the same key is processed a second time. Duplicates are
    still passed along the pipeline unchanged.
    """

    def __init__(self):
        # Keys already seen: file names for File items, and
        # (file name, number) pairs for FileItem items.
        self.files = set()
        self.file_items = set()

    def process_item(self, item, spider):
        # We call this in the item pipeline to guarantee that all items are
        # validated. However, its backtrace isn't as helpful for debugging,
        # so we could also call it in ``BaseSpider`` if this becomes an issue.
        if hasattr(item, 'validate'):
            item.validate()

        # Pick the registry and label for this item type; other item types
        # pass through without duplicate tracking.
        if isinstance(item, FileItem):
            key = (item['file_name'], item['number'])
            seen, label = self.file_items, 'FileItem'
        elif isinstance(item, File):
            key = item['file_name']
            seen, label = self.files, 'File'
        else:
            return item

        if key in seen:
            spider.logger.warning('Duplicate {}: {!r}'.format(label, key))
        seen.add(key)

        return item
44 changes: 43 additions & 1 deletion tests/test_validate.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import pytest

from kingfisher_scrapy.exceptions import MissingRequiredFieldError
from kingfisher_scrapy.items import File
from kingfisher_scrapy.items import File, FileItem
from kingfisher_scrapy.pipelines import Validate
from tests import spider_with_crawler


def test_process_item():
Expand All @@ -23,3 +24,44 @@ def test_process_item_error():

with pytest.raises(MissingRequiredFieldError):
pipeline.process_item(item, None)


def test_duplicate_file(caplog):
    """Processing the same File twice logs one warning; a new name logs none."""
    spider = spider_with_crawler()
    pipeline = Validate()
    item = File({
        'file_name': 'test1',
        'data': '',
        'data_type': '',
        'url': '',
    })

    # The first occurrence is silent; the second triggers the warning.
    pipeline.process_item(item, spider)
    pipeline.process_item(item, spider)

    # A distinct file name must not add another warning.
    other = item.copy()
    other['file_name'] = 'file2'
    pipeline.process_item(other, spider)

    assert len(caplog.messages) == 1
    assert caplog.messages[0] == "Duplicate File: 'test1'"


def test_duplicate_file_item(caplog):
    """Processing the same FileItem twice logs one warning; a new number logs none."""
    spider = spider_with_crawler()
    pipeline = Validate()
    item = FileItem({
        'file_name': 'test1',
        'data': '',
        'data_type': '',
        'url': '',
        'number': 1
    })

    # The first occurrence is silent; the second triggers the warning.
    pipeline.process_item(item, spider)
    pipeline.process_item(item, spider)

    # The same file name with a different number is a distinct key,
    # so it must not add another warning.
    other = item.copy()
    other['number'] = 2
    pipeline.process_item(other, spider)

    assert len(caplog.messages) == 1
    assert caplog.messages[0] == "Duplicate FileItem: ('test1', 1)"

0 comments on commit d154a75

Please sign in to comment.