diff --git a/kingfisher_scrapy/extensions/files_store.py b/kingfisher_scrapy/extensions/files_store.py
index b271d6a0..27f142a1 100644
--- a/kingfisher_scrapy/extensions/files_store.py
+++ b/kingfisher_scrapy/extensions/files_store.py
@@ -51,10 +51,10 @@ def spider_closed(self, spider, reason):
 
         path = os.path.join(self.directory, self.relative_crawl_directory(spider))
 
-        if not os.path.exists(path):
-            message = 'SOMETHING FAILED AND NO DATA WAS DOWNLOADED. CHECK THE LOGS FOR DETAILS'
-        else:
+        if os.path.exists(path):
             message = f'The data is available at: {path}'
+        else:
+            message = 'Something went wrong. No data was downloaded.'
 
         message_length = math.ceil(len(message) / 2) * 2
         title_length = message_length // 2 - 8
diff --git a/tests/extensions/test_files_store.py b/tests/extensions/test_files_store.py
index adc6fc4a..7f9a3a8c 100644
--- a/tests/extensions/test_files_store.py
+++ b/tests/extensions/test_files_store.py
@@ -64,6 +64,7 @@ def test_spider_closed_even(caplog):
         extension.item_scraped(item, spider)
     with caplog.at_level(logging.INFO):
         extension.spider_closed(spider, 'finished')
+
     assert [record.message for record in caplog.records] == [
         '+------------------ DATA DIRECTORY ------------------+',
         '|                                                    |',
@@ -73,27 +74,29 @@ def test_spider_closed_even(caplog):
     ]
 
 
-def test_spider_closed_no_data(caplog):
-    spider = spider_with_files_store('no_data')
+def test_spider_closed_no_data(tmpdir, caplog):
+    spider = spider_with_files_store(tmpdir)
     extension = FilesStore.from_crawler(spider.crawler)
 
     with caplog.at_level(logging.INFO):
         extension.spider_closed(spider, 'finished')
 
+    print(caplog.records)
     assert [record.message for record in caplog.records] == [
-        '+----------------------------- DATA DIRECTORY -----------------------------+',
-        '|                                                                          |',
-        '|  SOMETHING FAILED AND NO DATA WAS DOWNLOADED. CHECK THE LOGS FOR DETAILS |',
-        '|                                                                          |',
-        '+--------------------------------------------------------------------------+',
+        '+---------------- DATA DIRECTORY ----------------+',
+        '|                                                |',
+        '|  Something went wrong. No data was downloaded. |',
+        '|                                                |',
+        '+------------------------------------------------+',
     ]
 
 
-def test_spider_closed_fails(caplog):
-    spider = spider_with_files_store('failed')
+def test_spider_closed_failed(tmpdir, caplog):
+    spider = spider_with_files_store(tmpdir)
     extension = FilesStore.from_crawler(spider.crawler)
 
     with caplog.at_level(logging.INFO):
         extension.spider_closed(spider, 'failed')
 
+    assert not caplog.records