
Commit

Update the tests with the calculation of the new indicator
lrromero committed Sep 24, 2019
1 parent 738fe84 commit 5ea72d6
Showing 4 changed files with 140 additions and 36 deletions.
1 change: 1 addition & 0 deletions requirements_dev.txt
@@ -15,3 +15,4 @@ pycallgraph
 setuptools>=38.6
 wheel>=0.31
 vcrpy
+requests_mock
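requests_mock is added as a test-only dependency so that the new helper tests below can stub HTTP traffic instead of hitting real URLs. A minimal usage sketch of the library (illustrative only, not code from this commit):

import requests
import requests_mock

with requests_mock.Mocker() as m:
    # Any HEAD request to the registered URL is intercepted and answered locally.
    m.head('http://example.com/', status_code=200)
    assert requests.head('http://example.com/').status_code == 200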
12 changes: 9 additions & 3 deletions tests/test_catalog_readme.py
@@ -38,15 +38,19 @@ def setUp(cls):
         cls.catalog = cls.get_sample("several_datasets_for_harvest.json")
 
     @my_vcr.use_cassette()
-    def test_generate_readme(self):
+    @mock.patch('pydatajson.status_indicators_generator.is_working_url',
+                return_value=True)
+    def test_generate_readme(self, _mock_check):
         with io.open(os.path.join(self.RESULTS_DIR, "catalog_readme.md"), 'r',
                      encoding='utf-8') as expected_readme_file:
             expected_readme = expected_readme_file.read()
         readme = generate_readme(self.catalog)
         assert_equal(expected_readme, readme)
 
     @my_vcr.use_cassette()
-    def test_readme_file_write(self):
+    @mock.patch('pydatajson.status_indicators_generator.is_working_url',
+                return_value=True)
+    def test_readme_file_write(self, _mock_check):
         actual_filename = os.path.join(self.TEMP_DIR, "catalog_readme.md")
         expected_filename = os.path.join(self.RESULTS_DIR, "catalog_readme.md")
         generate_readme(self.catalog, export_path=actual_filename)
@@ -61,8 +65,10 @@ def test_readme_file_write(self):
         assert_true(comparison)
 
     @my_vcr.use_cassette()
+    @mock.patch('pydatajson.status_indicators_generator.is_working_url',
+                return_value=True)
     @mock.patch('pydatajson.indicators._federation_indicators')
-    def test_readme_null_indicators(self, mock_indicators):
+    def test_readme_null_indicators(self, mock_indicators, _mock_check):
         mock_indicators.return_value = {
             'datasets_federados_cant': None,
             'datasets_federados_pct': None,
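Note on the stacked decorators above: mock.patch hands its mocks to the test method from the bottom decorator up, which is why test_readme_null_indicators receives mock_indicators (the innermost patch) before _mock_check, while @my_vcr.use_cassette() injects no argument. A self-contained sketch of that ordering rule (the patch targets here are illustrative, not pydatajson code):

import os
import unittest
from unittest import mock

class DecoratorOrderExample(unittest.TestCase):
    @mock.patch('os.path.exists', return_value=True)   # outermost patch, injected last
    @mock.patch('os.getcwd', return_value='/tmp')      # closest to the method, injected first
    def test_order(self, mock_getcwd, mock_exists):
        # The argument order mirrors the bottom-up application of the decorators.
        self.assertEqual(os.getcwd(), '/tmp')
        self.assertTrue(os.path.exists('/nowhere'))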
31 changes: 30 additions & 1 deletion tests/test_helpers.py
@@ -12,8 +12,11 @@
 
 import nose
 import openpyxl as pyxl
+import requests_mock
 
-from pydatajson.helpers import fields_to_uppercase
+from requests.exceptions import ConnectionError, Timeout
+
+from pydatajson.helpers import fields_to_uppercase, is_working_url
 from .context import pydatajson
 
 
@@ -264,5 +267,31 @@ def test_fields_to_uppercase_modifies_all_lowercase_fields(self):
 
         self.assertEqual(fields_to_uppercase(fields), expected)
 
+    @requests_mock.Mocker()
+    def test_validate_valid_url(self, req_mock):
+        req_mock.head('http://test.com/')
+        self.assertTrue(is_working_url('http://test.com/'))
+
+    @requests_mock.Mocker()
+    def test_validate_invalid_url(self, req_mock):
+        req_mock.head('http://test.com/', status_code=400)
+        self.assertFalse(is_working_url('http://test.com/'))
+
+    @requests_mock.Mocker()
+    def test_validate_url_with_exception(self, req_mock):
+        req_mock.head('http://test.com/', exc=ConnectionError)
+        self.assertFalse(is_working_url('http://test.com/'))
+
+    @requests_mock.Mocker()
+    def test_validate_url_with_timeout(self, req_mock):
+        req_mock.head('http://test.com/', exc=Timeout)
+        self.assertFalse(is_working_url('http://test.com/'))
+
+    def test_validate_malformed_values(self):
+        self.assertFalse(is_working_url('malformed_value'))
+        self.assertFalse(is_working_url(''))
+        self.assertFalse(is_working_url(None))
+
+
 if __name__ == '__main__':
     nose.run(defaultTest=__name__)

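Taken together, the new tests pin down the contract of pydatajson.helpers.is_working_url: truthy when a URL answers a HEAD request with a success status, falsy on 4xx responses, connection errors, timeouts, and malformed or empty values. A sketch consistent with that contract (an assumption for illustration; the actual implementation in pydatajson/helpers.py may differ):

import requests
from requests.exceptions import RequestException

def is_working_url(url):
    """Return True when `url` answers a HEAD request with a non-error status."""
    if not url or not isinstance(url, str):
        # Rejects None, '' and other non-string values without touching the network.
        return False
    try:
        response = requests.head(url, timeout=1)
        return response.status_code < 400
    except RequestException:
        # ConnectionError, Timeout and schema-less strings like 'malformed_value' end up here.
        return False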