2 changes: 1 addition & 1 deletion setup.py
@@ -2,7 +2,7 @@
from setuptools import setup, find_packages


-version = '0.6.0'
+version = '0.6.1'


def read(f):
22 changes: 16 additions & 6 deletions tests/test_check_url.py
@@ -26,6 +26,10 @@ def test_skip_urls_with_inner_params(self):
actual = list(self.extractor.extract_urls('aaa http://www.{{param}}.com aaa'))
self.assertEqual([], actual)

+def test_skip_urls_with_inner_variables(self):
+actual = list(self.extractor.extract_urls('aaa http://www.{param}.com aaa'))
+self.assertEqual([], actual)


class TestTxt(AsyncTestCase):
def setUp(self):
@@ -83,11 +87,17 @@ def test_skip_parameterized_urls_from_start(self, mock_get):

self.assertFalse(mock_get.called)

+@patch('aiohttp.request')
+def test_skip_urls_with_variables(self, mock_get):
+self._check(mock_get, 'aaa http://domain.com/{ticket.url}, aaa', 200)
+
+self.assertFalse(mock_get.called)

@patch('aiohttp.request')
def test_include_params_in_the_url(self, mock_get):
self._check(mock_get, 'aaa http://domain.com/hello?id=123 aaa', 200)

-mock_get.assert_called_with('get', 'http://domain.com/hello?id=123', headers=self.headers)
+mock_get.assert_called_with('get', 'http://domain.com/hello?id=123', allow_redirects=True, headers=self.headers)

@patch('aiohttp.request')
def test_skip_empty_urls(self, mock_get):
@@ -111,13 +121,13 @@ def test_skip_commas(self, mock_get):
def test_skip_commas_url(self, mock_get):
self._check(mock_get, 'aaa http://www.google.com, aaa', 200)

-mock_get.assert_called_with('get', 'http://www.google.com', headers=self.headers)
+mock_get.assert_called_with('get', 'http://www.google.com', allow_redirects=True, headers=self.headers)

@patch('aiohttp.request')
def test_skip_chineese_commas(self, mock_get):
self._check(mock_get, 'aaa http://bit.ly/UpdateKeepSafe。拥有最新版本就能解决大部分问题了。 aaa', 200)

-mock_get.assert_called_with('get', 'http://bit.ly/UpdateKeepSafe', headers=self.headers)
+mock_get.assert_called_with('get', 'http://bit.ly/UpdateKeepSafe', allow_redirects=True, headers=self.headers)

@patch('aiohttp.request')
def test_skip_keepsafe_urls(self, mock_get):
@@ -127,10 +137,10 @@ def test_skip_keepsafe_urls(self, mock_get):

@patch('aiohttp.request')
def test_check_headers(self, mock_get):
-self.check = url.UrlValidator('txt', headers=self.headers)
+self.check = url.UrlValidator('txt', allow_redirects=True, headers=self.headers)
self._check(mock_get, 'aaa http://www.google.com, aaa', 200)

-mock_get.assert_called_with('get', 'http://www.google.com', headers=self.headers)
+mock_get.assert_called_with('get', 'http://www.google.com', allow_redirects=True, headers=self.headers)


class TestHtml(AsyncTestCase):
@@ -154,7 +164,7 @@ def _check(self, mock_get, content, status_code, check=None):
def test_happy_path(self, mock_get):
errors = self._check(mock_get, '<a href="http://www.google.com">link</a>', 200)

-mock_get.assert_called_with('get', 'http://www.google.com', headers=self.headers)
+mock_get.assert_called_with('get', 'http://www.google.com', allow_redirects=True, headers=self.headers)
self.assertEqual([], errors)

@patch('aiohttp.request')
14 changes: 7 additions & 7 deletions validator/checks/url.py
@@ -31,7 +31,7 @@ def __init__(self, **kwargs):
r'\];:\'".,<>?\xab\xbb\u201c\u201d\u2018\u2019]))'

def _without_params(self, url):
-return not bool(re.search(r'\{\{[a-zA-Z0-9_.]+\}\}', url))
+return not bool(re.search(r'\{[a-zA-Z0-9_.]+\}', url))

def _strip_non_ascii_chars(self, url):
return ''.join(filter(lambda c: c in string.printable, url))
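
The broadened pattern in _without_params now treats single-brace placeholders such as {ticket.url} as template parameters, not only the double-brace {{param}} form, so those URLs are skipped instead of requested (the new pattern still matches inside {{param}}). A minimal sketch of the behavioral difference, reusing the two patterns from the diff above (OLD_PATTERN, NEW_PATTERN and the standalone helper are illustrative names, not part of the change):

import re

OLD_PATTERN = r'\{\{[a-zA-Z0-9_.]+\}\}'  # double-brace placeholders only
NEW_PATTERN = r'\{[a-zA-Z0-9_.]+\}'      # single- or double-brace placeholders

def without_params(url, pattern):
    # Mirrors _without_params: True means no placeholder found, so the URL would be checked.
    return not bool(re.search(pattern, url))

print(without_params('http://domain.com/{ticket.url}', OLD_PATTERN))  # True  (old: would be requested)
print(without_params('http://domain.com/{ticket.url}', NEW_PATTERN))  # False (new: skipped)
print(without_params('http://www.{{param}}.com', NEW_PATTERN))        # False (still skipped)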
@@ -106,7 +106,7 @@ def __init__(self, headers=None):
async def _make_request(self, url):
try:
logging.info('checking {}'.format(url))
-async with aiohttp.request('get', url, headers=self._headers) as res:
+async with aiohttp.request('get', url, headers=self._headers, allow_redirects=True) as res:
return res.status
except Exception:
logging.error('Error making request to %s', url)
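
The request in _make_request now passes allow_redirects=True explicitly (aiohttp already follows redirects for GET by default), and the test assertions above include the flag in the expected call. A self-contained sketch of the same request shape, with a placeholder URL that is not from the diff; the status returned is that of the final response after any 3xx hops:

import asyncio
import aiohttp

async def fetch_status(url, headers=None):
    # Same call shape as _make_request: follow redirects and return the final status.
    async with aiohttp.request('get', url, headers=headers, allow_redirects=True) as res:
        return res.status

# Example usage (placeholder URL):
# asyncio.run(fetch_status('http://example.com'))  # e.g. 200 once redirects resolve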
@@ -196,11 +196,11 @@ class UrlOccurenciesValidator(UrlValidator):
def check(self, data, parser, reader):
error = []
for row in data:
-base = row.pop(0)
-base_urls = self._get_urls([[base]], parser, reader)
-for other in row:
-other_urls = self._get_urls([[other]], parser, reader)
-error.append(UrlOccurencyDiff(base, other, base_urls, other_urls))
+base = row.pop(0)
+base_urls = self._get_urls([[base]], parser, reader)
+for other in row:
+other_urls = self._get_urls([[other]], parser, reader)
+error.append(UrlOccurencyDiff(base, other, base_urls, other_urls))
return [x for x in error if not x.is_valid()]

def async_check(self, *args):