This repository has been archived by the owner on Feb 18, 2019. It is now read-only.

Commit

Merge pull request #59 from mozilla/fix-test-suite
Fix issue #56.
Mauro Doglio committed Apr 2, 2013
2 parents 9159ad7 + 8b58b33 · commit 410b1c5
Showing 8 changed files with 257 additions and 273 deletions.
spade/tests/model/test_batchuseragent.py (1 addition, 1 deletion)
@@ -25,7 +25,7 @@ def test_instantiation():
     assert ua.ua_type == BatchUserAgent.MOBILE
     assert ua.ua_type != BatchUserAgent.DESKTOP
     assert ua.ua_string == 'Mozilla / 5.0'
-    assert unicode(ua) == u"(mobile) Mozilla / 5.0"
+    assert unicode(ua) == u"Mozilla / 5.0"


 def test_length_toolong():
spade/tests/model/test_useragent.py (8 additions, 2 deletions)
@@ -8,10 +8,16 @@
 from spade.model.models import UserAgent


-def test_unicode():
+def test_unicode_human_name():
     """Unicode representation of a user agent is the UA string."""
     ua = UserAgent(ua_string=u"Mozilla/5.0", ua_human_name=u"Moz 5")
-    assert unicode(ua) == u"(desktop) Moz 5: 'Mozilla/5.0'"
+    assert unicode(ua) == u"Moz 5"
+
+
+def test_unicode_ua_string():
+    """Unicode representation of a user agent is the UA string."""
+    ua = UserAgent(ua_string=u"Mozilla/5.0",)
+    assert unicode(ua) == u"Mozilla/5.0"


 def test_length_toolong():
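Taken together, the two tests above pin down the new unicode behaviour: return the human-readable name when one is set, otherwise the bare UA string, with no "(desktop)"/"(mobile)" prefix. The corresponding models.py change is among the 8 changed files but is not shown in this view, so the following is only a minimal sketch of a __unicode__ method consistent with the assertions, not the actual implementation:

    # Sketch only: a __unicode__ consistent with the updated tests (assumed,
    # not taken from the models.py hunk, which is not shown here).
    class UserAgent(object):
        def __init__(self, ua_string, ua_human_name=None):
            self.ua_string = ua_string
            self.ua_human_name = ua_human_name

        def __unicode__(self):
            # Prefer the human-readable name; otherwise fall back to the raw UA string.
            return self.ua_human_name or self.ua_string

    assert unicode(UserAgent(ua_string=u"Mozilla/5.0", ua_human_name=u"Moz 5")) == u"Moz 5"
    assert unicode(UserAgent(ua_string=u"Mozilla/5.0")) == u"Mozilla/5.0"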
spade/tests/scraper/middleware/test_spidermiddleware.py (4 additions, 3 deletions)
@@ -86,6 +86,7 @@ def generate_offsite_testing_requests():
     mock_request.meta['referrer'] = 'http://test.com'
     yield mock_request

+
 def generate_crawl_html_requests():
     """Generate an arbitrary request"""
     mock_request = Request('http://test.com/hello.html')
@@ -168,9 +169,10 @@ def test_crawl_limit(spider, depth_middleware, mock_response, depth2_request):
     # Assert no requests went through
     assert len(results) == 0

+
 def test_linkedpages(spider, depth_middleware, mock_response, depth2_request):
     """
-    Ensure all CSS/JS requests are not filtered when linked from level 2 html
+    Ensure only JS requests are not filtered when linked from level 2 html
     pages
     """
     request_generator = generate_crawl_js_and_css_requests()
@@ -184,5 +186,4 @@ def test_linkedpages(spider, depth_middleware, mock_response, depth2_request):
     for req in remaining_requests:
         results.append(req)

-    # Assert both requests went through
-    assert len(results) == 2
+    assert len(results) == 1
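The docstring and the expected count change together: after the fix, a CSS request linked from a level-2 page is filtered while the JS request still passes, so only one request survives. The middleware itself is not part of the hunks shown here; the sketch below is an assumed illustration of that filtering rule (filter_at_depth_limit and the use of request.meta['depth'] are illustrative names, not the project's code):

    # Hypothetical sketch of the behaviour the updated test describes.
    from scrapy.http import Request

    def filter_at_depth_limit(requests, max_depth=2):
        """Drop CSS linked at the depth limit; let JS (and shallower requests) through."""
        for request in requests:
            if request.meta.get('depth', 0) >= max_depth and request.url.endswith('.css'):
                continue  # CSS linked from a level-2 page is filtered out
            yield request

    js_req = Request('http://test.com/file.js', meta={'depth': 2})
    css_req = Request('http://test.com/file.css', meta={'depth': 2})
    assert list(filter_at_depth_limit([js_req, css_req])) == [js_req]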
spade/tests/scraper/spider/test_spider.py (4 additions, 16 deletions)
@@ -259,15 +259,9 @@ def test_css_item_emission(spider, linked_css_request, css_headers, mock_css):
     item_expected['urlscan'] = mock_urlscan
     item_expected['url'] = mock_response.url
     item_expected['user_agent'] = mock_response.meta['user_agent']
-    item_expected['redirected_from'] = ''

-    item_collected = None
-    for item in pipeline_generator:
-        if isinstance(item, MarkupItem):
-            item_collected = item
-        else:
-            assert False
-
-    assert item_expected == item_collected
+    assert list(pipeline_generator) == [item_expected]


 def test_js_item_emission(spider, linked_js_request, js_headers, mock_js):
@@ -303,12 +297,6 @@ def test_js_item_emission(spider, linked_js_request, js_headers, mock_js):
     item_expected['urlscan'] = mock_urlscan
     item_expected['url'] = mock_response.url
     item_expected['user_agent'] = mock_response.meta['user_agent']
-    item_expected['redirected_from'] = ''

-    item_collected = None
-    for item in pipeline_generator:
-        if isinstance(item, MarkupItem):
-            item_collected = item
-        else:
-            assert False
-
-    assert item_expected == item_collected
+    assert list(pipeline_generator) == [item_expected]
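Replacing the collect-and-compare loop with a single list comparison keeps the original check (the emitted MarkupItem equals item_expected) and additionally fails if the pipeline yields extra items or nothing at all. A generic, self-contained illustration of the idiom (the dict items are placeholders, not the spider's real MarkupItem fields):

    def pipeline_generator():
        # Stand-in for the spider's item pipeline: yields exactly one item.
        yield {'url': 'http://test.com/hello.css', 'user_agent': 'Mozilla/5.0'}

    item_expected = {'url': 'http://test.com/hello.css', 'user_agent': 'Mozilla/5.0'}

    # Equivalent to collecting every yielded item and comparing: the assertion
    # fails if the contents differ or if extra items are yielded.
    assert list(pipeline_generator()) == [item_expected]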
