Add status code to failure message for all url checking #158

Merged
merged 1 commit into mozilla:master

3 participants

@bobsilverberg

Changed all the tests to include the status code in any URL validity failures
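In effect, each test now calls the page object's get_response_code directly (the boolean is_valid_link helper is removed) and compares the result against requests.codes.ok, so failure messages report the offending status code. A rough standalone sketch of the new pattern, using requests directly instead of a page object and a hypothetical URL list purely for illustration:

import requests

def find_bad_urls(urls):
    """Collect failure messages that include the HTTP status code."""
    bad_urls = []
    for url in urls:
        # Same idea as Page.get_response_code: plain GET, no cert verification.
        response_code = requests.get(url, verify=False).status_code
        if response_code != requests.codes.ok:
            bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
    return bad_urls

# Example usage (hypothetical URL): assert find_bad_urls(['https://www.mozilla.org/']) == []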

@bobsilverberg

FYI, I ran this on Jenkins and it passed.

@AlinT

Lgtm

@m8ttyB
Owner

Nice work @bobsilverberg lgtm - merging

@m8ttyB merged commit d5fa167 into mozilla:master
5 pages/page.py
@@ -102,8 +102,3 @@ def get_response_code(self, url):
requests_config = {'max_retries': 5}
r = requests.get(url, verify=False, config=requests_config)
return r.status_code
-
- def is_valid_link(self, url):
- if self.get_response_code(url) == requests.codes.ok:
- return True
- return False
35 tests/test_about.py
@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from unittestzero import Assert
from pages.desktop.about import AboutPage
@@ -12,7 +13,7 @@
class TestAboutPage:
@pytest.mark.nondestructive
- def test_footer_section_links(self, mozwebqa):
+ def test_footer_link_destinations_are_correct(self, mozwebqa):
about_page = AboutPage(mozwebqa)
about_page.go_to_page()
Assert.contains(about_page.footer.expected_footer_logo_destination,
@@ -27,22 +28,23 @@ def test_footer_section_links(self, mozwebqa):
Assert.equal(0, len(bad_links), '%s bad links found: ' % len(bad_links) + ', '.join(bad_links))
@pytest.mark.nondestructive
- def test_footer_section_urls(self, mozwebqa):
+ def test_footer_links_are_valid(self, mozwebqa):
about_page = AboutPage(mozwebqa)
about_page.go_to_page()
Assert.contains(about_page.footer.expected_footer_logo_destination,
about_page.footer.footer_logo_destination)
Assert.contains(about_page.footer.expected_footer_logo_img,
about_page.footer.footer_logo_img)
- bad_links = []
+ bad_urls = []
for link in AboutPage.Footer.footer_links_list:
url = about_page.link_destination(link.get('locator'))
- if not about_page.is_valid_link(url):
- bad_links.append('%s is not a valid url.' % url)
- Assert.equal(0, len(bad_links), '%s bad links found: ' % len(bad_links) + ', '.join(bad_links))
+ response_code = about_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
+ Assert.equal(0, len(bad_urls), '%s bad links found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
- def test_tabzilla_links_are_correct(self, mozwebqa):
+ def test_tabzilla_link_destinations_are_correct(self, mozwebqa):
about_page = AboutPage(mozwebqa)
about_page.go_to_page()
Assert.true(about_page.header.is_tabzilla_panel_visible)
@@ -60,12 +62,13 @@ def test_tabzilla_links_are_valid(self, mozwebqa):
about_page.go_to_page()
Assert.true(about_page.header.is_tabzilla_panel_visible)
about_page.header.toggle_tabzilla_dropdown()
- bad_links = []
+ bad_urls = []
for link in AboutPage.Header.tabzilla_links_list:
url = about_page.link_destination(link.get('locator'))
- if not about_page.is_valid_link(url):
- bad_links.append('%s is not a valid url.' % url)
- Assert.equal(0, len(bad_links), '%s bad links found: ' % len(bad_links) + ', '.join(bad_links))
+ response_code = about_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
+ Assert.equal(0, len(bad_urls), '%s bad links found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
def test_tabzilla_links_are_visible(self, mozwebqa):
@@ -107,8 +110,9 @@ def test_navbar_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in about_page.Header.nav_links_list:
url = about_page.link_destination(link.get('locator'))
- if not about_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = about_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -139,6 +143,7 @@ def test_major_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in about_page.major_links_list:
url = about_page.link_destination(link.get('locator'))
- if not about_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = about_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
13 tests/test_apps.py
@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from pages.desktop.apps import Apps
from unittestzero import Assert
@@ -12,7 +13,7 @@
class TestApps:
@pytest.mark.nondestructive
- def test_footer_section(self, mozwebqa):
+ def test_footer_link_destinations_are_correct(self, mozwebqa):
apps_page = Apps(mozwebqa)
apps_page.go_to_page()
Assert.contains(apps_page.footer.expected_footer_logo_destination,
@@ -67,8 +68,9 @@ def test_page_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in apps_page.page_links_list:
url = apps_page.link_destination(link.get('locator'))
- if not apps_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = apps_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -78,8 +80,9 @@ def test_showcased_apps_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in apps_page.showcased_apps_links:
url = link.get_attribute('href')
- if not apps_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = apps_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
6 tests/test_b2g.py
@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from pages.desktop.b2g import BootToGecko
from unittestzero import Assert
@@ -79,8 +80,9 @@ def test_nav_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in b2g_page.b2g_nav_links_list:
url = b2g_page.link_destination(link.get('locator'))
- if not b2g_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = b2g_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
4 tests/test_contribute.py
@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from pages.desktop.contribute import Contribute
from unittestzero import Assert
@@ -51,4 +52,5 @@ def test_want_to_help_form_is_correct(self, mozwebqa):
privacy_link = help_form.privacy_link
url = contribute_page.link_destination(privacy_link.get('locator'))
Assert.true(url.endswith(privacy_link.get('url_suffix')), '%s does not end with %s' % (url, privacy_link.get('url_suffix')))
- Assert.true(contribute_page.is_valid_link(url), '%s is not a valid url.' % url)
+ response_code = contribute_page.get_response_code(url)
+ Assert.equal(response_code, requests.codes.ok, '%s is not a valid url - status code: %s.' % (url, response_code))
11 tests/test_mission.py
@@ -4,6 +4,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from pages.desktop.mission import Mission
from unittestzero import Assert
@@ -39,8 +40,9 @@ def test_major_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in mission_page.major_links_list:
url = mission_page.link_destination(link.get('locator'))
- if not mission_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = mission_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -49,8 +51,9 @@ def test_video_srcs_are_valid(self, mozwebqa):
mission_page.go_to_page()
bad_srcs = []
for src in mission_page.video_sources_list:
- if not mission_page.is_valid_link(src):
- bad_srcs.append('%s is not a valid url' % src)
+ response_code = mission_page.get_response_code(src)
+ if response_code != requests.codes.ok:
+ bad_srcs.append('%s is not a valid url - status code: %s.' % (src, response_code))
Assert.equal(0, len(bad_srcs), '%s bad urls found: ' % len(bad_srcs) + ', '.join(bad_srcs))
@pytest.mark.nondestructive
16 tests/test_performance.py
@@ -3,9 +3,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import pytest
+import requests
from unittestzero import Assert
from pages.desktop.performance import Performance
-import pytest
class TestPerformance:
@@ -73,8 +74,9 @@ def test_billboard_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in performance_page.billboard_links_list:
url = performance_page.link_destination(link.get('locator'))
- if not performance_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = performance_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -83,7 +85,8 @@ def test_performance_image_is_correct(self, mozwebqa):
performance_page.go_to_page()
src = performance_page.perf_hardware_img_src
Assert.true(src.endswith('hardware-accel.png'))
- Assert.true(performance_page.is_valid_link(src), '%s is not a valid url.' % src)
+ response_code = performance_page.get_response_code(src)
+ Assert.equal(response_code, requests.codes.ok, '%s is not a valid url - status code: %s.' % (src, response_code))
@pytest.mark.nondestructive
def test_video_srcs_are_valid(self, mozwebqa):
@@ -91,7 +94,8 @@ def test_video_srcs_are_valid(self, mozwebqa):
performance_page.go_to_page()
bad_srcs = []
for src in performance_page.video_sources_list:
- if not performance_page.is_valid_link(src):
- bad_srcs.append('%s is not a valid url' % src)
+ response_code = performance_page.get_response_code(src)
+ if response_code != requests.codes.ok:
+ bad_srcs.append('%s is not a valid url - status code: %s.' % (src, response_code))
Assert.equal(0, len(bad_srcs), '%s bad urls found: ' % len(bad_srcs) + ', '.join(bad_srcs))
Assert.true(performance_page.is_video_overlay_visible)
31 tests/test_products.py
@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from unittestzero import Assert
from pages.desktop.products import ProductsPage
@@ -67,8 +68,9 @@ def test_product_nav_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in products_page.product_nav_links_list:
url = products_page.link_destination(link.get('locator'))
- if not products_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = products_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -99,8 +101,9 @@ def test_images_srcs_are_valid(self, mozwebqa):
bad_urls = []
for image in products_page.images_list:
url = products_page.image_source(image.get('locator'))
- if not products_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = products_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -131,8 +134,9 @@ def test_products_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in products_page.products_links_list:
url = products_page.link_destination(link.get('locator'))
- if not products_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = products_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -163,8 +167,9 @@ def test_innovations_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in products_page.innovations_links_list:
url = products_page.link_destination(link.get('locator'))
- if not products_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = products_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -195,8 +200,9 @@ def test_tools_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in products_page.tools_links_list:
url = products_page.link_destination(link.get('locator'))
- if not products_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = products_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -227,6 +233,7 @@ def test_platforms_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in products_page.platforms_links_list:
url = products_page.link_destination(link.get('locator'))
- if not products_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = products_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
16 tests/test_security.py
@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from pages.desktop.security import Security
from unittestzero import Assert
@@ -74,8 +75,9 @@ def test_billboard_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in security_page.billboard_links_list:
url = security_page.link_destination(link.get('locator'))
- if not security_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = security_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -106,8 +108,9 @@ def test_section_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in security_page.section_links_list:
url = security_page.link_destination(link.get('locator'))
- if not security_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = security_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -138,6 +141,7 @@ def test_images_srcs_are_valid(self, mozwebqa):
bad_urls = []
for image in security_page.images_list:
url = security_page.image_source(image.get('locator'))
- if not security_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = security_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
6 tests/test_sms.py
@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from unittestzero import Assert
from pages.desktop.sms import SMS
@@ -47,8 +48,9 @@ def test_info_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in sms_page.info_links_list:
url = sms_page.link_destination(link.get('locator'))
- if not sms_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = sms_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
11 tests/test_technology.py
@@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
+import requests
from unittestzero import Assert
from pages.desktop.technology import Technology
@@ -40,8 +41,9 @@ def test_billboard_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in technology_page.billboard_links_list:
url = technology_page.link_destination(link.get('locator'))
- if not technology_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = technology_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive
@@ -72,8 +74,9 @@ def test_more_info_link_urls_are_valid(self, mozwebqa):
bad_urls = []
for link in technology_page.more_info_links_list:
url = technology_page.link_destination(link.get('locator'))
- if not technology_page.is_valid_link(url):
- bad_urls.append('%s is not a valid url' % url)
+ response_code = technology_page.get_response_code(url)
+ if response_code != requests.codes.ok:
+ bad_urls.append('%s is not a valid url - status code: %s.' % (url, response_code))
Assert.equal(0, len(bad_urls), '%s bad urls found: ' % len(bad_urls) + ', '.join(bad_urls))
@pytest.mark.nondestructive