Skip to content


Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Fetching contributors…

Cannot retrieve contributors at this time

123 lines (101 sloc) 5.616 kb
# -*- coding: utf-8 -*-
from django.conf import settings
from mock import patch
# NOTE(review): the original line read "from import raises, eq_", which is a
# SyntaxError — the module name was lost. These helpers come from nose.tools
# in projects of this vintage; confirm against version control.
from nose.tools import raises, eq_

from dashboards.models import WikiDocumentVisits, THIS_WEEK
from sumo.tests import TestCase
from sumo.webtrends import StatsException, StatsIOError
from wiki.tests import document, revision
# NOTE(review): this class is a garbled copy — indentation has been stripped
# and many statements are cut off mid-line (unbalanced parentheses, truncated
# format strings, a decorator missing its arguments). The code below is left
# byte-identical, with comments flagging the visible damage. Restore the file
# from version control before attempting to run these tests.
class DocumentVisitsTests(TestCase):
"""Tests for the WebTrends statistics gathering"""
# Django test fixture providing the user accounts these tests rely on.
fixtures = ['users.json']
# NOTE(review): the bodies of the next three tests were lost in this copy —
# only docstrings remain. Presumably each fed malformed input to
# WikiDocumentVisits._visit_counts and expected a StatsException (imported
# above but otherwise unused here) — confirm against version control.
def test_bad_json(self):
"""Raise a nice error if WebTrends hands us bad JSON."""
def test_no_data_attr(self):
"""Raise a nice err if WebTrends returns an obj with no 'data' attr."""
def test_not_subscriptable(self):
"""Raise a nice err if WebTrends returns an unsubscriptable obj."""
def test_no_pages(self):
"""Don't pave over current data if WebTrends returns well-formatted
data structure with no interesting data in it."""
# Get some JSON that contains no interesting data.
no_pages = '{"data": {"12/01/2010-12/07/2010": {"SubRows": {}}}}'
counts = WikiDocumentVisits._visit_counts(no_pages)
eq_({}, counts) # Make sure nothing interesting is there.
# Try to reload visits table from the uninteresting data:
d = document()
# NOTE(review): the create() call below is truncated — its remaining
# keyword argument(s) (presumably a period) and closing paren are missing.
v = WikiDocumentVisits.objects.create(document=d, visits=12,
WikiDocumentVisits.reload_period_from_json(THIS_WEEK, no_pages)
# Visits table should remain unchanged:
# NOTE(review): truncated — the filter() arguments, closing parens, and
# the count/aggregate being compared to 1 are missing.
eq_(1, WikiDocumentVisits.objects.filter(
def test_no_locale(self):
"""Skip URLs with no locale."""
eq_({}, WikiDocumentVisits._visit_counts('{"data": {"12/01/2010-12/07/'
'2010": {"SubRows":{"":8}}}}'))
def test_foreign_locale(self):
"""Skip URLs with non-English locale."""
# NOTE(review): the URL key inside SubRows was lost from this JSON
# literal (only "" remains), making this test textually identical to
# test_no_locale — the original presumably embedded a non-English-locale
# URL here. Confirm against version control.
eq_({}, WikiDocumentVisits._visit_counts('{"data": {"12/01/2010-12/07/'
'2010": {"SubRows":{"":8}}}}'))
def test_unknown_view(self):
"""Skip URLs that don't resolve."""
# NOTE(review): the JSON literal lost its %s placeholder(s), so the
# "% settings.LANGUAGE_CODE" substitution below cannot work as written.
eq_({}, WikiDocumentVisits._visit_counts('{"data": {"12/01/2010-12/07/'
'2010": {"SubRows":{"":8}}}}'
% settings.LANGUAGE_CODE))
def test_non_document_view(self):
"""Skip URLs that don't resolve to the wiki document view."""
# NOTE(review): same damage as above — placeholder(s) missing from the
# format string.
eq_({}, WikiDocumentVisits._visit_counts('{"data": {"12/01/2010-12/07/'
'2010": {"SubRows":{"":8'
'}}}}' % settings.LANGUAGE_CODE))
def test_bad_visit_count(self):
"""Skip URLs whose visit counts aren't ints."""
d = revision(is_approved=True, save=True).document
# NOTE(review): truncated mid-literal — the remainder of the JSON (which
# should embed LANGUAGE_CODE and d.slug via the two %-args) is missing.
eq_({}, WikiDocumentVisits._visit_counts('{"data": {"12/01/2010-12/07/'
'2010": {"SubRows":{"":{'
% (settings.LANGUAGE_CODE, d.slug)))
def test_bad_page_info(self):
"""Skip URLs whose page info is unsubscriptable."""
d = revision(is_approved=True, save=True).document
# NOTE(review): placeholders for the two %-args were lost from this
# literal as well.
eq_({}, WikiDocumentVisits._visit_counts('{"data": {"12/01/2010-12/07/'
'2010": {"SubRows":{"":8}}}}'
% (settings.LANGUAGE_CODE, d.slug)))
def test_good_visit_count(self):
"""Extract visit counts from good data.
It has some nasty non-ASCII chars in it.
"""
d = revision(document=document(slug='hellỗ', save=True),
is_approved=True, save=True).document
d2 = revision(document=document(slug='there', save=True),
is_approved=True, save=True).document
# We get a str, not a unicode obj, out of the urllib call.
# NOTE(review): truncated — the expected value lost its dict keys
# (presumably mapping the two documents' pks or slugs to these counts;
# as written it parses as a set), and most of the JSON payload, its
# closing braces, and the URL keys embedding LANGUAGE_CODE are missing.
eq_({ 1037639, 213817}, WikiDocumentVisits._visit_counts(
'{"data": {"12/01/2010-12/07/2010": {"SubRows":{'
'"Title":"Firefox Support Home Page | Firefox Support","UrlLink":'
'{"Visits":1037639.0,"Views":3357731.0,"Average Time Viewed":23.0'
'{"Attributes":{"Title":"Startseite der Firefox-Hilfe | Firefox'
'"measures":{"Visits":213817.0,"Views":595329.0,"Average Time '
% ((settings.LANGUAGE_CODE,) * 2)))
# NOTE(review): decorator truncated — the replacement value for the patched
# WEBTRENDS_WIKI_REPORT_URL setting (presumably an unreachable URL, given
# the test below) and the closing paren are missing.
@patch.object(settings._wrapped, 'WEBTRENDS_WIKI_REPORT_URL',
def test_networking_failure(self):
"""Assert a StatsIOError is thrown when networking fails."""
self.assertRaises(StatsIOError, WikiDocumentVisits.json_for, THIS_WEEK)
# This takes 14 seconds and hits the WebTrends server, for which you might
# not have credentials. If you have credentials and want to test the
# networking, uncomment this.
# def test_networking(self):
# WikiDocumentVisits.reload_period_from_json(
# THIS_WEEK, WikiDocumentVisits.json_for(THIS_WEEK))
Jump to Line
Something went wrong with that request. Please try again.