Skip to content
This repository has been archived by the owner on Feb 1, 2018. It is now read-only.

bug 866873 - serve robots.txt according to the ENGAGE_ROBOTS setting #333

Merged
merged 1 commit into from Apr 30, 2013
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
20 changes: 20 additions & 0 deletions crashstats/crashstats/tests/test_views.py
Expand Up @@ -6,6 +6,7 @@
import datetime import datetime
from nose.tools import eq_, ok_ from nose.tools import eq_, ok_
from django.test import TestCase from django.test import TestCase
from django.test.utils import override_settings
from django.test.client import RequestFactory from django.test.client import RequestFactory
from django.conf import settings from django.conf import settings
from django.core.cache import cache from django.core.cache import cache
Expand All @@ -21,6 +22,25 @@ def __init__(self, content=None, status_code=200):
self.status_code = status_code self.status_code = status_code




class RobotsTestViews(TestCase):
    """Exercise the /robots.txt view with ENGAGE_ROBOTS both on and off."""

    @override_settings(ENGAGE_ROBOTS=True)
    def test_robots_txt(self):
        # Robots engaged: the response must invite crawlers everywhere.
        response = self.client.get('/robots.txt')
        eq_(response.status_code, 200)
        eq_(response['Content-Type'], 'text/plain')
        ok_('Allow: /' in response.content)

    @override_settings(ENGAGE_ROBOTS=False)
    def test_robots_txt_disengage(self):
        # Robots disengaged: the entire site must be off limits to crawlers.
        response = self.client.get('/robots.txt')
        eq_(response.status_code, 200)
        eq_(response['Content-Type'], 'text/plain')
        ok_('Disallow: /' in response.content)


class BaseTestViews(TestCase): class BaseTestViews(TestCase):


@mock.patch('requests.get') @mock.patch('requests.get')
Expand Down
3 changes: 3 additions & 0 deletions crashstats/crashstats/urls.py
Expand Up @@ -14,6 +14,9 @@


urlpatterns = patterns( urlpatterns = patterns(
'', # prefix '', # prefix
url('^robots\.txt$',
views.robots_txt,
name='crashstats.robots_txt'),
url('^home' + products + '$', url('^home' + products + '$',
views.home, views.home,
name='crashstats.home'), name='crashstats.home'),
Expand Down
8 changes: 8 additions & 0 deletions crashstats/crashstats/views.py
Expand Up @@ -22,6 +22,14 @@
from .decorators import check_days_parameter from .decorators import check_days_parameter




def robots_txt(request):
    """Serve a minimal robots.txt driven by settings.ENGAGE_ROBOTS.

    When ENGAGE_ROBOTS is True, all crawlers are allowed to spider the
    whole site ('Allow: /'); otherwise everything is blocked
    ('Disallow: /').
    """
    directive = 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow'
    return http.HttpResponse(
        'User-agent: *\n'
        '%s: /' % directive,
        # content_type= instead of mimetype=: the mimetype keyword is
        # deprecated as of Django 1.5 and removed in Django 1.7.
        content_type='text/plain',
    )


def has_builds(product, versions): def has_builds(product, versions):
contains_builds = False contains_builds = False
prod_versions = [] prod_versions = []
Expand Down
4 changes: 4 additions & 0 deletions crashstats/settings/base.py
Expand Up @@ -252,3 +252,7 @@
# A prefix that is sometimes prefixed on the crash ID when used elsewhere in # A prefix that is sometimes prefixed on the crash ID when used elsewhere in
# the socorro eco-system. # the socorro eco-system.
CRASH_ID_PREFIX = 'bp-' CRASH_ID_PREFIX = 'bp-'


# If True, robots.txt allows crawlers to spider the entire site;
# if False, robots.txt disallows everything (see views.robots_txt).
ENGAGE_ROBOTS = False