Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or check out with Subversion.
Download ZIP

Loading…

bug 866873 - robots.txt accordingly #333

Merged
merged 1 commit into from

3 participants

@peterbe
Owner

Disengage robots by default.

NOTE: If we want Googlebot to spider our production site we need to override this default setting to True in prod's settings/local.py

@lauraxt
Owner

r+

@lauraxt lauraxt merged commit d70e4e1 into mozilla:master
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Commits on Apr 30, 2013
  1. @peterbe
This page is out of date. Refresh to see the latest.
View
20 crashstats/crashstats/tests/test_views.py
@@ -6,6 +6,7 @@
import datetime
from nose.tools import eq_, ok_
from django.test import TestCase
+from django.test.utils import override_settings
from django.test.client import RequestFactory
from django.conf import settings
from django.core.cache import cache
@@ -21,6 +22,25 @@ def __init__(self, content=None, status_code=200):
self.status_code = status_code
class RobotsTestViews(TestCase):
    """Exercise /robots.txt under both values of ENGAGE_ROBOTS."""

    def _fetch_robots_txt(self):
        # Shared helper: GET /robots.txt and verify the invariants common
        # to both settings (200 status, plain-text content type).
        response = self.client.get('/robots.txt')
        eq_(response.status_code, 200)
        eq_(response['Content-Type'], 'text/plain')
        return response

    @override_settings(ENGAGE_ROBOTS=True)
    def test_robots_txt(self):
        # Robots engaged: crawlers are allowed to spider everything.
        response = self._fetch_robots_txt()
        ok_('Allow: /' in response.content)

    @override_settings(ENGAGE_ROBOTS=False)
    def test_robots_txt_disengage(self):
        # Robots disengaged (the default): crawlers are barred entirely.
        response = self._fetch_robots_txt()
        ok_('Disallow: /' in response.content)
+
class BaseTestViews(TestCase):
@mock.patch('requests.get')
View
3  crashstats/crashstats/urls.py
@@ -14,6 +14,9 @@
urlpatterns = patterns(
'', # prefix
+ url('^robots\.txt$',
+ views.robots_txt,
+ name='crashstats.robots_txt'),
url('^home' + products + '$',
views.home,
name='crashstats.home'),
View
8 crashstats/crashstats/views.py
@@ -22,6 +22,14 @@
from .decorators import check_days_parameter
def robots_txt(request):
    """Serve /robots.txt, gating crawler access on settings.ENGAGE_ROBOTS.

    When ENGAGE_ROBOTS is True the response reads ``Allow: /`` (crawlers
    may spider the whole site); otherwise it reads ``Disallow: /``.

    :param request: the incoming HttpRequest (unused beyond routing).
    :returns: a text/plain HttpResponse with the robots directives.
    """
    directive = 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow'
    return http.HttpResponse(
        'User-agent: *\n'
        '%s: /' % directive,
        # ``content_type`` replaces the ``mimetype`` keyword argument,
        # which is deprecated as of Django 1.5 and removed in 1.7.
        content_type='text/plain',
    )
+
+
def has_builds(product, versions):
contains_builds = False
prod_versions = []
View
4 crashstats/settings/base.py
@@ -252,3 +252,7 @@
# A prefix that is sometimes prefixed on the crash ID when used elsewhere in
# the socorro eco-system.
CRASH_ID_PREFIX = 'bp-'
+
+
+# If True, /robots.txt allows search-engine robots to spider the site.
+# Defaults to False; production must override this to True in its
+# settings/local.py for Googlebot to index the site.
+ENGAGE_ROBOTS = False
Something went wrong with that request. Please try again.