Skip to content
This repository has been archived by the owner on Mar 15, 2018. It is now read-only.

Commit

Permalink
Merge pull request #121 from khchen428/master
Browse files Browse the repository at this point in the history
#53 Make it easier to disallow robots crawling.
  • Loading branch information
kumar303 committed Jul 18, 2012
2 parents 5c43d5a + 979643f commit fbad813
Show file tree
Hide file tree
Showing 3 changed files with 17 additions and 1 deletion.
5 changes: 5 additions & 0 deletions project/settings/base.py
Expand Up @@ -45,6 +45,11 @@
    'django_browserid.context_processors.browserid_form',
]


# Should robots.txt deny everything, or only disallow a calculated list of
# URLs we don't want crawled?  False (the default here) serves "Disallow: /"
# to all user agents, blocking all crawling; True serves "Allow: /" instead
# (the robots.txt view lives in project/urls.py).
# Also see http://www.google.com/support/webmasters/bin/answer.py?answer=93710
ENGAGE_ROBOTS = False

# Always generate a CSRF token for anonymous users.
ANON_ALWAYS = True


Expand Down
5 changes: 4 additions & 1 deletion project/settings/local.py-dist
Expand Up @@ -60,6 +60,9 @@ PASSWORD_HASHERS = get_password_hashers(base.BASE_PASSWORD_HASHERS, HMAC_KEYS)
# Make this unique, and don't share it with anybody. It cannot be blank.
SECRET_KEY = ''


# Should robots.txt allow web crawlers?  Set this to True for production so
# the site serves "Allow: /" and search engines may index it; leave False to
# serve "Disallow: /" for every user agent (overrides the default in base.py).
ENGAGE_ROBOTS = True

# Uncomment these to activate and customize Celery:
# CELERY_ALWAYS_EAGER = False  # required to activate celeryd
# BROKER_HOST = 'localhost'
Expand All @@ -80,4 +83,4 @@ SECRET_KEY = ''


# Uncomment this line if you are running a local development install without
# HTTPS to disable HTTPS-only cookies.
#SESSION_COOKIE_SECURE = False
8 changes: 8 additions & 0 deletions project/urls.py
Expand Up @@ -14,6 +14,14 @@
urlpatterns = patterns('',
    # Example:
    (r'', include(urls)),

# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow' ,
mimetype="text/plain"
)
)


    # Uncomment the admin/doc line below to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
Expand Down

0 comments on commit fbad813

Please sign in to comment.