Skip to content

Commit

Permalink
Merge 6ccf34a into 1686aee
Browse files Browse the repository at this point in the history
  • Loading branch information
protoroto committed Apr 8, 2019
2 parents 1686aee + 6ccf34a commit 2431628
Show file tree
Hide file tree
Showing 10 changed files with 255 additions and 7 deletions.
6 changes: 6 additions & 0 deletions .eggs/README.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
This directory contains eggs that were downloaded by setuptools to build, test, and run plug-ins.

This directory caches those eggs to prevent repeated downloads.

However, it is safe to delete this directory.

5 changes: 5 additions & 0 deletions HISTORY.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,11 @@
History
-------

0.4.2
++++++++++++++++++

* Add support for noindex, noarchive robots meta tag

0.4.1 (2016-12-02)
++++++++++++++++++

Expand Down
32 changes: 32 additions & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -78,16 +78,48 @@ Quickstart
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': {'cmspages': ExtendedSitemap}}),
)

* Add the following snippets to the django CMS templates::

{% load robots_index %}

...
<head>
<!-- somewhere in the head tag -->
{% page_robots %}
</head>
...

Usage
-----

After installing as above, you will be able to tune the sitemap setting for each page.

A new menu item ``Sitemap properties`` will be available in the page toolbar.

For each page you will be able to set the following flags / values:

* Sitemap changefreq (default: the django CMS default)
* Sitemap priority (default: 0.5)
* Include page in sitemap (default: `True`)
* Set `noindex` value to page robots meta tag
* Set `noarchive` value to page robots meta tag
* Provide any additional robots meta tag values

page_robots options
###################

`page_robots` meta tag accepts the following parameters:

* `page`: the page to render robots meta tag (default: current page). Can be
any valid `page lookup`_
* `site`: the current site id (default: current site).

Settings
--------

* PAGE_SITEMAP_CHANGEFREQ_LIST: List of available change frequencies
* PAGE_SITEMAP_DEFAULT_CHANGEFREQ: Default change frequency (default: the django CMS value, ``monthly``)
* PAGE_SITEMAP_CACHE_DURATION: Cache duration (default: same as the django CMS menu cache)


.. _page lookup: https://docs.django-cms.org/en/reference/templatetags.html#page_lookup
2 changes: 1 addition & 1 deletion djangocms_page_sitemap/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@
from .models import PageSitemapProperties


@admin.register(PageSitemapProperties)
class PageSitemapPropertiesAdmin(PageExtensionAdmin):
    """Admin integration for the ``PageSitemapProperties`` page extension.

    Registration happens via the ``@admin.register`` decorator above; the
    previous explicit ``admin.site.register(...)`` call was removed because
    registering the same model twice raises ``admin.sites.AlreadyRegistered``.
    """
    pass
30 changes: 30 additions & 0 deletions djangocms_page_sitemap/migrations/0004_auto_20190504_1423.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-02-02 09:44
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add robots-meta-tag fields to ``PageSitemapProperties``.

    Introduces the ``noindex`` / ``noarchive`` boolean flags and a free-form
    ``robots_extra`` char field; together these drive the content of the
    page's ``<meta name="robots">`` tag.
    """

    dependencies = [
        ('djangocms_page_sitemap', '0003_auto_20151018_1612'),
    ]

    operations = [
        # Boolean flag: adds "noarchive" to the robots meta tag when set.
        migrations.AddField(
            model_name='pagesitemapproperties',
            name='noarchive',
            field=models.BooleanField(default=False, help_text='Add meta tag robots with value noarchive', verbose_name='Mark as no archive'),
        ),
        # Boolean flag: adds "noindex" to the robots meta tag when set.
        migrations.AddField(
            model_name='pagesitemapproperties',
            name='noindex',
            field=models.BooleanField(default=False, help_text='Add meta tag robots with value noindex', verbose_name='Mark as no index'),
        ),
        # Free-form extra values appended verbatim to the robots meta tag.
        migrations.AddField(
            model_name='pagesitemapproperties',
            name='robots_extra',
            field=models.CharField(default='', help_text='Extra values for robots meta tag', max_length=200, verbose_name='Extra robots value'),
        ),
    ]
12 changes: 12 additions & 0 deletions djangocms_page_sitemap/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,18 @@ class PageSitemapProperties(PageExtension):
max_digits=2, default=0.5,
validators=[MinValueValidator(0), MaxValueValidator(1)])
include_in_sitemap = models.BooleanField(_('Include in sitemap'), default=True)
noindex = models.BooleanField(
_('Mark as no index'), default=False,
help_text=_('Add meta tag robots with value noindex')
)
noarchive = models.BooleanField(
_('Mark as no archive'), default=False,
help_text=_('Add meta tag robots with value noarchive')
)
robots_extra = models.CharField(
_('Extra robots value'), default='', max_length=200,
help_text=_('Extra values for robots meta tag')
)

def __str__(self):
    # Human-readable label keyed on the extended page's primary key.
    # NOTE(review): if ``_`` here is ``ugettext_lazy``, the ``%`` interpolation
    # yields a lazy proxy rather than ``str`` — confirm ``__str__`` actually
    # returns a plain string (Python 3 raises TypeError otherwise).
    return _('Sitemap values for Page %s') % self.extended_object.pk
Expand Down
1 change: 1 addition & 0 deletions djangocms_page_sitemap/templatetags/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# -*- coding: utf-8 -*-
45 changes: 45 additions & 0 deletions djangocms_page_sitemap/templatetags/robots_index.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals

from classytags.arguments import Argument
from classytags.core import Options, Tag
from cms.templatetags.cms_tags import _get_page_by_untyped_arg
from django import template
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ObjectDoesNotExist

register = template.Library()


@register.tag(name='page_robots')
class PageRobots(Tag):
    """Render a ``<meta name="robots">`` tag from the page's sitemap extension.

    Optional arguments:

    * ``page``: any valid django CMS page lookup (defaults to the current page)
    * ``site_id``: site to resolve the lookup against (defaults to the current
      site)

    Renders the empty string when no page can be resolved or the page has no
    ``PageSitemapProperties`` extension attached.
    """
    name = 'page_robots'
    options = Options(
        Argument('page', required=False),
        Argument('site_id', required=False),
    )

    def render_tag(self, context, page, site_id):
        request = context.get('request')
        if not site_id:
            site_id = get_current_site(request).pk
        # Resolve an explicit page lookup, otherwise fall back to the page
        # attached to the current request.
        if page:
            page = _get_page_by_untyped_arg(page, request, site_id)
        else:
            page = request.current_page
        if not page:
            return ''
        try:
            props = page.pagesitemapproperties
        except ObjectDoesNotExist:
            # Page has no sitemap-properties extension: emit nothing.
            return ''
        values = []
        if props.noindex:
            values.append('noindex')
        if props.noarchive:
            values.append('noarchive')
        if props.robots_extra:
            values.append(props.robots_extra)
        return '<meta name="robots" content="%s">' % ','.join(values)
123 changes: 123 additions & 0 deletions tests/test_models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals

from django import template
from django.contrib.auth.models import AnonymousUser
from django.contrib.sites.models import Site

from djangocms_page_sitemap.models import PageSitemapProperties

from .base import BaseTest


class RobotsTest(BaseTest):
    """Integration tests for the ``page_robots`` template tag.

    The local template-string variables are named ``tpl`` rather than
    ``template`` so they do not shadow the ``django.template`` module
    imported at the top of this file.
    """

    def _test_robots_tag(self, tpl, context, expected):
        """Render ``tpl`` with ``context`` and assert the output is ``expected``.

        Also verifies that rendering leaves the supplied context entries
        untouched.
        """
        tpl_obj = template.Template(tpl)
        ctx_obj = template.Context(context)
        rendered = tpl_obj.render(ctx_obj)
        self.assertEqual(rendered, expected)
        for key, value in context.items():
            self.assertEqual(ctx_obj.get(key), value)

    def test_robots_options(self):
        """Tag output tracks noindex / noarchive / robots_extra on the current page."""
        page1, page2, page3 = self.get_pages()
        extension = PageSitemapProperties.objects.create(
            extended_object=page1, priority='0.2', changefreq='never'
        )

        tpl = '{% load robots_index %}{% page_robots %}'
        expected = '<meta name="robots" content="">'
        context = {'request': self.get_page_request(page1, AnonymousUser())}
        self._test_robots_tag(tpl, context, expected)

        extension.noindex = True
        extension.save()
        expected = '<meta name="robots" content="noindex">'
        self._test_robots_tag(tpl, context, expected)

        extension.noarchive = True
        extension.save()
        expected = '<meta name="robots" content="noindex,noarchive">'
        self._test_robots_tag(tpl, context, expected)

        extension.robots_extra = 'nodmoz'
        extension.save()
        expected = '<meta name="robots" content="noindex,noarchive,nodmoz">'
        self._test_robots_tag(tpl, context, expected)

    def test_robots_page_parameter(self):
        """An explicit page argument renders that page's published robots values."""
        page1, page2, page3 = self.get_pages()
        extension = PageSitemapProperties.objects.create(
            extended_object=page1, priority='0.2', changefreq='never'
        )
        page1.publish('en')
        extension.refresh_from_db()

        tpl = '{% load robots_index %}{% page_robots %}'
        expected = ''
        context = {'request': self.get_page_request(page2.get_public_object(), AnonymousUser())}
        self._test_robots_tag(tpl, context, expected)

        extension.noindex = True
        extension.save()
        page1.publish('en')
        # Current page (page2) has no extension, so output stays empty.
        expected = ''
        self._test_robots_tag(tpl, context, expected)

        tpl = '{%% load robots_index %%}{%% page_robots %s %%}' % page1.pk
        expected = '<meta name="robots" content="noindex">'
        self._test_robots_tag(tpl, context, expected)

        extension.noarchive = True
        extension.save()
        page1.publish('en')
        expected = '<meta name="robots" content="noindex,noarchive">'
        self._test_robots_tag(tpl, context, expected)

        extension.robots_extra = 'nodmoz'
        extension.save()
        page1.publish('en')
        expected = '<meta name="robots" content="noindex,noarchive,nodmoz">'
        self._test_robots_tag(tpl, context, expected)

    def test_robots_page_no_site(self):
        """An invalid site argument renders the empty string."""
        page1, page2, page3 = self.get_pages()
        extension = PageSitemapProperties.objects.create(
            extended_object=page1, priority='0.2', changefreq='never'
        )
        page1.publish('en')
        extension.refresh_from_db()

        tpl = '{% load robots_index %}{% page_robots None "abc" %}'
        expected = ''
        context = {'request': self.get_page_request(page2.get_public_object(), AnonymousUser())}
        self._test_robots_tag(tpl, context, expected)

    def test_robots_page_no_page(self):
        """An unresolvable page lookup renders the empty string."""
        page1, page2, page3 = self.get_pages()
        extension = PageSitemapProperties.objects.create(
            extended_object=page1, priority='0.2', changefreq='never'
        )
        page1.publish('en')
        extension.refresh_from_db()

        tpl = '{% load robots_index %}{% page_robots "abc" %}'
        expected = ''
        context = {'request': self.get_page_request(page2.get_public_object(), AnonymousUser())}
        self._test_robots_tag(tpl, context, expected)

    def test_robots_page_other_site(self):
        """A page lookup scoped to a different site renders the empty string."""
        site_2 = Site.objects.create(domain='http://othersite.com')
        page1, page2, page3 = self.get_pages()
        extension = PageSitemapProperties.objects.create(
            extended_object=page1, priority='0.2', changefreq='never'
        )
        page1.publish('en')
        extension.refresh_from_db()

        tpl = '{%% load robots_index %%}{%% page_robots None %s %%}' % site_2.pk
        expected = ''
        context = {'request': self.get_page_request(page2.get_public_object(), AnonymousUser())}
        self._test_robots_tag(tpl, context, expected)
6 changes: 0 additions & 6 deletions tests/test_utils/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,10 @@
from django.conf.urls import include, url
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.i18n import javascript_catalog
from django.views.static import serve

from djangocms_page_sitemap.sitemap import ExtendedSitemap

admin.autodiscover()

urlpatterns = [
Expand All @@ -21,9 +18,6 @@
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'^media/cms/(?P<path>.*)$', serve,
{'document_root': get_cms_setting('MEDIA_ROOT'), 'show_indexes': True}),
url(r'^sitemap\.xml$', sitemap, {
'sitemaps': {'cmspages': ExtendedSitemap}
}),
url(r'^jsi18n/(?P<packages>\S+?)/$', javascript_catalog),
]

Expand Down

0 comments on commit 2431628

Please sign in to comment.