This repository has been archived by the owner on Mar 28, 2019. It is now read-only.

Commit e2a2439

Merge pull request #579 from mozilla-services/prepare-2.10.3

Prepare 2.10.3

leplatrem committed Nov 18, 2015
2 parents ec87e00 + e76bfa5

Showing 10 changed files with 150 additions and 47 deletions.
9 changes: 7 additions & 2 deletions CHANGELOG.rst
@@ -4,10 +4,15 @@ Changelog
This document describes changes between each past release.


2.10.3 (unreleased)
2.10.3 (2015-11-18)
-------------------

- Nothing changed yet.
**Bug fixes**

- Fix Service CORS not being set when plugins are included
- Fix principals of permission backend not being plugged by default (#573)
- Fix pagination loop when number of records is multiple of page size (fixes #366)
- Fix Redis error traces not being logged (#560)


2.10.2 (2015-11-10)
25 changes: 15 additions & 10 deletions cliquet/__init__.py
@@ -80,6 +80,7 @@
'userid_hmac_secret': '',
'version_prefix_redirect_enabled': True,
'trailing_slash_redirect_enabled': True,
'multiauth.groupfinder': 'cliquet.authorization.groupfinder',
'multiauth.policies': 'basicauth',
'multiauth.policy.basicauth.use': ('cliquet.authentication.'
'BasicAuthAuthenticationPolicy'),
@@ -94,6 +95,17 @@ class Service(CorniceService):
This is useful in order to attach specific behaviours without monkey
patching the default cornice service (which would impact other uses of it)
"""
default_cors_headers = ('Backoff', 'Retry-After', 'Alert',
'Content-Length')

def error_handler(self, error):
return errors.json_error_handler(error)

@classmethod
def init_from_settings(cls, settings):
cls.cors_origins = tuple(aslist(settings['cors_origins']))
cors_max_age = settings['cors_max_age_seconds']
cls.cors_max_age = int(cors_max_age) if cors_max_age else None


def includeme(config):
@@ -113,6 +125,9 @@ def includeme(config):
for app in includes:
config.include(app)

# Add CORS settings to the base cliquet Service class.
Service.init_from_settings(settings)

# Setup components.
for step in aslist(settings['initialization_sequence']):
step_func = config.maybe_dotted(step)
@@ -122,16 +137,6 @@ def includeme(config):
# for key, value in settings.items():
# logger.info('Using %s = %s' % (key, value))

# Add CORS settings to the base cliquet Service class.
cors_origins = settings['cors_origins']
Service.cors_origins = tuple(aslist(cors_origins))
Service.default_cors_headers = ('Backoff', 'Retry-After', 'Alert',
'Content-Length')
cors_max_age = settings['cors_max_age_seconds']
Service.cors_max_age = int(cors_max_age) if cors_max_age else None

Service.error_handler = lambda self, e: errors.json_error_handler(e)

# Custom helpers.
config.add_request_method(follow_subrequest)
config.add_request_method(lambda request: {'id': request.prefixed_userid},
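The CORS attributes that includeme() used to patch onto Service one by one now live on the class itself and are filled once at startup by init_from_settings(), so services declared by plugins share the same policy. A minimal standalone sketch of that startup pattern, assuming only the settings keys shown in the diff (aslist is stubbed here to mimic pyramid.settings.aslist; no Cornice dependency):

```python
def aslist(value):
    # pyramid.settings.aslist splits a whitespace-separated settings string.
    return value.split()


class Service(object):
    # Class-level CORS defaults shared by every service instance.
    default_cors_headers = ('Backoff', 'Retry-After', 'Alert', 'Content-Length')
    cors_origins = ()
    cors_max_age = None

    @classmethod
    def init_from_settings(cls, settings):
        # Copy deployment-wide CORS settings onto the class once, at startup.
        cls.cors_origins = tuple(aslist(settings['cors_origins']))
        cors_max_age = settings['cors_max_age_seconds']
        cls.cors_max_age = int(cors_max_age) if cors_max_age else None


settings = {'cors_origins': '*', 'cors_max_age_seconds': '3600'}
Service.init_from_settings(settings)
assert Service.cors_origins == ('*',)
assert Service.cors_max_age == 3600
```

Setting the attributes at class level once means services registered by included plugins pick up the CORS configuration without includeme() having to patch each one after the fact.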
24 changes: 24 additions & 0 deletions cliquet/authorization.py
@@ -13,6 +13,30 @@
PRIVATE = 'private'


def groupfinder(userid, request):
"""Fetch principals from permission backend for the specified `userid`.
This is plugged by default using the ``multiauth.groupfinder`` setting.
"""
backend = getattr(request.registry, 'permission', None)
# Permission backend not configured. Ignore.
if not backend:
return []

# Anonymous safety check.
if not hasattr(request, 'prefixed_userid'):
return []

# Query the permission backend only once per request (e.g. batch).

reify_key = request.prefixed_userid + '_principals'
if reify_key not in request.bound_data:
principals = backend.user_principals(request.prefixed_userid)
request.bound_data[reify_key] = principals

return request.bound_data[reify_key]


@implementer(IAuthorizationPolicy)
class AuthorizationPolicy(object):
# Callable that takes an object id and a permission and returns
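The new groupfinder is what feeds permission-backend entries into Pyramid's effective principals, and request.bound_data keeps the backend from being queried more than once per user within a batch request (which is what the new test_user_principals_are_cached_per_user below asserts). A standalone sketch of that caching behaviour with stub objects (FakePermissionBackend and FakeRequest are illustrations, not cliquet classes):

```python
class FakePermissionBackend(object):
    def __init__(self):
        self.calls = 0

    def user_principals(self, userid):
        self.calls += 1
        return {'group:admin'}


class FakeRequest(object):
    def __init__(self, backend, userid):
        self.registry = type('Registry', (object,), {'permission': backend})()
        self.prefixed_userid = userid
        self.bound_data = {}  # shared across the subrequests of a batch


def groupfinder(userid, request):
    backend = getattr(request.registry, 'permission', None)
    if backend is None or not hasattr(request, 'prefixed_userid'):
        return []
    reify_key = request.prefixed_userid + '_principals'
    if reify_key not in request.bound_data:
        request.bound_data[reify_key] = backend.user_principals(
            request.prefixed_userid)
    return request.bound_data[reify_key]


backend = FakePermissionBackend()
request = FakeRequest(backend, 'basicauth:bob')
groupfinder('bob', request)
groupfinder('bob', request)  # second lookup is served from bound_data
assert backend.calls == 1
```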
33 changes: 21 additions & 12 deletions cliquet/resource/__init__.py
@@ -215,7 +215,7 @@ def collection_get(self):
filter_fields = [f.field for f in filters]
include_deleted = self.model.modified_field in filter_fields

pagination_rules = self._extract_pagination_rules_from_token(
pagination_rules, offset = self._extract_pagination_rules_from_token(
limit, sorting)

records, total_records = self.model.get_records(
@@ -225,9 +225,12 @@ def collection_get(self):
pagination_rules=pagination_rules,
include_deleted=include_deleted)

offset = offset + len(records)
next_page = None
if limit and len(records) == limit and total_records > limit:
next_page = self._next_page_url(sorting, limit, records[-1])

if limit and len(records) == limit and offset < total_records:
lastrecord = records[-1]
next_page = self._next_page_url(sorting, limit, lastrecord, offset)
headers['Next-Page'] = encode_header(next_page)

# Bind metric about response size.
@@ -910,11 +913,14 @@ def _extract_pagination_rules_from_token(self, limit, sorting):
queryparams = self.request.GET
token = queryparams.get('_token', None)
filters = []
offset = 0
if token:
try:
last_record = json.loads(decode64(token))
assert isinstance(last_record, dict)
except (ValueError, TypeError, AssertionError):
tokeninfo = json.loads(decode64(token))
assert isinstance(tokeninfo, dict)
last_record = tokeninfo['last_record']
offset = tokeninfo['offset']
except (ValueError, KeyError, TypeError, AssertionError):
error_msg = '_token has invalid content'
error_details = {
'location': 'querystring',
@@ -923,11 +929,11 @@ def _extract_pagination_rules_from_token(self, limit, sorting):
raise_invalid(self.request, **error_details)

filters = self._build_pagination_rules(sorting, last_record)
return filters
return filters, offset

def _next_page_url(self, sorting, limit, last_record):
def _next_page_url(self, sorting, limit, last_record, offset):
"""Build the Next-Page header from where we stopped."""
token = self._build_pagination_token(sorting, last_record)
token = self._build_pagination_token(sorting, last_record, offset)

params = self.request.GET.copy()
params['_limit'] = limit
@@ -938,17 +944,20 @@ def _next_page_url(self, sorting, limit, last_record):
**self.request.matchdict)
return next_page_url

def _build_pagination_token(self, sorting, last_record):
def _build_pagination_token(self, sorting, last_record, offset):
"""Build a pagination token.
It is a base64 JSON object with the sorting fields values of
the last_record.
"""
token = {}
token = {
'last_record': {},
'offset': offset
}

for field, _ in sorting:
token[field] = last_record[field]
token['last_record'][field] = last_record[field]

return encode64(json.dumps(token))

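For reference, here is a self-contained sketch of the reworked pagination token and of why carrying an offset stops the pagination loop when the record count is an exact multiple of the page size. encode64/decode64 are stand-ins for the cliquet.utils helpers, and the functions below are free functions rather than the resource methods shown in the diff:

```python
import base64
import json


def encode64(content):
    return base64.b64encode(content.encode('utf-8')).decode('utf-8')


def decode64(encoded):
    return base64.b64decode(encoded.encode('utf-8')).decode('utf-8')


def build_pagination_token(sorting, last_record, offset):
    # The token now nests the sorting field values under 'last_record'
    # and carries the absolute offset reached so far.
    token = {'last_record': {}, 'offset': offset}
    for field, _ in sorting:
        token['last_record'][field] = last_record[field]
    return encode64(json.dumps(token))


def parse_pagination_token(token):
    tokeninfo = json.loads(decode64(token))
    return tokeninfo['last_record'], tokeninfo['offset']


# With 20 records and a page size of 10, the second page ends exactly on
# the last record: the offset (20) is no longer below total_records (20),
# so no Next-Page header is emitted and the client stops looping.
total_records, limit, offset = 20, 10, 10
offset += limit  # records returned on the second page
assert not (offset < total_records)

token = build_pagination_token([('last_modified', -1)],
                               {'last_modified': 1234}, offset)
assert parse_pagination_token(token) == ({'last_modified': 1234}, 20)
```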
3 changes: 2 additions & 1 deletion cliquet/storage/redis.py
@@ -4,7 +4,7 @@
import redis
from six.moves.urllib import parse as urlparse

from cliquet import utils
from cliquet import utils, logger
from cliquet.storage import (
exceptions, DEFAULT_ID_FIELD,
DEFAULT_MODIFIED_FIELD, DEFAULT_DELETED_FIELD)
@@ -17,6 +17,7 @@ def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except redis.RedisError as e:
logger.exception(e)
raise exceptions.BackendError(original=e)
return wrapped

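The Redis backend change is a single line inside the error-wrapping decorator: log the stack trace before translating the redis exception into a BackendError. A rough sketch of that decorator shape (the outer function name and the simplified BackendError are assumptions, not the exact cliquet definitions):

```python
import functools
import logging

logger = logging.getLogger('cliquet')


class BackendError(Exception):
    def __init__(self, original=None):
        self.original = original
        super(BackendError, self).__init__(repr(original))


def wrap_redis_error(func):
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:  # redis.RedisError in the real backend
            logger.exception(e)  # the new line: keep the traceback in the logs
            raise BackendError(original=e)
    return wrapped


@wrap_redis_error
def flaky_call():
    raise RuntimeError("connection refused")


try:
    flaky_call()
except BackendError as e:
    assert isinstance(e.original, RuntimeError)
```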
57 changes: 37 additions & 20 deletions cliquet/tests/resource/test_pagination.py
@@ -95,12 +95,12 @@ def test_stops_giving_next_page_at_the_end_sets(self):
self.resource.collection_get()
self.assertNotIn('Next-Page', self.last_response.headers)

# def test_stops_giving_next_page_at_the_end_sets_on_exact_limit(self):
# self.resource.request.GET = {'_limit': '10'}
# self.resource.collection_get()
# self._setup_next_page()
# self.resource.collection_get()
# self.assertNotIn('Next-Page', self.last_response.headers)
def test_stops_giving_next_page_at_the_end_sets_on_exact_limit(self):
self.resource.request.GET = {'_limit': '10'}
self.resource.collection_get()
self._setup_next_page()
self.resource.collection_get()
self.assertNotIn('Next-Page', self.last_response.headers)

def test_handle_simple_sorting(self):
self.resource.request.GET = {'_sort': '-status', '_limit': '20'}
@@ -168,6 +168,13 @@ def test_token_wrong_json(self):
'_token': b64encode('{"toto":'.encode('ascii')).decode('ascii')}
self.assertRaises(HTTPBadRequest, self.resource.collection_get)

def test_token_wrong_json_fields(self):
badtoken = '{"toto": {"tutu": 1}}'
self.resource.request.GET = {
'_since': '123', '_limit': '20',
'_token': b64encode(badtoken.encode('ascii')).decode('ascii')}
self.assertRaises(HTTPBadRequest, self.resource.collection_get)

def test_raises_bad_request_if_token_has_bad_data_structure(self):
invalid_token = json.dumps([[('last_modified', 0, '>')]])
self.resource.request.GET = {
@@ -185,36 +192,46 @@ def setUp(self):
'last_modified': 1234, 'title': 'Title'
}

def test_token_contains_current_offset(self):
token = self.resource._build_pagination_token([('last_modified', -1)],
self.record,
42)
tokeninfo = json.loads(b64decode(token).decode('ascii'))
self.assertEqual(tokeninfo['offset'], 42)

def test_no_sorting_default_to_modified_field(self):
token = self.resource._build_pagination_token([('last_modified', -1)],
self.record)
self.assertDictEqual(json.loads(b64decode(token).decode('ascii')),
self.record,
42)
tokeninfo = json.loads(b64decode(token).decode('ascii'))
self.assertDictEqual(tokeninfo['last_record'],
{"last_modified": 1234})

def test_sorting_handle_both_rules(self):
token = self.resource._build_pagination_token([
('status', -1),
('last_modified', -1)
], self.record)
self.assertDictEqual(
json.loads(b64decode(token).decode('ascii')),
{"last_modified": 1234, "status": 2})
], self.record, 34)
tokeninfo = json.loads(b64decode(token).decode('ascii'))
self.assertDictEqual(tokeninfo['last_record'],
{"last_modified": 1234, "status": 2})

def test_sorting_handle_ordering_direction(self):
token = self.resource._build_pagination_token([
('status', 1),
('last_modified', 1)
], self.record)
self.assertEqual(
json.loads(b64decode(token).decode('ascii')),
{"last_modified": 1234, "status": 2})
], self.record, 32)
tokeninfo = json.loads(b64decode(token).decode('ascii'))
self.assertEqual(tokeninfo['last_record'],
{"last_modified": 1234, "status": 2})

def test_multiple_sorting_keep_all(self):
token = self.resource._build_pagination_token([
('status', 1),
('title', -1),
('last_modified', -1)
], self.record)
self.assertEqual(
json.loads(b64decode(token).decode('ascii')),
{"last_modified": 1234, "status": 2, 'title': 'Title'})
], self.record, 31)
tokeninfo = json.loads(b64decode(token).decode('ascii'))
self.assertEqual(tokeninfo['last_record'],
{"last_modified": 1234, "status": 2,
'title': 'Title'})
33 changes: 33 additions & 0 deletions cliquet/tests/test_authentication.py
@@ -33,6 +33,39 @@ def test_views_are_forbidden_if_unknown_auth_method(self):
self.headers['Authorization'] = 'Carrier pigeon'
app.get(self.sample_url, headers=self.headers, status=401)

def test_principals_are_fetched_from_permission_backend(self):
patch = mock.patch(('cliquet.tests.support.'
'AllowAuthorizationPolicy.permits'))
self.addCleanup(patch.stop)
mocked = patch.start()

self.permission.add_user_principal(self.principal, 'group:admin')
self.app.get(self.collection_url, headers=self.headers)

_, principals, _ = mocked.call_args[0]
self.assertIn('group:admin', principals)

def test_user_principals_are_cached_per_user(self):
patch = mock.patch.object(self.permission, 'user_principals',
wraps=self.permission.user_principals)
self.addCleanup(patch.stop)
mocked = patch.start()
batch = {
"defaults": {
"headers": self.headers,
"path": "/mushrooms"
},
"requests": [
{},
{},
{},
{"headers": {"Authorization": "Basic Ym9iOg=="}},
{"headers": {"Authorization": "Basic bWF0Og=="}},
]
}
self.app.post_json('/batch', batch)
self.assertEqual(mocked.call_count, 3)


class BasicAuthenticationPolicyTest(unittest.TestCase):
def setUp(self):
9 changes: 9 additions & 0 deletions cliquet/tests/test_storage.py
@@ -995,6 +995,15 @@ def test_get_all_handle_expired_values(self):
with mocked_mget:
self.storage.get_all(**self.storage_kw) # not raising

def test_errors_logs_stack_trace(self):
self.client_error_patcher.start()

with mock.patch('cliquet.storage.logger.exception') as exc_handler:
with self.assertRaises(exceptions.BackendError):
self.storage.get_all(**self.storage_kw)

self.assertTrue(exc_handler.called)


@skip_if_no_postgresql
class PostgresqlStorageTest(StorageTest, unittest.TestCase):
2 changes: 1 addition & 1 deletion cliquet_docs/conf.py
@@ -68,7 +68,7 @@
# The short X.Y version.
version = '2.10'
# The full version, including alpha/beta/rc tags.
release = '2.10.2'
release = '2.10.3'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
2 changes: 1 addition & 1 deletion setup.py
@@ -59,7 +59,7 @@


setup(name='cliquet',
version='2.10.3.dev0',
version='2.10.3',
description='Micro service API toolkit',
long_description=README + "\n\n" + CHANGELOG + "\n\n" + CONTRIBUTORS,
license='Apache License (2.0)',
