serialization: fix caching behavior again
* Forces a reset of the cache if a serializer uses this behavior.
* Refactors document facet filtering/building.

Co-Authored-by: Renaud Michotte <renaud.michotte@gmail.com>
zannkukai committed Aug 25, 2022
1 parent 09fd796 commit 583d514
Showing 8 changed files with 65 additions and 71 deletions.
10 changes: 6 additions & 4 deletions rero_ils/config.py
@@ -950,12 +950,10 @@ def _(x):
     pid_type='pttr',
     pid_minter='patron_transaction_id',
     pid_fetcher='patron_transaction_id',
-    search_class=('rero_ils.modules.patron_transactions.api:'
-                  'PatronTransactionsSearch'),
+    search_class='rero_ils.modules.patron_transactions.api:PatronTransactionsSearch',
     search_index='patron_transactions',
     search_type=None,
-    indexer_class=('rero_ils.modules.patron_transactions.api:'
-                   'PatronTransactionsIndexer'),
+    indexer_class='rero_ils.modules.patron_transactions.api:PatronTransactionsIndexer',
     record_serializers={
         'application/json': 'rero_ils.modules.serializers:json_v1_response'
     },
@@ -966,6 +964,10 @@ def _(x):
         'application/json': 'rero_ils.modules.serializers:json_v1_search',
         'application/rero+json': 'rero_ils.modules.patron_transactions.serializers:json_pttr_search'
     },
+    search_serializers_aliases={
+        'json': 'application/json',
+        'rero': 'application/rero+json'
+    },
     record_loaders={
         'application/json': lambda: PatronTransaction(request.get_json()),
     },
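The new search_serializers_aliases map lets a client pick a search serializer with a short format query argument instead of an Accept header. A minimal sketch of the expected behavior, assuming the standard invenio-records-rest alias handling and a hypothetical local API URL:

    import requests

    # '?format=rero' should resolve, through the alias map above, to the
    # 'application/rero+json' search serializer (hypothetical local URL).
    resp = requests.get(
        'http://localhost:5000/api/patron_transactions/',
        params={'format': 'rero'},
    )
    print(resp.headers['Content-Type'])  # expected: application/rero+json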
43 changes: 22 additions & 21 deletions rero_ils/facets.py
@@ -105,27 +105,28 @@ def default_facets_factory(search, index):
     # create facet_filter from post_filters
     # and inject the facet_filter into the
     # aggregation facet query
-    if 'terms' in facet_body:
-        facet_field = facet_body.get('terms')['field']
-    elif 'date_histogram' in facet_body:
-        facet_field = facet_body.get('date_histogram')['field']
-
-    # get DSL expression of post_filters,
-    # both single post filters and group of post filters
-    filters, filters_group, urlkwargs = \
-        _create_filter_dsl(urlkwargs,
-                           facets.get('post_filters', {}))
-
-    # create the filter to inject in the facet
-    facet_filter = _facet_filter(
-        index, filters, filters_group,
-        facet_name, facet_field)
-
-    # add a nested aggs_facet in the facet aggs
-    # and add the facet_filter to the aggregation
-    if facet_filter:
-        facet_body = dict(aggs=dict(aggs_facet=facet_body))
-        facet_body['filter'] = facet_filter
+    facet_field = None
+    for key in ['terms', 'date_histogram']:
+        if key in facet_body:
+            facet_field = facet_body.get(key)['field']
+            break
+    if facet_field:
+        # get DSL expression of post_filters,
+        # both single post filters and group of post filters
+        filters, filters_group, urlkwargs = \
+            _create_filter_dsl(urlkwargs,
+                               facets.get('post_filters', {}))
+
+        # create the filter to inject in the facet
+        facet_filter = _facet_filter(
+            index, filters, filters_group,
+            facet_name, facet_field)
+
+        # add a nested aggs_facet in the facet aggs
+        # and add the facet_filter to the aggregation
+        if facet_filter:
+            facet_body = dict(aggs=dict(aggs_facet=facet_body))
+            facet_body['filter'] = facet_filter

     aggs.update({facet_name: facet_body})
     search = _aggregations(search, aggs)
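Besides being more compact, the rewritten lookup guards against facets whose aggregation is neither terms nor date_histogram: previously facet_field was never assigned for such facets, so the subsequent _facet_filter() call would raise an UnboundLocalError. A standalone sketch of the new lookup:

    # Standalone sketch; facet_body mirrors a typical Elasticsearch
    # aggregation definition taken from the application config.
    facet_body = {'date_histogram': {'field': 'created', 'interval': 'year'}}

    facet_field = None
    for key in ['terms', 'date_histogram']:
        if key in facet_body:
            facet_field = facet_body[key]['field']
            break

    print(facet_field)  # 'created'; stays None for unsupported types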
58 changes: 22 additions & 36 deletions rero_ils/modules/documents/serializers/json.py
@@ -162,46 +162,32 @@ def _get_location_pids(lib_pid):

         for org in aggr_org:
             # filter libraries by organisation
-            records = LibrariesSearch() \
-                .get_libraries_by_organisation_pid(
-                    org['key'], ['pid', 'name'])
-            org_library = [{record.pid: record.name}
-                           for record in records]
-            org_library_pids = list(set().union(*(lib.keys()
-                                    for lib in org_library)))
-
-            org['library']['buckets'] = [library for library
-                                         in org['library']['buckets']
-                                         if library['key'] in
-                                         org_library_pids]
-
-            for library in org['library']['buckets']:
-                for lib in org_library:
-                    if library['key'] in lib:
-                        library['name'] = lib[library['key']]
-                        break
+            # Keep only libraries for the current selected organisation.
+            query = LibrariesSearch() \
+                .filter('term', organisation__pid=org['key'])\
+                .source(['pid', 'name'])
+            org_libraries = {hit.pid: hit.name for hit in query.scan()}
+            org['library']['buckets'] = list(filter(
+                lambda l: l['key'] in org_libraries,
+                org['library']['buckets']
+            ))
+            for term in org['library']['buckets']:
+                if term['key'] in org_libraries:
+                    term['name'] = org_libraries[term['key']]

             # filter locations by library
             for library in org['library']['buckets']:
-                locations = LocationsSearch() \
+                query = LocationsSearch() \
                     .filter('term', library__pid=library['key'])\
-                    .source(['pid', 'name']).scan()
-                library_location = [{location.pid: location.name}
-                                    for location in locations]
-                library_location_pids = \
-                    list(set()
-                         .union(*(loc.keys()
-                                  for loc in library_location)))
-                library['location']['buckets'] = \
-                    [location for location
-                     in library['location']['buckets']
-                     if location['key'] in library_location_pids]
-
-                for location in library['location']['buckets']:
-                    for loc in library_location:
-                        if location['key'] in loc:
-                            location['name'] = loc[location['key']]
-                            break
+                    .source(['pid', 'name'])
+                lib_locations = {hit.pid: hit.name for hit in query.scan()}
+                library['location']['buckets'] = list(filter(
+                    lambda l: l['key'] in lib_locations,
+                    library['location']['buckets']
+                ))
+                for term in library['location']['buckets']:
+                    if term['key'] in lib_locations:
+                        term['name'] = lib_locations[term['key']]

             # Complete Organisation aggregation information
             # with corresponding resource name
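Both loops now share one pattern: build a pid -> name dict straight from an Elasticsearch scan, drop the buckets whose key is unknown, then enrich the survivors. A standalone sketch with hypothetical bucket data:

    # Hypothetical data; in the real code org_libraries is built from a
    # LibrariesSearch() scan as shown in the diff above.
    org_libraries = {'lib1': 'Martigny', 'lib2': 'Sion'}
    buckets = [
        {'key': 'lib1', 'doc_count': 4},
        {'key': 'lib9', 'doc_count': 1},  # pid no longer indexed
    ]

    # Keep only buckets whose key maps to a known library ...
    buckets = list(filter(lambda b: b['key'] in org_libraries, buckets))
    # ... then attach the human-readable name to each survivor.
    for term in buckets:
        term['name'] = org_libraries[term['key']]

    print(buckets)  # [{'key': 'lib1', 'doc_count': 4, 'name': 'Martigny'}]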
5 changes: 3 additions & 2 deletions rero_ils/modules/patron_transactions/serializers.py
@@ -36,11 +36,12 @@ def _postprocess_search_hit(self, hit: dict) -> None:
         metadata = hit.get('metadata', {})
         # Serialize document (if exists)
         document_pid = metadata.get('document', {}).get('pid')
-        if document := self.get_resource(DocumentsSearch(), document_pid):
+        if document_pid and \
+           (document := self.get_resource(DocumentsSearch(), document_pid)):
             metadata['document'] = document
         # Serialize loan & item
         loan_pid = metadata.get('loan', {}).get('pid')
-        if loan := self.get_resource(Loan, loan_pid):
+        if loan_pid and (loan := self.get_resource(Loan, loan_pid)):
             metadata['loan'] = loan
             item_pid = loan.get('item_pid', {}).get('value')
             if item := self.get_resource(Item, item_pid):
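The added "pid and (... := ...)" short-circuit keeps get_resource() from being called with a None pid when a hit has no linked document or loan. A sketch of the pattern with a hypothetical stand-in for the mixin's lookup:

    # Hypothetical stand-in for CachedDataSerializerMixin.get_resource().
    def get_resource(source, pid):
        return {'pid': pid} if pid else None

    metadata = {}  # a hit with no linked loan
    loan_pid = metadata.get('loan', {}).get('pid')  # -> None

    # Short-circuit: get_resource() is never invoked with pid=None.
    if loan_pid and (loan := get_resource('loans', loan_pid)):
        metadata['loan'] = loan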
4 changes: 4 additions & 0 deletions rero_ils/modules/serializers/mixins.py
@@ -106,6 +106,10 @@ def __init__(self, limit=2000):
         self._resources = {}
         self._limit = limit

+    def reset(self):
+        """Resetting the cache."""
+        self._resources.clear()
+
     def append(self, key, resource):
         """Append a resource into the cache.
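reset() gives the response layer (see response.py below) a cheap way to empty the serializer's resource cache in place. A simplified sketch of the life cycle, assuming only the _resources dict visible in this diff:

    class CachedDataSerializerMixin:
        """Simplified sketch, not the full mixin."""

        def __init__(self, limit=2000):
            self._resources = {}
            self._limit = limit

        def reset(self):
            """Reset the cache."""
            self._resources.clear()

    cache = CachedDataSerializerMixin()
    cache._resources['loc1'] = {'name': 'old location name'}
    cache.reset()  # called once per search request
    assert not cache._resources  # stale entries cannot leak into the next search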
8 changes: 5 additions & 3 deletions rero_ils/modules/serializers/response.py
@@ -22,7 +22,6 @@
 """
 from __future__ import absolute_import, print_function

-from copy import deepcopy
 from datetime import datetime

 from flask import current_app
@@ -51,9 +50,12 @@ def search_responsify(serializer, mimetype):
     """
     def view(pid_fetcher, search_result, code=200, headers=None, links=None,
              item_links_factory=None):
-        copy_serializer = deepcopy(serializer)
+        # Check if the serializer implements a 'reset' function. If so,
+        # call it before performing the serialization.
+        if (reset := getattr(serializer, 'reset', None)) and callable(reset):
+            reset()
         response = current_app.response_class(
-            copy_serializer.serialize_search(
+            serializer.serialize_search(
                 pid_fetcher, search_result,
                 links=links, item_links_factory=item_links_factory),
             mimetype=mimetype)
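This is the heart of the fix. search_responsify() used to deep-copy the shared, module-level serializer on every search to isolate its cache; that was costly and, as the updated regression test below shows, stale cached data could still reach search results. Resetting the shared instance's cache in place is cheaper and guarantees a fresh state for each search. A standalone sketch of the guard:

    # Serializers without a cache simply skip the reset.
    class PlainSerializer:
        pass

    class CachedSerializer:
        def __init__(self):
            self._resources = {'loc1': {'name': 'stale name'}}

        def reset(self):
            self._resources.clear()

    for serializer in (PlainSerializer(), CachedSerializer()):
        if (reset := getattr(serializer, 'reset', None)) and callable(reset):
            reset()  # only CachedSerializer is cleared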
2 changes: 0 additions & 2 deletions rero_ils/query.py
@@ -427,9 +427,7 @@ def _boosting_parser(query_boosting, search_index):
             raise InvalidQueryRESTError()

     search, urlkwargs = default_facets_factory(search, search_index)
-
     search, sortkwargs = default_sorter_factory(search, search_index)
-
     for key, value in sortkwargs.items():
         urlkwargs.add(key, value)
     urlkwargs.add('q', query_string)
6 changes: 3 additions & 3 deletions tests/api/test_serializers.py
@@ -452,7 +452,8 @@ def test_cached_serializers(client, rero_json_header, item_lib_martigny,
     # return is affected by changed.

     # STEP#1 : first items search serialization
-    list_url = url_for('invenio_records_rest.item_list')
+    item = item_lib_martigny
+    list_url = url_for('invenio_records_rest.item_list', q=item.pid)
     response = client.get(list_url, headers=rero_json_header)
     assert response.status_code == 200
@@ -461,8 +462,7 @@
     location['name'] = 'new location name'
     location = location.update(location, dbcommit=True, reindex=True)
     flush_index(LocationsSearch.Meta.index)
-    from rero_ils.modules.locations.api import Location
-    assert Location.get_record_by_pid(location.pid).get('name') == \
+    assert LocationsSearch().get_record_by_pid(location.pid)['name'] == \
         location.get('name')

     # STEP#3 : second items search serialization
