+
+
+
+
+The JSON API provides a RESTful JSON interface to SENAITE LIMS.
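+
+For example, the version route can be queried without authentication (a
+minimal sketch, assuming a local instance listening on `localhost:8080`
+and a Plone site with the id `senaite`):
+
+```python
+import urllib2
+
+url = "http://localhost:8080/senaite/@@API/senaite/v1/version"
+print urllib2.urlopen(url).read()
+```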
+
+
+
+
diff --git a/bootstrap.py b/bootstrap.py
new file mode 100644
index 0000000..a459921
--- /dev/null
+++ b/bootstrap.py
@@ -0,0 +1,210 @@
+##############################################################################
+#
+# Copyright (c) 2006 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Bootstrap a buildout-based project
+
+Simply run this script in a directory containing a buildout.cfg.
+The script accepts buildout command-line options, so you can
+use the -c option to specify an alternate configuration file.
+"""
+
+import os
+import shutil
+import sys
+import tempfile
+
+from optparse import OptionParser
+
+__version__ = '2015-07-01'
+# See zc.buildout's changelog if this version is up to date.
+
+tmpeggs = tempfile.mkdtemp(prefix='bootstrap-')
+
+usage = '''\
+[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
+
+Bootstraps a buildout-based project.
+
+Simply run this script in a directory containing a buildout.cfg, using the
+Python that you want bin/buildout to use.
+
+Note that by using --find-links to point to local resources, you can keep
+this script from going over the network.
+'''
+
+parser = OptionParser(usage=usage)
+parser.add_option("--version",
+ action="store_true", default=False,
+ help=("Return bootstrap.py version."))
+parser.add_option("-t", "--accept-buildout-test-releases",
+ dest='accept_buildout_test_releases',
+ action="store_true", default=False,
+ help=("Normally, if you do not specify a --version, the "
+ "bootstrap script and buildout gets the newest "
+ "*final* versions of zc.buildout and its recipes and "
+ "extensions for you. If you use this flag, "
+ "bootstrap and buildout will get the newest releases "
+ "even if they are alphas or betas."))
+parser.add_option("-c", "--config-file",
+ help=("Specify the path to the buildout configuration "
+ "file to be used."))
+parser.add_option("-f", "--find-links",
+ help=("Specify a URL to search for buildout releases"))
+parser.add_option("--allow-site-packages",
+ action="store_true", default=False,
+ help=("Let bootstrap.py use existing site packages"))
+parser.add_option("--buildout-version",
+ help="Use a specific zc.buildout version")
+parser.add_option("--setuptools-version",
+ help="Use a specific setuptools version")
+parser.add_option("--setuptools-to-dir",
+ help=("Allow for re-use of existing directory of "
+ "setuptools versions"))
+
+options, args = parser.parse_args()
+if options.version:
+ print("bootstrap.py version %s" % __version__)
+ sys.exit(0)
+
+
+######################################################################
+# load/install setuptools
+
+try:
+ from urllib.request import urlopen
+except ImportError:
+ from urllib2 import urlopen
+
+ez = {}
+if os.path.exists('ez_setup.py'):
+ exec(open('ez_setup.py').read(), ez)
+else:
+ exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez)
+
+if not options.allow_site_packages:
+ # ez_setup imports site, which adds site packages
+ # this will remove them from the path to ensure that incompatible versions
+ # of setuptools are not in the path
+ import site
+ # inside a virtualenv, there is no 'getsitepackages'.
+ # We can't remove these reliably
+ if hasattr(site, 'getsitepackages'):
+ for sitepackage_path in site.getsitepackages():
+ # Strip all site-packages directories from sys.path that
+ # are not sys.prefix; this is because on Windows
+ # sys.prefix is a site-package directory.
+ if sitepackage_path != sys.prefix:
+ sys.path[:] = [x for x in sys.path
+ if sitepackage_path not in x]
+
+setup_args = dict(to_dir=tmpeggs, download_delay=0)
+
+if options.setuptools_version is not None:
+ setup_args['version'] = options.setuptools_version
+if options.setuptools_to_dir is not None:
+ setup_args['to_dir'] = options.setuptools_to_dir
+
+ez['use_setuptools'](**setup_args)
+import setuptools
+import pkg_resources
+
+# This does not (always?) update the default working set. We will
+# do it.
+for path in sys.path:
+ if path not in pkg_resources.working_set.entries:
+ pkg_resources.working_set.add_entry(path)
+
+######################################################################
+# Install buildout
+
+ws = pkg_resources.working_set
+
+setuptools_path = ws.find(
+ pkg_resources.Requirement.parse('setuptools')).location
+
+# Fix sys.path here as easy_install.pth added before PYTHONPATH
+cmd = [sys.executable, '-c',
+ 'import sys; sys.path[0:0] = [%r]; ' % setuptools_path +
+ 'from setuptools.command.easy_install import main; main()',
+ '-mZqNxd', tmpeggs]
+
+find_links = os.environ.get(
+ 'bootstrap-testing-find-links',
+ options.find_links or
+ ('http://downloads.buildout.org/'
+ if options.accept_buildout_test_releases else None)
+ )
+if find_links:
+ cmd.extend(['-f', find_links])
+
+requirement = 'zc.buildout'
+version = options.buildout_version
+if version is None and not options.accept_buildout_test_releases:
+ # Figure out the most recent final version of zc.buildout.
+ import setuptools.package_index
+ _final_parts = '*final-', '*final'
+
+ def _final_version(parsed_version):
+ try:
+ return not parsed_version.is_prerelease
+ except AttributeError:
+ # Older setuptools
+ for part in parsed_version:
+ if (part[:1] == '*') and (part not in _final_parts):
+ return False
+ return True
+
+ index = setuptools.package_index.PackageIndex(
+ search_path=[setuptools_path])
+ if find_links:
+ index.add_find_links((find_links,))
+ req = pkg_resources.Requirement.parse(requirement)
+ if index.obtain(req) is not None:
+ best = []
+ bestv = None
+ for dist in index[req.project_name]:
+ distv = dist.parsed_version
+ if _final_version(distv):
+ if bestv is None or distv > bestv:
+ best = [dist]
+ bestv = distv
+ elif distv == bestv:
+ best.append(dist)
+ if best:
+ best.sort()
+ version = best[-1].version
+if version:
+ requirement = '=='.join((requirement, version))
+cmd.append(requirement)
+
+import subprocess
+if subprocess.call(cmd) != 0:
+ raise Exception(
+ "Failed to execute command:\n%s" % repr(cmd)[1:-1])
+
+######################################################################
+# Import and run buildout
+
+ws.add_entry(tmpeggs)
+ws.require(requirement)
+import zc.buildout.buildout
+
+if not [a for a in args if '=' not in a]:
+ args.append('bootstrap')
+
+# if -c was provided, we push it back into args for buildout' main function
+if options.config_file is not None:
+ args[0:0] = ['-c', options.config_file]
+
+zc.buildout.buildout.main(args)
+shutil.rmtree(tmpeggs)
diff --git a/buildout.cfg b/buildout.cfg
new file mode 100644
index 0000000..eefae2a
--- /dev/null
+++ b/buildout.cfg
@@ -0,0 +1,137 @@
+[buildout]
+parts =
+ instance
+ test
+ robot
+ code-analysis
+ ipzope
+ omelette
+
+extends =
+ http://dist.plone.org/release/4.3.15/versions.cfg
+
+find-links =
+ http://dist.plone.org/release/4.3.15
+ http://dist.plone.org/thirdparty
+
+eggs =
+ Plone
+ Pillow
+ plone.reload
+ i18ndude
+ Products.PrintingMailHost
+
+zcml =
+
+versions = versions
+extensions = mr.developer
+
+eggs-directory = ${buildout:directory}/eggs
+download-cache = ~/.buildout/downloads
+
+sources = sources
+auto-checkout = *
+
+develop = .
+
+[sources]
+bika.lims = git https://github.com/bikalims/bika.lims.git branch=master
+senaite.api = git https://github.com/senaite/senaite.api.git branch=master
+
+[instance]
+recipe = plone.recipe.zope2instance
+user = admin:admin
+http-address = 0.0.0.0:8080
+eggs =
+ senaite.jsonapi
+ ${buildout:eggs}
+zcml =
+ senaite.jsonapi
+ ${buildout:zcml}
+
+[test]
+recipe = zc.recipe.testrunner
+eggs =
+ senaite.jsonapi [test]
+defaults = ['--auto-color', '--auto-progress']
+
+[code-analysis]
+recipe = plone.recipe.codeanalysis
+directory = ${buildout:directory}/src/senaite/jsonapi
+clean-lines = True
+clean-lines-exclude =
+ **/node_modules/*
+find-untranslated = False
+i18ndude-bin = ${buildout:bin-directory}/i18ndude
+utf8-header = True
+deprecated-aliases = True
+flake8-extensions =
+ flake8-blind-except
+ flake8-coding
+ flake8-debugger
+flake8-ignore = E241,E301,E501
+multiprocessing = True
+return-status-codes = False
+
+[robot]
+recipe = zc.recipe.egg
+eggs =
+ ${test:eggs}
+ plone.app.robotframework
+scripts =
+ robot-server
+ robot
+
+# Please download ipy_profile_zope to ~/.ipython/profile_zope/startup/ipy_profile_zope.py
+# https://raw.githubusercontent.com/collective/dotipython/master/ipy_profile_zope.py
+[ipzope]
+recipe = zc.recipe.egg
+eggs =
+ ipython<=4
+ ${instance:eggs}
+initialization =
+ import sys, os
+ os.environ["SOFTWARE_HOME"] = "${instance:location}"
+ os.environ["INSTANCE_HOME"] = "${instance:location}"
+ sys.argv[1:1] = "--profile zope".split()
+extra-paths =
+ ${instance:location}/lib/python
+scripts = ipython=ipzope
+
+[omelette]
+recipe = collective.recipe.omelette
+eggs = ${buildout:eggs}
+
+[i18ndude]
+unzip = true
+recipe = zc.recipe.egg
+eggs = i18ndude
+
+[update_gtags]
+recipe = collective.recipe.template
+output = ${buildout:directory}/bin/update_gtags
+input = ${buildout:directory}/templates/update_gtags.in
+mode = 755
+
+[update_translations]
+recipe = collective.recipe.template
+output = ${buildout:directory}/bin/update_translations
+input = ${buildout:directory}/templates/update_translations.in
+mode = 755
+
+[write_code_headers]
+recipe = collective.recipe.template
+output = ${buildout:directory}/bin/write_code_headers
+input = ${buildout:directory}/templates/write_code_headers.py.in
+mode = 755
+
+[sphinxbuilder]
+recipe = collective.recipe.sphinxbuilder
+interpreter = ${buildout:directory}/bin/zopepy
+build = ${buildout:directory}/docs/_build
+source = ${buildout:directory}/docs
+
+[versions]
+setuptools =
+zc.buildout =
+CairoSVG = 1.0.20
\ No newline at end of file
diff --git a/docs/Changelog.rst b/docs/Changelog.rst
new file mode 100644
index 0000000..a22d2b6
--- /dev/null
+++ b/docs/Changelog.rst
@@ -0,0 +1,4 @@
+1.0.0 (unreleased)
+------------------
+
+- initial package setup
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..3cf5636
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+
+from setuptools import setup, find_packages
+
+version = '1.0.0'
+
+
+setup(
+ name='senaite.jsonapi',
+ version=version,
+ description="SENAITE JSON API",
+ long_description=open("README.md").read() + "\n" +
+ open("docs/Changelog.rst").read() + "\n" +
+ "\n\n" +
+ "Authors and maintainers\n" +
+ "-----------------------\n" +
+ "- RIDING BYTES, http://ridingbytes.com",
+ # Get more strings from
+ # http://pypi.python.org/pypi?:action=list_classifiers
+ classifiers=[
+ "Programming Language :: Python",
+ "Framework :: Plone",
+ "Framework :: Zope2",
+ ],
+ keywords='',
+ author='SENAITE Foundation',
+ author_email='hello@senaite.com',
+ url='https://github.com/senaite/senaite.jsonapi',
+ license='GPLv3',
+ packages=find_packages('src', exclude=['ez_setup']),
+ package_dir={'': 'src'},
+ namespace_packages=['senaite'],
+ include_package_data=True,
+ zip_safe=False,
+ install_requires=[
+ 'setuptools',
+ 'plone.api',
+ 'senaite.api',
+ 'bika.lims>=3.2.1rc3',
+ ],
+ extras_require={
+ 'test': [
+ 'Products.PloneTestCase',
+ 'Products.SecureMailHost',
+ 'plone.app.robotframework',
+ 'plone.app.testing',
+ 'robotframework-debuglibrary',
+ 'robotframework-selenium2library',
+ 'robotsuite',
+ 'unittest2',
+ ]
+ },
+ entry_points="""
+ # -*- Entry points: -*-
+ [z3c.autoinclude.plugin]
+ target = plone
+ """,
+)
diff --git a/src/senaite/__init__.py b/src/senaite/__init__.py
new file mode 100644
index 0000000..7185b20
--- /dev/null
+++ b/src/senaite/__init__.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2017 SENAITE
+
+try:
+ __import__('pkg_resources').declare_namespace(__name__)
+except ImportError:
+ from pkgutil import extend_path
+ __path__ = extend_path(__path__, __name__)
diff --git a/src/senaite/docs/JSONAPIv1.rst b/src/senaite/docs/JSONAPIv1.rst
new file mode 100644
index 0000000..7666f9a
--- /dev/null
+++ b/src/senaite/docs/JSONAPIv1.rst
@@ -0,0 +1,63 @@
+SENAITE JSON API V1
+===================
+
+Running this test from the buildout directory::
+
+ bin/test test_doctests -t JSONAPIv1
+
+
+Test Setup
+----------
+
+Needed Imports::
+
+ >>> import transaction
+
+ >>> from plone.app.testing import TEST_USER_ID
+ >>> from plone.app.testing import TEST_USER_PASSWORD
+
+ >>> from senaite.lims import api
+
+Functional Helpers::
+
+ >>> def start_server():
+ ... from Testing.ZopeTestCase.utils import startZServer
+ ... ip, port = startZServer()
+ ... return "http://{}:{}/{}".format(ip, port, portal.id)
+
+ >>> def login(user=TEST_USER_ID, password=TEST_USER_PASSWORD):
+ ... browser.open(portal_url + "/login_form")
+ ... browser.getControl(name='__ac_name').value = user
+ ... browser.getControl(name='__ac_password').value = password
+ ... browser.getControl(name='submit').click()
+ ... assert("__ac_password" not in browser.contents)
+
+ >>> def logout():
+ ... browser.open(portal_url + "/logout")
+ ... assert("You are now logged out" in browser.contents)
+
+Variables::
+
+ >>> portal = self.getPortal()
+ >>> portal_url = portal.absolute_url()
+ >>> bika_setup = portal.bika_setup
+ >>> bika_setup_url = portal_url + "/bika_setup"
+ >>> browser = self.getBrowser()
+
+JSON API::
+
+ >>> api_base_url = portal_url + "/@@API/senaite/v1"
+
+
+Version
+=======
+
+Ensure we are logged out::
+
+ >>> logout()
+
+The version route should be visible to unauthenticated users::
+
+ >>> browser.open(api_base_url + "/version")
+ >>> browser.contents
+ '{"url": "http://nohost/plone/@@API/v2/version", "date": "...", "version": ..., "_runtime": ...}'
diff --git a/src/senaite/jsonapi/__init__.py b/src/senaite/jsonapi/__init__.py
new file mode 100644
index 0000000..34e1ed3
--- /dev/null
+++ b/src/senaite/jsonapi/__init__.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+from plone.jsonapi.core import router
+
+from senaite.lims import logger
+
+
+def add_route(route, endpoint=None, **kw):
+ """Add a new JSON API route
+ """
+ def wrapper(f):
+ try:
+ router.DefaultRouter.add_url_rule(route,
+ endpoint=endpoint,
+ view_func=f,
+ options=kw)
+        except AssertionError as e:
+ logger.warn("Failed to register route {}: {}".format(route, e))
+ return f
+ return wrapper
+
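+# Example usage (a sketch; route and endpoint names are illustrative):
+#
+#   @add_route("/senaite/v1/hello", "senaite.lims.jsonapi.v1.hello")
+#   def hello(context, request):
+#       return {"hello": "world"}
+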
+
+def url_for(endpoint, default="senaite.lims.jsonapi.get", **values):
+ """Looks up the API URL for the given endpoint
+
+ :param endpoint: The name of the registered route (aka endpoint)
+ :type endpoint: string
+ :returns: External URL for this endpoint
+ :rtype: string/None
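+
+    Example (illustrative values)::
+
+        url_for("senaite.lims.jsonapi.get", resource="client", uid="...")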
+ """
+
+ try:
+ return router.url_for(endpoint, force_external=True, values=values)
+ except Exception:
+ # XXX plone.jsonapi.core should catch the BuildError of Werkzeug and
+ # throw another error which can be handled here.
+ logger.debug("Could not build API URL for endpoint '%s'. "
+ "No route provider registered?" % endpoint)
+
+ # build generic API URL
+ return router.url_for(default, force_external=True, values=values)
diff --git a/src/senaite/jsonapi/api.py b/src/senaite/jsonapi/api.py
new file mode 100644
index 0000000..c817cd6
--- /dev/null
+++ b/src/senaite/jsonapi/api.py
@@ -0,0 +1,1401 @@
+# -*- coding: utf-8 -*-
+
+import json
+import datetime
+
+from DateTime import DateTime
+from AccessControl import Unauthorized
+from Products.CMFPlone.PloneBatch import Batch
+from Products.ZCatalog.Lazy import LazyMap
+from Acquisition import ImplicitAcquisitionWrapper
+
+from zope.schema import getFields
+
+from plone import api as ploneapi
+from plone.jsonapi.core import router
+from plone.behavior.interfaces import IBehaviorAssignable
+
+from senaite.lims import api
+from senaite.lims import logger
+from senaite.lims.jsonapi import config
+from senaite.lims.jsonapi import request as req
+from senaite.lims.jsonapi import underscore as u
+from senaite.lims.jsonapi.interfaces import IInfo
+from senaite.lims.jsonapi.interfaces import IBatch
+from senaite.lims.jsonapi.interfaces import ICatalog
+from senaite.lims.jsonapi.exceptions import APIError
+from senaite.lims.jsonapi.interfaces import IDataManager
+from senaite.lims.jsonapi.interfaces import IFieldManager
+from senaite.lims.jsonapi.interfaces import ICatalogQuery
+from bika.lims.utils.analysisrequest import create_analysisrequest as create_ar
+
+_marker = object()
+
+DEFAULT_ENDPOINT = "senaite.lims.jsonapi.v1.get"
+
+
+# -----------------------------------------------------------------------------
+# JSON API (CRUD) Functions (called by the route providers)
+# -----------------------------------------------------------------------------
+
+# GET RECORD
+def get_record(uid=None):
+ """Get a single record
+ """
+ obj = None
+ if uid is not None:
+ obj = get_object_by_uid(uid)
+ else:
+ obj = get_object_by_request()
+ if obj is None:
+ fail(404, "No object found")
+ complete = req.get_complete(default=_marker)
+ if complete is _marker:
+ complete = True
+ items = make_items_for([obj], complete=complete)
+ return u.first(items)
+
+
+# GET BATCHED
+def get_batched(portal_type=None, uid=None, endpoint=None, **kw):
+ """Get batched results
+ """
+
+ # fetch the catalog results
+ results = get_search_results(portal_type=portal_type, uid=uid, **kw)
+
+ # fetch the batch params from the request
+ size = req.get_batch_size()
+ start = req.get_batch_start()
+
+ # check for existing complete flag
+ complete = req.get_complete(default=_marker)
+ if complete is _marker:
+ # if the uid is given, get the complete information set
+        complete = bool(uid)
+
+ # return a batched record
+ return get_batch(results, size, start, endpoint=endpoint,
+ complete=complete)
+
+
+# CREATE
+def create_items(portal_type=None, uid=None, endpoint=None, **kw):
+ """ create items
+
+ 1. If the uid is given, get the object and create the content in there
+       (assuming it is folderish)
+    2. If the uid is 0, the target folder is assumed to be the portal.
+ 3. If there is no uid given, the payload is checked for either a key
+ - `parent_uid` specifies the *uid* of the target folder
+ - `parent_path` specifies the *physical path* of the target folder
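+
+    Example payload (illustrative values)::
+
+        [{"portal_type": "Client",
+          "parent_path": "/plone/clients",
+          "title": "Test Client"}]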
+ """
+
+ # disable CSRF
+ req.disable_csrf_protection()
+
+ # destination where to create the content
+ container = uid and get_object_by_uid(uid) or None
+
+ # extract the data from the request
+ records = req.get_request_data()
+
+ results = []
+ for record in records:
+
+ # get the portal_type
+ if portal_type is None:
+ # try to fetch the portal type out of the request data
+ portal_type = record.pop("portal_type", None)
+
+ # check if it is allowed to create the portal_type
+ if not is_creation_allowed(portal_type):
+ fail(401, "Creation of '{}' is not allowed".format(portal_type))
+
+ if container is None:
+ # find the container for content creation
+ container = find_target_container(portal_type, record)
+
+ # Check if we have a container and a portal_type
+ if not all([container, portal_type]):
+ fail(400, "Please provide a container path/uid and portal_type")
+
+ # create the object and pass in the record data
+ obj = create_object(container, portal_type, **record)
+ results.append(obj)
+
+ if not results:
+ fail(400, "No Objects could be created")
+
+ return make_items_for(results, endpoint=endpoint)
+
+
+# UPDATE
+def update_items(portal_type=None, uid=None, endpoint=None, **kw):
+ """ update items
+
+ 1. If the uid is given, the user wants to update the object with the data
+ given in request body
+ 2. If no uid is given, the user wants to update a bunch of objects.
+      -> each record contains either a UID, path or parent_path + id
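+
+    Example payload (illustrative values)::
+
+        [{"uid": "...", "title": "New Title"}]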
+ """
+
+ # disable CSRF
+ req.disable_csrf_protection()
+
+ # the data to update
+ records = req.get_request_data()
+
+ # we have an uid -> try to get an object for it
+ obj = get_object_by_uid(uid)
+ if obj:
+ record = records[0] # ignore other records if we got an uid
+ obj = update_object_with_data(obj, record)
+ return make_items_for([obj], endpoint=endpoint)
+
+ # no uid -> go through the record items
+ results = []
+ for record in records:
+ obj = get_object_by_record(record)
+
+ # no object found for this record
+ if obj is None:
+ continue
+
+ # update the object with the given record data
+ obj = update_object_with_data(obj, record)
+ results.append(obj)
+
+ if not results:
+ fail(400, "No Objects could be updated")
+
+ return make_items_for(results, endpoint=endpoint)
+
+
+# DELETE
+def delete_items(portal_type=None, uid=None, endpoint=None, **kw):
+ """ delete items
+
+ 1. If the uid is given, we can ignore the request body and delete the
+ object with the given uid (if the uid was valid).
+ 2. If no uid is given, the user wants to delete more than one item.
+ => go through each item and extract the uid. Delete it afterwards.
+       Note: this should be done transactionally, so that if we can not
+       resolve an object for a given uid, no item gets deleted.
+ 3. we could check if the portal_type matches, just to be sure the user
+ wants to delete the right content.
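+
+    Example payload (illustrative values)::
+
+        [{"uid": "..."}]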
+ """
+
+ # disable CSRF
+ req.disable_csrf_protection()
+
+ # try to find the requested objects
+ objects = find_objects(uid=uid)
+
+ # We don't want to delete the portal object
+ if filter(lambda o: is_root(o), objects):
+ fail(400, "Can not delete the portal object")
+
+ results = []
+ for obj in objects:
+ # We deactivate only!
+ deactivate_object(obj)
+ info = IInfo(obj)()
+ results.append(info)
+
+ if not results:
+ fail(404, "No Objects could be found")
+
+ return results
+
+
+def make_items_for(brains_or_objects, endpoint=None, complete=False):
+ """Generate API compatible data items for the given list of brains/objects
+
+ :param brains_or_objects: List of objects or brains
+ :type brains_or_objects: list/Products.ZCatalog.Lazy.LazyMap
+ :param endpoint: The named URL endpoint for the root of the items
+ :type endpoint: str/unicode
+ :param complete: Flag to wake up the object and fetch all data
+ :type complete: bool
+ :returns: A list of extracted data items
+ :rtype: list
+ """
+
+ # check if the user wants to include children
+ include_children = req.get_children(False)
+
+ def extract_data(brain_or_object):
+ info = get_info(brain_or_object, endpoint=endpoint, complete=complete)
+ if include_children and is_folderish(brain_or_object):
+ info.update(get_children_info(brain_or_object, complete=complete))
+ return info
+
+ return map(extract_data, brains_or_objects)
+
+
+# -----------------------------------------------------------------------------
+# Info Functions (JSON compatible data representation)
+# -----------------------------------------------------------------------------
+
+def get_info(brain_or_object, endpoint=None, complete=False):
+ """Extract the data from the catalog brain or object
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :param endpoint: The named URL endpoint for the root of the items
+ :type endpoint: str/unicode
+ :param complete: Flag to wake up the object and fetch all data
+ :type complete: bool
+ :returns: Data mapping for the object/catalog brain
+ :rtype: dict
+ """
+
+ # extract the data from the initial object with the proper adapter
+ info = IInfo(brain_or_object).to_dict()
+
+ # update with url info (always included)
+ url_info = get_url_info(brain_or_object, endpoint)
+ info.update(url_info)
+
+ # include the parent url info
+ parent = get_parent_info(brain_or_object)
+ info.update(parent)
+
+ # add the complete data of the object if requested
+ # -> requires to wake up the object if it is a catalog brain
+ if complete:
+ # ensure we have a full content object
+ obj = api.get_object(brain_or_object)
+ # get the compatible adapter
+ adapter = IInfo(obj)
+ # update the data set with the complete information
+ info.update(adapter.to_dict())
+
+ # update the data set with the workflow information
+ # -> only possible if `?complete=yes&workflow=yes`
+ if req.get_workflow(False):
+ info.update(get_workflow_info(obj))
+
+ # # add sharing data if the user requested it
+ # # -> only possible if `?complete=yes`
+ # if req.get_sharing(False):
+ # sharing = get_sharing_info(obj)
+ # info.update({"sharing": sharing})
+
+ return info
+
+
+def get_url_info(brain_or_object, endpoint=None):
+ """Generate url information for the content object/catalog brain
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :param endpoint: The named URL endpoint for the root of the items
+ :type endpoint: str/unicode
+ :returns: URL information mapping
+ :rtype: dict
+ """
+
+ # If no endpoint was given, guess the endpoint by portal type
+ if endpoint is None:
+ endpoint = get_endpoint(brain_or_object)
+
+ uid = get_uid(brain_or_object)
+ portal_type = get_portal_type(brain_or_object)
+ resource = portal_type_to_resource(portal_type)
+
+ return {
+ "uid": uid,
+ "url": get_url(brain_or_object),
+ "api_url": url_for(endpoint, resource=resource, uid=uid),
+ }
+
+
+def get_parent_info(brain_or_object, endpoint=None):
+ """Generate url information for the parent object
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :param endpoint: The named URL endpoint for the root of the items
+ :type endpoint: str/unicode
+ :returns: URL information mapping
+ :rtype: dict
+ """
+
+ # special case for the portal object
+ if is_root(brain_or_object):
+ return {}
+
+ # get the parent object
+ parent = get_parent(brain_or_object)
+ portal_type = get_portal_type(parent)
+ resource = portal_type_to_resource(portal_type)
+
+ # fall back if no endpoint specified
+ if endpoint is None:
+ endpoint = get_endpoint(parent)
+
+ return {
+ "parent_id": get_id(parent),
+ "parent_uid": get_uid(parent),
+ "parent_url": url_for(endpoint, resource=resource, uid=get_uid(parent))
+ }
+
+
+def get_children_info(brain_or_object, complete=False):
+ """Generate data items of the contained contents
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :param complete: Flag to wake up the object and fetch all data
+ :type complete: bool
+ :returns: info mapping of contained content items
+ :rtype: list
+ """
+
+ # fetch the contents (if folderish)
+ children = get_contents(brain_or_object)
+
+ def extract_data(brain_or_object):
+ return get_info(brain_or_object, complete=complete)
+ items = map(extract_data, children)
+
+ return {
+ "children_count": len(items),
+ "children": items
+ }
+
+
+def get_file_info(obj, fieldname, default=None):
+ """Extract file data from a file field
+
+ :param obj: Content object
+ :type obj: ATContentType/DexterityContentType
+ :param fieldname: Schema name of the field
+ :type fieldname: str/unicode
+ :returns: File data mapping
+ :rtype: dict
+ """
+
+ # extract the file field from the object if omitted
+ field = get_field(obj, fieldname)
+
+ # get the value with the fieldmanager
+ fm = IFieldManager(field)
+
+ # return None if we have no file data
+ if fm.get_size(obj) == 0:
+ return None
+
+ out = {
+ "content_type": fm.get_content_type(obj),
+ "filename": fm.get_filename(obj),
+ "download": fm.get_download_url(obj),
+ }
+
+    # only return the file data if requested (?filedata=yes)
+ if req.get_filedata(False):
+ data = fm.get_data(obj)
+ out["data"] = data.encode("base64")
+
+ return out
+
+
+def get_workflow_info(brain_or_object, endpoint=None):
+ """Generate workflow information of the assigned workflows
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :param endpoint: The named URL endpoint for the root of the items
+ :type endpoint: str/unicode
+ :returns: Workflows info
+ :rtype: dict
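+
+    Example return value (illustrative)::
+
+        {"workflow_info": [{"workflow": "...",
+                            "status": "...",
+                            "review_state": "...",
+                            "transitions": [...]}]}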
+ """
+
+ # ensure we have a full content object
+ obj = get_object(brain_or_object)
+
+ # get the portal workflow tool
+ wf_tool = get_tool("portal_workflow")
+
+ # the assigned workflows of this object
+ workflows = wf_tool.getWorkflowsFor(obj)
+
+    # no workflows assigned -> return
+ if not workflows:
+        return {}
+
+ def to_transition_info(transition):
+ """ return the transition information
+ """
+ return {
+ "title": transition["title"],
+ "value": transition["id"],
+ "display": transition["description"],
+ "url": transition["url"],
+ }
+
+ out = []
+
+ for workflow in workflows:
+
+ # get the status info of the current state (dictionary)
+ info = wf_tool.getStatusOf(workflow.getId(), obj)
+
+ # get the current review_status
+ review_state = info.get("review_state", None)
+ inactive_state = info.get("inactive_state", None)
+ cancellation_state = info.get("cancellation_state", None)
+        worksheetanalysis_review_state = info.get(
+            "worksheetanalysis_review_state", None)
+
+ state = review_state or \
+ inactive_state or \
+ cancellation_state or \
+ worksheetanalysis_review_state
+
+ if state is None:
+ logger.warn("No state variable found for {} -> {}".format(
+ repr(obj), info))
+ continue
+
+ # get the wf status object
+ status_info = workflow.states[state]
+
+ # get the title of the current status
+ status = status_info.title
+
+        # get the transition information
+ transitions = map(to_transition_info, wf_tool.getTransitionsFor(obj))
+
+ out.append({
+ "workflow": workflow.getId(),
+ "status": status,
+ "review_state": state,
+ "transitions": transitions,
+ })
+
+ return {"workflow_info": out}
+
+
+# -----------------------------------------------------------------------------
+# API
+# -----------------------------------------------------------------------------
+
+def fail(status, msg):
+ """API Error
+ """
+ if msg is None:
+ msg = "Reason not given."
+ raise APIError(status, "{}".format(msg))
+
+
+def search(**kw):
+ """Search the catalog adapter
+
+ :returns: Catalog search results
+ :rtype: iterable
+ """
+ portal = get_portal()
+ catalog = ICatalog(portal)
+ catalog_query = ICatalogQuery(catalog)
+ query = catalog_query.make_query(**kw)
+ return catalog(query)
+
+
+def get_search_results(portal_type=None, uid=None, **kw):
+ """Search the catalog and return the results
+
+ :returns: Catalog search results
+ :rtype: iterable
+ """
+
+ # If we have an UID, return the object immediately
+ if uid is not None:
+ logger.info("UID '%s' found, returning the object immediately" % uid)
+ return u.to_list(get_object_by_uid(uid))
+
+    # allow searching for the Plone Site by portal_type
+ include_portal = False
+ if u.to_string(portal_type) == "Plone Site":
+ include_portal = True
+
+ # The request may contain a list of portal_types, e.g.
+ # `?portal_type=Document&portal_type=Plone Site`
+ if "Plone Site" in u.to_list(req.get("portal_type")):
+ include_portal = True
+
+ # Build and execute a catalog query
+ results = search(portal_type=portal_type, uid=uid, **kw)
+
+ if include_portal:
+ results = list(results) + u.to_list(get_portal())
+
+ return results
+
+
+def get_portal():
+ """Proxy to senaite.lims.api.get_portal
+ """
+ return api.get_portal()
+
+
+def get_tool(name, default=_marker):
+ """Proxy to senaite.lims.api.get_tool
+ """
+ return api.get_tool(name, default)
+
+
+def get_object(brain_or_object):
+ """Proxy to senaite.lims.api.get_object
+ """
+ return api.get_object(brain_or_object)
+
+
+def is_brain(brain_or_object):
+ """Proxy to senaite.lims.api.is_brain
+ """
+ return api.is_brain(brain_or_object)
+
+
+def is_at_content(brain_or_object):
+ """Proxy to senaite.lims.api.is_at_content
+ """
+ return api.is_at_content(brain_or_object)
+
+
+def is_dexterity_content(brain_or_object):
+ """Proxy to senaite.lims.api.is_dexterity_content
+ """
+ return api.is_dexterity_content(brain_or_object)
+
+
+def get_schema(brain_or_object):
+ """Get the schema of the content
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :returns: Schema object
+ """
+ obj = get_object(brain_or_object)
+ if is_root(obj):
+ return None
+ if is_dexterity_content(obj):
+ pt = get_tool("portal_types")
+ fti = pt.getTypeInfo(obj.portal_type)
+ return fti.lookupSchema()
+ if is_at_content(obj):
+ return obj.Schema()
+ fail(400, "{} has no Schema.".format(repr(brain_or_object)))
+
+
+def get_fields(brain_or_object):
+ """Get the list of fields from the object
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :returns: List of fields
+ :rtype: list
+ """
+ obj = get_object(brain_or_object)
+ # The portal object has no schema
+ if is_root(obj):
+ return {}
+ schema = get_schema(obj)
+ if is_dexterity_content(obj):
+ names = schema.names()
+ fields = map(lambda name: schema.get(name), names)
+ schema_fields = dict(zip(names, fields))
+ # update with behavior fields
+ schema_fields.update(get_behaviors(obj))
+ return schema_fields
+ return dict(zip(schema.keys(), schema.fields()))
+
+
+def get_field(brain_or_object, name, default=None):
+ """Return the named field
+ """
+ fields = get_fields(brain_or_object)
+ return fields.get(name, default)
+
+
+def get_behaviors(brain_or_object):
+ """Iterate over all behaviors that are assigned to the object
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :returns: Behaviors
+ :rtype: list
+ """
+ obj = get_object(brain_or_object)
+ if not is_dexterity_content(obj):
+ fail(400, "Only Dexterity contents can have assigned behaviors")
+ assignable = IBehaviorAssignable(obj, None)
+ if not assignable:
+ return {}
+ out = {}
+ for behavior in assignable.enumerateBehaviors():
+ for name, field in getFields(behavior.interface).items():
+ out[name] = field
+ return out
+
+
+def is_root(brain_or_object):
+ """Proxy to senaite.lims.api.is_portal
+ """
+ return api.is_portal(brain_or_object)
+
+
+def is_folderish(brain_or_object):
+ """Proxy to senaite.lims.api.is_folderish
+ """
+ return api.is_folderish(brain_or_object)
+
+
+def is_uid(uid):
+ """Checks if the passed in uid is a valid UID
+
+ :param uid: The uid to check
+ :type uid: string
+    :return: True if the uid is a valid 32 character alphanumeric UID or '0'
+ :rtype: bool
+ """
+ if not isinstance(uid, basestring):
+ return False
+ if uid != "0" and len(uid) != 32:
+ return False
+ return True
+
+
+def is_path(path):
+ """Checks if the passed in path is a valid Path within the portal
+
+ :param path: The path to check
+    :type path: string
+ :return: True if the path is a valid path within the portal
+ :rtype: bool
+ """
+ if not isinstance(path, basestring):
+ return False
+ portal_path = get_path(get_portal())
+ if not path.startswith(portal_path):
+ return False
+ obj = get_object_by_path(path)
+ if obj is None:
+ return False
+ return True
+
+
+def is_json_serializable(thing):
+ """Checks if the given thing can be serialized to JSON
+
+ :param thing: The object to check if it can be serialized
+ :type thing: arbitrary object
+ :returns: True if it can be JSON serialized
+ :rtype: bool
+ """
+ try:
+ json.dumps(thing)
+ return True
+ except TypeError:
+ return False
+
+
+def to_json_value(obj, fieldname, value=_marker, default=None):
+ """JSON save value encoding
+
+ :param obj: Content object
+ :type obj: ATContentType/DexterityContentType
+ :param fieldname: Schema name of the field
+ :type fieldname: str/unicode
+ :param value: The field value
+ :type value: depends on the field type
+ :returns: JSON encoded field value
+ :rtype: field dependent
+ """
+
+ # This function bridges the value of the field to a probably more complex
+ # JSON structure to return to the client.
+
+ # extract the value from the object if omitted
+ if value is _marker:
+ value = IDataManager(obj).json_data(fieldname)
+
+ # convert objects
+ if isinstance(value, ImplicitAcquisitionWrapper):
+ return get_url_info(value)
+
+ # convert dates
+ if is_date(value):
+ return to_iso_date(value)
+
+ # check if the value is callable
+ if callable(value):
+ value = value()
+
+ # check if the value is JSON serializable
+ if not is_json_serializable(value):
+ logger.warn("Output {} is not JSON serializable".format(repr(value)))
+ return default
+
+ return value
+
+
+def is_date(thing):
+ """Checks if the given thing represents a date
+
+ :param thing: The object to check if it is a date
+ :type thing: arbitrary object
+ :returns: True if we have a date object
+ :rtype: bool
+ """
+ # known date types
+ date_types = (datetime.datetime,
+ datetime.date,
+ DateTime)
+ return isinstance(thing, date_types)
+
+
+def is_lazy_map(thing):
+ """Checks if the passed in thing is a LazyMap
+
+ :param thing: The thing to test
+ :type thing: any
+    :returns: True if the thing is a LazyMap
+ :rtype: bool
+ """
+ return isinstance(thing, LazyMap)
+
+
+def to_iso_date(date, default=None):
+ """ISO representation for the date object
+
+ :param date: A date object
+    :type date: datetime/DateTime
+ :returns: The ISO format of the date
+ :rtype: str
+ """
+
+ # not a date
+ if not is_date(date):
+ return default
+
+ # handle Zope DateTime objects
+    if isinstance(date, DateTime):
+ return date.ISO8601()
+
+ # handle python datetime objects
+ return date.isoformat()
+
+
+def get_contents(brain_or_object, depth=1):
+ """Lookup folder contents for this object
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :returns: List of contained contents
+ :rtype: list/Products.ZCatalog.Lazy.LazyMap
+ """
+
+    # Nothing to do if the object is not folderish
+ if not is_folderish(brain_or_object):
+ return []
+
+ query = {
+ "path": {
+ "query": get_path(brain_or_object),
+ "depth": depth,
+ }
+ }
+
+ return search(query=query)
+
+
+def get_parent(brain_or_object):
+ """Locate the parent object of the content/catalog brain
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :returns: parent object
+ :rtype: Parent content
+ """
+
+ if is_root(brain_or_object):
+ return get_portal()
+
+ if is_brain(brain_or_object):
+ parent_path = get_parent_path(brain_or_object)
+ return get_object_by_path(parent_path)
+
+ return brain_or_object.aq_parent
+
+
+def get_object_by_uid(uid, default=None):
+ """Proxy to senaite.lims.api.get_object_by_uid
+ """
+ return api.get_object_by_uid(uid, default)
+
+
+def get_path(brain_or_object):
+ """Proxy to senaite.lims.api.get_path
+ """
+ return api.get_path(brain_or_object)
+
+
+def get_parent_path(brain_or_object):
+ """Proxy to senaite.lims.api.get_parent_path
+ """
+ return api.get_parent_path(brain_or_object)
+
+
+def get_id(brain_or_object):
+ """Proxy to senaite.lims.api.get_id
+ """
+ return api.get_id(brain_or_object)
+
+
+def get_uid(brain_or_object):
+ """Proxy to senaite.lims.api.get_uid
+ """
+ return api.get_uid(brain_or_object)
+
+
+def get_url(brain_or_object):
+ """Proxy to senaite.lims.api.get_url
+ """
+ return api.get_url(brain_or_object)
+
+
+def get_portal_type(brain_or_object):
+ """Proxy to senaite.lims.api.get_portal_type
+ """
+ return api.get_portal_type(brain_or_object)
+
+
+def do_transition_for(brain_or_object, transition):
+ """Proxy to senaite.lims.api.do_transition_for
+ """
+ return api.do_transition_for(brain_or_object, transition)
+
+
+def get_portal_types():
+ """Get a list of all portal types
+
+    :returns: List of portal type names
+ :rtype: list
+ """
+ types_tool = get_tool("portal_types")
+ return types_tool.listContentTypes()
+
+
+def get_resource_mapping():
+ """Map resources used in the routes to portal types
+
+ :returns: Mapping of resource->portal_type
+ :rtype: dict
+ """
+ portal_types = get_portal_types()
+ resources = map(portal_type_to_resource, portal_types)
+ return dict(zip(resources, portal_types))
+
+
+def portal_type_to_resource(portal_type):
+ """Converts a portal type name to a resource name
+
+ :param portal_type: Portal type name
+    :type portal_type: string
+ :returns: Resource name as it is used in the content route
+ :rtype: string
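+
+    Example: "Plone Site" -> "plonesite"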
+ """
+ resource = portal_type.lower()
+ resource = resource.replace(" ", "")
+ return resource
+
+
+def resource_to_portal_type(resource):
+ """Converts a resource to a portal type
+
+ :param resource: Resource name as it is used in the content route
+    :type resource: string
+ :returns: Portal type name
+ :rtype: string
+ """
+ if resource is None:
+ return None
+
+ resource_mapping = get_resource_mapping()
+ portal_type = resource_mapping.get(resource.lower())
+
+ if portal_type is None:
+ logger.warn("Could not map the resource '{}' "
+ "to any known portal type".format(resource))
+
+ return portal_type
+
+
+def get_container_for(portal_type):
+ """Returns the single holding container object of this content type
+
+ :param portal_type: The portal type requested
+ :type portal_type: string
+ :returns: Folderish container where the portal type can be created
+ :rtype: AT content object
+ """
+ container_paths = config.CONTAINER_PATHS_FOR_PORTAL_TYPES
+ container_path = container_paths.get(portal_type)
+
+ if container_path is None:
+ return None
+
+ portal_path = get_path(get_portal())
+ return get_object_by_path("/".join([portal_path, container_path]))
+
+
+def is_creation_allowed(portal_type):
+ """Checks if it is allowed to create the portal type
+
+ :param portal_type: The portal type requested
+ :type portal_type: string
+ :returns: True if it is allowed to create this object
+ :rtype: bool
+ """
+ allowed_portal_types = config.ALLOWED_PORTAL_TYPES_TO_CREATE
+ return portal_type in allowed_portal_types
+
+
+def url_for(endpoint, default=DEFAULT_ENDPOINT, **values):
+ """Looks up the API URL for the given endpoint
+
+ :param endpoint: The name of the registered route (aka endpoint)
+ :type endpoint: string
+ :returns: External URL for this endpoint
+ :rtype: string/None
+ """
+
+ try:
+ return router.url_for(endpoint, force_external=True, values=values)
+ except Exception:
+ logger.warn("Could not build API URL for endpoint '%s'. "
+ "No route provider registered?" % endpoint)
+ # build generic API URL
+ return router.url_for(default, force_external=True, values=values)
+
+
+def get_endpoint(brain_or_object, default=DEFAULT_ENDPOINT):
+ """Calculate the endpoint for this object
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :returns: Endpoint for this object
+ :rtype: string
+ """
+ portal_type = get_portal_type(brain_or_object)
+ resource = portal_type_to_resource(portal_type)
+
+ # Try to get the right namespaced endpoint
+ endpoints = router.DefaultRouter.view_functions.keys()
+ if resource in endpoints:
+ return resource # exact match
+ endpoint_candidates = filter(lambda e: e.endswith(resource), endpoints)
+ if len(endpoint_candidates) == 1:
+ # only return the namespaced endpoint, if we have an exact match
+ return endpoint_candidates[0]
+
+ return default
+
+
+def get_catalog():
+ """Get catalog adapter
+
+ :returns: ICatalog adapter for the Portal
+ :rtype: CatalogTool
+ """
+ portal = get_portal()
+ return ICatalog(portal)
+
+
+def get_object_by_request():
+ """Find an object by request parameters
+
+ Inspects request parameters to locate an object
+
+ :returns: Found Object or None
+ :rtype: object
+ """
+ data = req.get_form() or req.get_query_string()
+ return get_object_by_record(data)
+
+
+def get_object_by_record(record):
+ """Find an object by a given record
+
+    Inspects the given record to locate an object
+
+ :param record: A dictionary representation of an object
+ :type record: dict
+ :returns: Found Object or None
+ :rtype: object
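+
+    Example records (illustrative values)::
+
+        {"uid": "..."}
+        {"path": "/plone/clients"}
+        {"parent_path": "/plone/clients", "id": "client-1"}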
+ """
+
+ # nothing to do here
+ if not record:
+ return None
+
+ if record.get("uid"):
+ return get_object_by_uid(record["uid"])
+ if record.get("path"):
+ return get_object_by_path(record["path"])
+ if record.get("parent_path") and record.get("id"):
+ path = "/".join([record["parent_path"], record["id"]])
+ return get_object_by_path(path)
+
+ logger.warn("get_object_by_record::No object found! record='%r'" % record)
+ return None
+
+
+def get_object_by_path(path):
+ """Find an object by a given physical path
+
+ :param path: The physical path of the object to find
+ :type path: string
+ :returns: Found Object or None
+ :rtype: object
+ """
+
+ # nothing to do here
+ if not path:
+ return None
+
+ portal = get_portal()
+ portal_path = get_path(portal)
+
+ if path == portal_path:
+ return portal
+
+ if path.startswith(portal_path):
+ segments = path.split("/")
+ path = "/".join(segments[2:])
+
+ try:
+ return portal.restrictedTraverse(str(path))
+ except (KeyError, AttributeError):
+ fail(404, "No object could be found at {}".format(str(path)))
+
+
+def is_anonymous():
+ """Check if the current user is authenticated or not
+
+    :returns: True if the current user is anonymous
+ :rtype: bool
+ """
+ return ploneapi.user.is_anonymous()
+
+
+def get_current_user():
+ """Get the current logged in user
+
+ :returns: Member
+ :rtype: object
+ """
+ return ploneapi.user.get_current()
+
+
+def get_member_ids():
+ """Return all member ids of the portal.
+ """
+ pm = get_tool("portal_membership")
+ member_ids = pm.listMemberIds()
+    # filter out member ids that are None (should not happen, but be safe)
+ return filter(lambda x: x, member_ids)
+
+
+def get_user(user_or_username=None):
+ """Return Plone User
+
+ :param user_or_username: Plone user or user id
+    :type user_or_username: PloneUser/MemberData/str
+ :returns: Plone MemberData
+ :rtype: object
+ """
+ if user_or_username is None:
+ return None
+ if hasattr(user_or_username, "getUserId"):
+ return ploneapi.user.get(user_or_username.getUserId())
+ return ploneapi.user.get(userid=u.to_string(user_or_username))
+
+
+def get_user_properties(user_or_username):
+ """Return User Properties
+
+    :param user_or_username: Plone user or user id
+    :type user_or_username: PloneUser/MemberData/str
+ :returns: Plone MemberData
+ :rtype: object
+ """
+ user = get_user(user_or_username)
+ if user is None:
+ return {}
+ if not callable(user.getUser):
+ return {}
+ out = {}
+ plone_user = user.getUser()
+ for sheet in plone_user.listPropertysheets():
+ ps = plone_user.getPropertysheet(sheet)
+ out.update(dict(ps.propertyItems()))
+ return out
+
+
+def find_objects(uid=None):
+ """Find the object by its UID
+
+ 1. get the object from the given uid
+ 2. fetch objects specified in the request parameters
+ 3. fetch objects located in the request body
+
+ :param uid: The UID of the object to find
+ :type uid: string
+ :returns: List of found objects
+ :rtype: list
+ """
+    # The objects to return
+ objects = []
+
+ # get the object by the given uid or try to find it by the request
+ # parameters
+ obj = get_object_by_uid(uid) or get_object_by_request()
+
+ if obj:
+ objects.append(obj)
+ else:
+ # no uid -> go through the record items
+ records = req.get_request_data()
+ for record in records:
+ # try to get the object by the given record
+ obj = get_object_by_record(record)
+
+ # no object found for this record
+ if obj is None:
+ continue
+ objects.append(obj)
+
+ return objects
+
+
+def find_target_container(portal_type, record):
+ """Locates a target container for the given portal_type and record
+
+ :param record: The dictionary representation of a content object
+ :type record: dict
+ :returns: folder which contains the object
+ :rtype: object
+ """
+ portal_type = portal_type or record.get("portal_type")
+ container = get_container_for(portal_type)
+ if container:
+ return container
+
+ parent_uid = record.pop("parent_uid", None)
+ parent_path = record.pop("parent_path", None)
+
+ target = None
+
+ # Try to find the target object
+ if parent_uid:
+ target = get_object_by_uid(parent_uid)
+ elif parent_path:
+ target = get_object_by_path(parent_path)
+ else:
+ fail(404, "No target UID/PATH information found")
+
+ if not target:
+ fail(404, "No target container found")
+
+ return target
+
+
+def create_object(container, portal_type, **data):
+ """Creates an object slug
+
+ :returns: The new created content object
+ :rtype: object
+ """
+
+ if "id" in data:
+ # always omit the id as senaite LIMS generates a proper one
+ id = data.pop("id")
+ logger.warn("Passed in ID '{}' omitted! Senaite LIMS "
+ "generates a proper ID for you" .format(id))
+
+ try:
+ # Special case for ARs
+ # => return immediately w/o update
+ if portal_type == "AnalysisRequest":
+ obj = create_analysisrequest(container, **data)
+ # Omit values which are already set through the helper
+ data = u.omit(data, "SampleType", "Analyses")
+ # Set the container as the client, as the AR lives in it
+ data["Client"] = container
+ # Standard content creation
+ else:
+            # we want just a minimum viable object and set the data later
+ obj = api.create(container, portal_type)
+ # obj = api.create(container, portal_type, **data)
+ except Unauthorized:
+ fail(401, "You are not allowed to create this content")
+
+ # Update the object with the given data, but omit the id
+ try:
+ update_object_with_data(obj, data)
+ except APIError:
+
+ # Failure in creation process, delete the invalid object
+ container.manage_delObjects(obj.id)
+ # reraise the error
+ raise
+
+ return obj
+
+
+def create_analysisrequest(container, **data):
+ """Create a minimun viable AnalysisRequest
+
+ :param container: A single folderish catalog brain or content object
+ :type container: ATContentType/DexterityContentType/CatalogBrain
+ """
+ container = get_object(container)
+ request = req.get_request()
+ # we need to resolve the SampleType to a full object
+ sample_type = data.get("SampleType", None)
+ if sample_type is None:
+ fail(400, "Please provide a SampleType")
+
+ # TODO We should handle the same values as in the DataManager for this field
+ # (UID, path, objects, dictionaries ...)
+ results = search(portal_type="SampleType", title=sample_type)
+
+ values = {
+ "Analyses": data.get("Analyses", []),
+ "SampleType": results and get_object(results[0]) or None,
+ }
+
+ return create_ar(container, request, values)
+
+
+def update_object_with_data(content, record):
+ """Update the content with the record data
+
+ :param content: A single folderish catalog brain or content object
+ :type content: ATContentType/DexterityContentType/CatalogBrain
+ :param record: The data to update
+ :type record: dict
+ :returns: The updated content object
+ :rtype: object
+ :raises:
+ APIError,
+ :class:`~plone.jsonapi.routes.exceptions.APIError`
+ """
+
+ # ensure we have a full content object
+ content = get_object(content)
+
+ # get the proper data manager
+ dm = IDataManager(content)
+
+ if dm is None:
+ fail(400, "Update for this object is not allowed")
+
+ # Iterate through record items
+ for k, v in record.items():
+ try:
+ success = dm.set(k, v, **record)
+ except Unauthorized:
+ fail(401, "Not allowed to set the field '%s'" % k)
+        except ValueError as exc:
+ fail(400, str(exc))
+
+ if not success:
+ logger.warn("update_object_with_data::skipping key=%r", k)
+ continue
+
+ logger.debug("update_object_with_data::field %r updated", k)
+
+ # Validate the entire content object
+ invalid = validate_object(content, record)
+ if invalid:
+ fail(400, u.to_json(invalid))
+
+ # do a wf transition
+ if record.get("transition", None):
+ t = record.get("transition")
+ logger.debug(">>> Do Transition '%s' for Object %s", t, content.getId())
+ do_transition_for(content, t)
+
+ # reindex the object
+ content.reindexObject()
+ return content
+
+
+def validate_object(brain_or_object, data):
+ """Validate the entire object
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+    :param data: The record data to validate against the object
+ :type data: dict
+ :returns: invalidity status
+ :rtype: dict
+ """
+ obj = get_object(brain_or_object)
+
+ # Call the validator of AT Content Types
+ if is_at_content(obj):
+ return obj.validate(data=data)
+
+ return {}
+
+
+def deactivate_object(brain_or_object):
+ """Deactivate the given object
+
+ :param brain_or_object: A single catalog brain or content object
+ :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
+ :returns: Nothing
+ :rtype: None
+ """
+ obj = get_object(brain_or_object)
+ # we do not want to delete the site root!
+ if is_root(obj):
+ fail(401, "Deactivating the Portal is not allowed")
+ try:
+ do_transition_for(brain_or_object, "deactivate")
+ except Unauthorized:
+ fail(401, "Not allowed to deactivate object '%s'" % obj.getId())
+
+
+# -----------------------------------------------------------------------------
+# Batching Helpers
+# -----------------------------------------------------------------------------
+
+def get_batch(sequence, size, start=0, endpoint=None, complete=False):
+ """ create a batched result record out of a sequence (catalog brains)
+ """
+
+ batch = make_batch(sequence, size, start)
+
+ return {
+ "pagesize": batch.get_pagesize(),
+ "next": batch.make_next_url(),
+ "previous": batch.make_prev_url(),
+ "page": batch.get_pagenumber(),
+ "pages": batch.get_numpages(),
+ "count": batch.get_sequence_length(),
+ "items": make_items_for([b for b in batch.get_batch()],
+ endpoint, complete=complete),
+ }
+
+
+def make_batch(sequence, size=25, start=0):
+ """Make a batch of the given size from the sequence
+ """
+ # we call an adapter here to allow backwards compatibility hooks
+ return IBatch(Batch(sequence, size, start))
diff --git a/src/senaite/jsonapi/batch.py b/src/senaite/jsonapi/batch.py
new file mode 100644
index 0000000..ad95563
--- /dev/null
+++ b/src/senaite/jsonapi/batch.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+import urllib
+
+from zope import interface
+
+from senaite.lims.jsonapi import request as req
+from senaite.lims.jsonapi.interfaces import IBatch
+
+
+class Batch(object):
+ """Adapter for Plone 4.3 batching functionality
+ """
+ interface.implements(IBatch)
+
+ def __init__(self, batch):
+ self.batch = batch
+
+ def get_batch(self):
+ return self.batch
+
+ def get_pagesize(self):
+ return self.batch.pagesize
+
+ def get_pagenumber(self):
+ return self.batch.pagenumber
+
+ def get_numpages(self):
+ return self.batch.numpages
+
+ def get_sequence_length(self):
+ return self.batch.sequence_length
+
+ def make_next_url(self):
+ if not self.batch.has_next:
+ return None
+ request = req.get_request()
+ params = request.form
+ params["b_start"] = self.batch.pagenumber * self.batch.pagesize
+ return "%s?%s" % (request.URL, urllib.urlencode(params))
+
+ def make_prev_url(self):
+ if not self.batch.has_previous:
+ return None
+ request = req.get_request()
+ params = request.form
+ pagesize = self.batch.pagesize
+ pagenumber = self.batch.pagenumber
+ params["b_start"] = max(pagenumber - 2, 0) * pagesize
+ return "%s?%s" % (request.URL, urllib.urlencode(params))
+
+
+class Batch42(object):
+ """Adapter for Plone 4.2 batching functionality
+ """
+ interface.implements(IBatch)
+
+ def __init__(self, batch):
+ self.batch = batch
+
+ def get_batch(self):
+ return self.batch
+
+ def get_pagesize(self):
+ return self.batch.size
+
+ def get_pagenumber(self):
+ return self.batch.pagenumber
+
+ def get_numpages(self):
+ return self.batch.numpages
+
+ def get_sequence_length(self):
+ return self.batch.sequence_length
+
+ def make_next_url(self):
+        if self.batch.next is None:
+ return None
+ request = req.get_request()
+ params = request.form
+ params["b_start"] = self.batch.numpages * self.batch.size
+ return "%s?%s" % (request.URL, urllib.urlencode(params))
+
+ def make_prev_url(self):
+        if self.batch.previous is None:
+ return None
+ request = req.get_request()
+ params = request.form
+ params["b_start"] = max(self.batch.numpages - 2, 0) * self.batch.size
+ return "%s?%s" % (request.URL, urllib.urlencode(params))
diff --git a/src/senaite/jsonapi/catalog.py b/src/senaite/jsonapi/catalog.py
new file mode 100644
index 0000000..993e666
--- /dev/null
+++ b/src/senaite/jsonapi/catalog.py
@@ -0,0 +1,237 @@
+# -*- coding: utf-8 -*-
+
+from zope import interface
+
+from DateTime import DateTime
+from ZPublisher import HTTPRequest
+
+from senaite.lims import logger
+from senaite.lims import api as bikaapi
+from senaite.jsonapi import api
+from senaite.jsonapi import request as req
+from senaite.jsonapi import underscore as _
+from senaite.jsonapi.interfaces import ICatalog
+from senaite.jsonapi.interfaces import ICatalogQuery
+
+
+class Catalog(object):
+    """Plone catalog adapter
+    """
+    interface.implements(ICatalog)
+
+    def __init__(self, context):
+        self._catalog = api.get_tool("portal_catalog")
+        self._bika_catalog = api.get_tool("bika_catalog")
+        self._bika_analysis_catalog = api.get_tool("bika_analysis_catalog")
+        self._bika_setup_catalog = api.get_tool("bika_setup_catalog")
+
+        self._catalogs = {
+            "portal_catalog": self._catalog,
+            "bika_catalog": self._bika_catalog,
+            "bika_analysis_catalog": self._bika_analysis_catalog,
+            "bika_setup_catalog": self._bika_setup_catalog,
+        }
+
+    def search(self, query):
+        """Search the catalog
+        """
+        logger.info("Catalog query={}".format(query))
+
+        # Support to set the catalog as a request parameter
+        catalogs = _.to_list(req.get("catalog", None))
+        if catalogs:
+            return bikaapi.search(query, catalog=catalogs)
+        # Delegate to the search API of Bika LIMS
+        return bikaapi.search(query)
+
+    def __call__(self, query):
+        return self.search(query)
+
+    def get_catalog(self, name="portal_catalog"):
+        return self._catalogs[name]
+
+    def get_schema(self):
+        catalog = self.get_catalog()
+        return catalog.schema()
+
+    def get_indexes(self):
+        """Get all indexes managed by this catalog
+
+        TODO: Combine indexes of relevant catalogs depending on the
+        portal_type which is searched for.
+        """
+        catalog = self.get_catalog()
+        return catalog.indexes()
+
+    def get_index(self, name):
+        """Get an index by name
+
+        TODO: Combine indexes of relevant catalogs depending on the
+        portal_type which is searched for.
+        """
+        catalog = self.get_catalog()
+        index = catalog._catalog.getIndex(name)
+        logger.debug("get_index={} of catalog '{}' --> {}".format(
+            name, catalog.__name__, index))
+        return index
+
+    def to_index_value(self, value, index):
+        """Convert the value for a given index
+        """
+
+        # ZPublisher records can be passed to the catalog as is.
+        if isinstance(value, HTTPRequest.record):
+            return value
+
+        if isinstance(index, basestring):
+            index = self.get_index(index)
+
+        if index.id == "portal_type":
+            return filter(lambda x: x, _.to_list(value))
+        if index.meta_type == "DateIndex":
+            return DateTime(value)
+        if index.meta_type == "BooleanIndex":
+            return bool(value)
+        if index.meta_type == "KeywordIndex":
+            return value.split(",")
+
+        return value
+
+
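+# Conversion sketch for `Catalog.to_index_value` (hedged; the index names
+# are examples and must exist in the queried catalog):
+#
+#   to_index_value("2018-01-01", "created")  # -> DateTime (DateIndex)
+#   to_index_value("1", "is_folderish")      # -> True (BooleanIndex)
+#   to_index_value("a,b", "Subject")         # -> ["a", "b"] (KeywordIndex)
+
+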
+class CatalogQuery(object):
+    """Catalog query adapter
+    """
+    interface.implements(ICatalogQuery)
+
+    def __init__(self, catalog):
+        self.catalog = catalog
+
+    def make_query(self, **kw):
+        """Create a query suitable for the catalog
+        """
+        query = kw.pop("query", {})
+
+        query.update(self.get_request_query())
+        query.update(self.get_custom_query())
+        query.update(self.get_keyword_query(**kw))
+
+        sort_on, sort_order = self.get_sort_spec()
+        if sort_on and "sort_on" not in query:
+            query.update({"sort_on": sort_on})
+        if sort_order and "sort_order" not in query:
+            query.update({"sort_order": sort_order})
+
+        logger.info("make_query:: query={} | catalog={}".format(
+            query, self.catalog))
+
+        return query
+
+    def get_request_query(self):
+        """Checks the request for known catalog indexes and converts the
+        values to fit the type of the catalog index.
+
+        :returns: Catalog query
+        :rtype: dict
+        """
+        query = {}
+
+        # only known indexes get observed
+        indexes = self.catalog.get_indexes()
+
+        for index in indexes:
+            # Check if the request contains a parameter named like the index
+            value = req.get(index)
+            # No value found, continue
+            if value is None:
+                continue
+            # Convert the found value to a format understood by the index
+            index_value = self.catalog.to_index_value(value, index)
+            # Conversion returned None, continue
+            if index_value is None:
+                continue
+            # Append the found value to the query
+            query[index] = index_value
+
+        return query
+
+    def get_custom_query(self):
+        """Extracts custom query keys from the request.
+
+        Parameters which get extracted from the request:
+
+        `q`: Passes the value to the `SearchableText`
+        `path`: Creates a path query
+        `recent_created`: Creates a date query
+        `recent_modified`: Creates a date query
+
+        :returns: Catalog query
+        :rtype: dict
+        """
+        query = {}
+
+        # searchable text queries
+        q = req.get_query()
+        if q:
+            query["SearchableText"] = q
+
+        # physical path queries
+        path = req.get_path()
+        if path:
+            query["path"] = {"query": path, "depth": req.get_depth()}
+
+        # special handling for recent created/modified
+        recent_created = req.get_recent_created()
+        if recent_created:
+            date = api.calculate_delta_date(recent_created)
+            query["created"] = {"query": date, "range": "min"}
+
+        recent_modified = req.get_recent_modified()
+        if recent_modified:
+            date = api.calculate_delta_date(recent_modified)
+            query["modified"] = {"query": date, "range": "min"}
+
+        return query
+
+    def get_keyword_query(self, **kw):
+        """Generates a query from the given keywords.
+        Only known indexes make it into the generated query.
+
+        :returns: Catalog query
+        :rtype: dict
+        """
+        query = dict()
+
+        # Only known indexes get observed
+        indexes = self.catalog.get_indexes()
+
+        # Handle additional keyword parameters
+        for k, v in kw.iteritems():
+            # handle UID in keywords
+            if k.lower() == "uid":
+                k = "UID"
+            # handle portal_type in keywords
+            if k.lower() == "portal_type":
+                if v:
+                    v = _.to_list(v)
+            # Skip unknown indexes
+            if k not in indexes:
+                logger.warn("Skipping unknown keyword parameter '%s=%s'" % (k, v))
+                continue
+            # Skip `None` values
+            if v is None:
+                logger.warn("Skip None value in kw parameter '%s=%s'" % (k, v))
+                continue
+            logger.debug("Adding '%s=%s' to query" % (k, v))
+            query[k] = v
+
+        return query
+
+    def get_sort_spec(self):
+        """Build the sort specification
+        """
+        all_indexes = self.catalog.get_indexes()
+        sort_on = req.get_sort_on(allowed_indexes=all_indexes)
+        sort_order = req.get_sort_order()
+        return sort_on, sort_order
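+
+
+# Query-building sketch (hedged: the route and the parameter values are
+# hypothetical; the translation follows the request helpers used above):
+#
+#   GET .../@@API/senaite/v1/search?portal_type=Client&sort_on=created&sort_order=ascending
+#
+# would roughly be translated by `make_query()` into:
+#
+#   {"portal_type": ["Client"], "sort_on": "created", "sort_order": "ascending"}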
diff --git a/src/senaite/jsonapi/config.py b/src/senaite/jsonapi/config.py
new file mode 100644
index 0000000..04ed8dc
--- /dev/null
+++ b/src/senaite/jsonapi/config.py
@@ -0,0 +1,282 @@
+# -*- coding: utf-8 -*-
+
+
+# The locations mentioned here take precedence over any container path
+# given in the request.
+CONTAINER_PATHS_FOR_PORTAL_TYPES = {
+ # 'ARImport': '',
+ # 'ARPriorities': '',
+ 'ARPriority': 'bika_setup/bika_arpriorities',
+ # 'ARReport': '',
+ 'ARTemplate': 'bika_setup/bika_artemplates',
+ # 'ARTemplates': '',
+ # 'ATBooleanCriterion': '',
+ # 'ATCurrentAuthorCriterion': '',
+ # 'ATDateCriteria': '',
+ # 'ATDateRangeCriterion': '',
+ # 'ATListCriterion': '',
+ # 'ATPathCriterion': '',
+ # 'ATPortalTypeCriterion': '',
+ # 'ATReferenceCriterion': '',
+ # 'ATRelativePathCriterion': '',
+ # 'ATSelectionCriterion': '',
+ # 'ATSimpleIntCriterion': '',
+ # 'ATSimpleStringCriterion': '',
+ # 'ATSortCriterion': '',
+ # 'Analysis': '',
+ # 'AnalysisCategories': '',
+ 'AnalysisCategory': 'bika_setup/bika_analysiscategories',
+ 'AnalysisProfile': 'bika_setup/bika_analysisprofiles',
+ # 'AnalysisProfiles': '',
+ # 'AnalysisRequest': '',
+ # 'AnalysisRequestsFolder': '',
+ 'AnalysisService': 'bika_setup/bika_analysisservices',
+ # 'AnalysisServices': '',
+ 'AnalysisSpec': 'bika_setup/bika_analysisspecs',
+ # 'AnalysisSpecs': '',
+ # 'Attachment': '',
+ 'AttachmentType': 'bika_setup/bika_attachmenttypes',
+ # 'AttachmentTypes': '',
+ # 'Batch': '',
+ # 'BatchFolder': '',
+ 'BatchLabel': 'bika_setup/bika_batchlabels',
+ # 'BatchLabels': '',
+ # 'BikaSetup': '',
+ 'Calculation': 'bika_setup/bika_calculations',
+ # 'Calculations': '',
+ 'Client': 'clients',
+ # 'ClientFolder': '',
+ # 'Collection': '',
+ # 'Contact': '',
+ 'Container': 'bika_setup/bika_containers',
+ # 'ContainerType': '',
+ # 'ContainerTypes': '',
+ # 'Containers': '',
+ 'Department': 'bika_setup/bika_departments',
+ # 'Departments': '',
+ # 'Discussion Item': '',
+ # 'Document': '',
+ # 'DuplicateAnalysis': '',
+ # 'Event': '',
+ # 'File': '',
+ # 'Folder': '',
+ 'IdentifierType': 'bika_setup/bika_identifiertypes',
+ # 'IdentifierTypes': '',
+ # 'Image': '',
+ 'Instrument': 'bika_setup/bika_instruments',
+ # 'InstrumentCalibration': '',
+ # 'InstrumentCertification': '',
+ 'InstrumentLocation': 'bika_setup/bika_instrumentlocations',
+ # 'InstrumentLocations': '',
+ # 'InstrumentMaintenanceTask': '',
+ # 'InstrumentScheduledTask': '',
+ 'InstrumentType': 'bika_setup/bika_instrumenttypes',
+ # 'InstrumentTypes': '',
+ # 'InstrumentValidation': '',
+ # 'Instruments': '',
+ # 'Invoice': '',
+ # 'InvoiceBatch': '',
+ # 'InvoiceFolder': '',
+ 'LabContact': 'bika_setup/bika_labcontacts',
+ # 'LabContacts': '',
+ 'LabProduct': 'bika_setup/bika_labproducts',
+ # 'LabProducts': '',
+ # 'Laboratory': '',
+ # 'Link': '',
+ 'Manufacturer': 'bika_setup/bika_manufacturers',
+ # 'Manufacturers': '',
+ 'Method': 'methods',
+ # 'Methods': '',
+ # 'Multifile': '',
+ # 'News Item': '',
+ # 'Plone Site': '',
+ 'Preservation': 'bika_setup/bika_preservations',
+ # 'Preservations': '',
+ # 'Pricelist': '',
+ # 'PricelistFolder': '',
+ # 'ReferenceAnalysis': '',
+ 'ReferenceDefinition': 'bika_setup/bika_referencedefinitions',
+ # 'ReferenceDefinitions': '',
+ # 'ReferenceSample': '',
+ # 'ReferenceSamplesFolder': '',
+ # 'RejectAnalysis': '',
+ # 'Report': '',
+ # 'ReportFolder': '',
+ 'SRTemplate': 'bika_setup/bika_srtemplates',
+ # 'SRTemplates': '',
+ # 'Sample': '',
+ 'SampleCondition': 'bika_setup/bika_sampleconditions',
+ # 'SampleConditions': '',
+ # 'SampleMatrices': '',
+ 'SampleMatrix': 'bika_setup/bika_samplematrices',
+ # 'SamplePartition': '',
+ 'SamplePoint': 'bika_setup/bika_samplepoints',
+ # 'SamplePoints': '',
+ 'SampleType': 'bika_setup/bika_sampletypes',
+ # 'SampleTypes': '',
+ # 'SamplesFolder': '',
+ 'SamplingDeviation': 'bika_setup/bika_samplingdeviations',
+ # 'SamplingDeviations': '',
+ 'SamplingRound': 'bika_setup/bika_samplingrounds',
+ # 'SamplingRounds': '',
+ 'StorageLocation': 'bika_setup/bika_storagelocations',
+ # 'StorageLocations': '',
+ 'SubGroup': 'bika_setup/bika_subgroups',
+ # 'SubGroups': '',
+ 'Supplier': 'bika_setup/bika_suppliers',
+ # 'SupplierContact': '',
+ # 'Suppliers': '',
+ # 'SupplyOrder': '',
+ # 'SupplyOrderFolder': '',
+ # 'TempFolder': '',
+ # 'Topic': '',
+ # 'Worksheet': '',
+ # 'WorksheetFolder': '',
+ 'WorksheetTemplate': 'bika_setup/bika_worksheettemplates',
+ # 'WorksheetTemplates': '',
+}
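+
+# Lookup sketch (hedged: resolving the returned path relative to the portal
+# root is an assumption about the consuming code):
+#
+#   path = CONTAINER_PATHS_FOR_PORTAL_TYPES.get("Client")
+#   # -> "clients"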
+
+ALLOWED_PORTAL_TYPES_TO_CREATE = [
+ # 'ARImport',
+ # 'ARPriorities',
+ 'ARPriority',
+ # 'ARReport',
+ 'ARTemplate',
+ # 'ARTemplates',
+ # 'ATBooleanCriterion',
+ # 'ATCurrentAuthorCriterion',
+ # 'ATDateCriteria',
+ # 'ATDateRangeCriterion',
+ # 'ATListCriterion',
+ # 'ATPathCriterion',
+ # 'ATPortalTypeCriterion',
+ # 'ATReferenceCriterion',
+ # 'ATRelativePathCriterion',
+ # 'ATSelectionCriterion',
+ # 'ATSimpleIntCriterion',
+ # 'ATSimpleStringCriterion',
+ # 'ATSortCriterion',
+ 'Analysis',
+ # 'AnalysisCategories',
+ 'AnalysisCategory',
+ 'AnalysisProfile',
+ # 'AnalysisProfiles',
+ 'AnalysisRequest',
+ # 'AnalysisRequestsFolder',
+ 'AnalysisService',
+ # 'AnalysisServices',
+ 'AnalysisSpec',
+ # 'AnalysisSpecs',
+ 'Attachment',
+ 'AttachmentType',
+ # 'AttachmentTypes',
+ 'Batch',
+ # 'BatchFolder',
+ 'BatchLabel',
+ # 'BatchLabels',
+ # 'BikaSetup',
+ 'Calculation',
+ # 'Calculations',
+ 'Client',
+ # 'ClientFolder',
+ # 'Collection',
+ 'Contact',
+ 'Container',
+ 'ContainerType',
+ # 'ContainerTypes',
+ # 'Containers',
+ 'Department',
+ # 'Departments',
+ # 'Discussion Item',
+ 'Document',
+ # 'DuplicateAnalysis',
+ # 'Event',
+ 'File',
+ # 'Folder',
+ 'IdentifierType',
+ # 'IdentifierTypes',
+ 'Image',
+ 'Instrument',
+ 'InstrumentCalibration',
+ # 'InstrumentCertification',
+ 'InstrumentLocation',
+ # 'InstrumentLocations',
+ # 'InstrumentMaintenanceTask',
+ # 'InstrumentScheduledTask',
+ 'InstrumentType',
+ # 'InstrumentTypes',
+ # 'InstrumentValidation',
+ # 'Instruments',
+ 'Invoice',
+ 'InvoiceBatch',
+ # 'InvoiceFolder',
+ 'LabContact',
+ # 'LabContacts',
+ 'LabProduct',
+ # 'LabProducts',
+ # 'Laboratory',
+ # 'Link',
+ 'Manufacturer',
+ # 'Manufacturers',
+ 'Method',
+ # 'Methods',
+ 'Multifile',
+ # 'News Item',
+ # 'Plone Site',
+ 'Preservation',
+ # 'Preservations',
+ 'Pricelist',
+ # 'PricelistFolder',
+ # 'ReferenceAnalysis',
+ 'ReferenceDefinition',
+ # 'ReferenceDefinitions',
+ 'ReferenceSample',
+ # 'ReferenceSamplesFolder',
+ 'RejectAnalysis',
+ 'Report',
+ # 'ReportFolder',
+ 'SRTemplate',
+ # 'SRTemplates',
+ 'Sample',
+ 'SampleCondition',
+ # 'SampleConditions',
+ # 'SampleMatrices',
+ 'SampleMatrix',
+ # 'SamplePartition',
+ 'SamplePoint',
+ # 'SamplePoints',
+ 'SampleType',
+ # 'SampleTypes',
+ # 'SamplesFolder',
+ 'SamplingDeviation',
+ # 'SamplingDeviations',
+ 'SamplingRound',
+ # 'SamplingRounds',
+ 'StorageLocation',
+ # 'StorageLocations',
+ 'SubGroup',
+ # 'SubGroups',
+ 'Supplier',
+ 'SupplierContact',
+ # 'Suppliers',
+ 'SupplyOrder',
+ # 'SupplyOrderFolder',
+ # 'TempFolder',
+ # 'Topic',
+ 'Worksheet',
+ # 'WorksheetFolder',
+ 'WorksheetTemplate',
+ # 'WorksheetTemplates',
+]
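+
+# Guard sketch (hedged: `portal_type` and the error helper are hypothetical
+# stand-ins for the consuming create routine):
+#
+#   if portal_type not in ALLOWED_PORTAL_TYPES_TO_CREATE:
+#       fail(401, "Creation of this portal type is not allowed")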
diff --git a/src/senaite/jsonapi/configure.zcml b/src/senaite/jsonapi/configure.zcml
new file mode 100644
index 0000000..590f4d5
--- /dev/null
+++ b/src/senaite/jsonapi/configure.zcml
@@ -0,0 +1,200 @@
+