diff --git a/.#buildout.cfg b/.#buildout.cfg new file mode 120000 index 0000000..ee14dcf --- /dev/null +++ b/.#buildout.cfg @@ -0,0 +1 @@ +rbartl@siegfried.local.32593 \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..9cecc1d --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. 
+ + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. 
+ + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. 
If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. 
Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. 
This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + {one line to give the program's name and a brief idea of what it does.} + Copyright (C) {year} {name of author} + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + {project} Copyright (C) {year} {fullname} + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000..2613e53 --- /dev/null +++ b/README.md @@ -0,0 +1,22 @@ +
+ +

+ +
+ senaite.jsonapi +
+
+

+ +

The JSON API provides a RESTful JSON interface to SENAITE LIMS.

+ + + +
diff --git a/bootstrap.py b/bootstrap.py new file mode 100644 index 0000000..a459921 --- /dev/null +++ b/bootstrap.py @@ -0,0 +1,210 @@ +############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Bootstrap a buildout-based project + +Simply run this script in a directory containing a buildout.cfg. +The script accepts buildout command-line options, so you can +use the -c option to specify an alternate configuration file. +""" + +import os +import shutil +import sys +import tempfile + +from optparse import OptionParser + +__version__ = '2015-07-01' +# See zc.buildout's changelog if this version is up to date. + +tmpeggs = tempfile.mkdtemp(prefix='bootstrap-') + +usage = '''\ +[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options] + +Bootstraps a buildout-based project. + +Simply run this script in a directory containing a buildout.cfg, using the +Python that you want bin/buildout to use. + +Note that by using --find-links to point to local resources, you can keep +this script from going over the network. 
+''' + +parser = OptionParser(usage=usage) +parser.add_option("--version", + action="store_true", default=False, + help=("Return bootstrap.py version.")) +parser.add_option("-t", "--accept-buildout-test-releases", + dest='accept_buildout_test_releases', + action="store_true", default=False, + help=("Normally, if you do not specify a --version, the " + "bootstrap script and buildout gets the newest " + "*final* versions of zc.buildout and its recipes and " + "extensions for you. If you use this flag, " + "bootstrap and buildout will get the newest releases " + "even if they are alphas or betas.")) +parser.add_option("-c", "--config-file", + help=("Specify the path to the buildout configuration " + "file to be used.")) +parser.add_option("-f", "--find-links", + help=("Specify a URL to search for buildout releases")) +parser.add_option("--allow-site-packages", + action="store_true", default=False, + help=("Let bootstrap.py use existing site packages")) +parser.add_option("--buildout-version", + help="Use a specific zc.buildout version") +parser.add_option("--setuptools-version", + help="Use a specific setuptools version") +parser.add_option("--setuptools-to-dir", + help=("Allow for re-use of existing directory of " + "setuptools versions")) + +options, args = parser.parse_args() +if options.version: + print("bootstrap.py version %s" % __version__) + sys.exit(0) + + +###################################################################### +# load/install setuptools + +try: + from urllib.request import urlopen +except ImportError: + from urllib2 import urlopen + +ez = {} +if os.path.exists('ez_setup.py'): + exec(open('ez_setup.py').read(), ez) +else: + exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez) + +if not options.allow_site_packages: + # ez_setup imports site, which adds site packages + # this will remove them from the path to ensure that incompatible versions + # of setuptools are not in the path + import site + # inside a virtualenv, there is no 
'getsitepackages'. + # We can't remove these reliably + if hasattr(site, 'getsitepackages'): + for sitepackage_path in site.getsitepackages(): + # Strip all site-packages directories from sys.path that + # are not sys.prefix; this is because on Windows + # sys.prefix is a site-package directory. + if sitepackage_path != sys.prefix: + sys.path[:] = [x for x in sys.path + if sitepackage_path not in x] + +setup_args = dict(to_dir=tmpeggs, download_delay=0) + +if options.setuptools_version is not None: + setup_args['version'] = options.setuptools_version +if options.setuptools_to_dir is not None: + setup_args['to_dir'] = options.setuptools_to_dir + +ez['use_setuptools'](**setup_args) +import setuptools +import pkg_resources + +# This does not (always?) update the default working set. We will +# do it. +for path in sys.path: + if path not in pkg_resources.working_set.entries: + pkg_resources.working_set.add_entry(path) + +###################################################################### +# Install buildout + +ws = pkg_resources.working_set + +setuptools_path = ws.find( + pkg_resources.Requirement.parse('setuptools')).location + +# Fix sys.path here as easy_install.pth added before PYTHONPATH +cmd = [sys.executable, '-c', + 'import sys; sys.path[0:0] = [%r]; ' % setuptools_path + + 'from setuptools.command.easy_install import main; main()', + '-mZqNxd', tmpeggs] + +find_links = os.environ.get( + 'bootstrap-testing-find-links', + options.find_links or + ('http://downloads.buildout.org/' + if options.accept_buildout_test_releases else None) + ) +if find_links: + cmd.extend(['-f', find_links]) + +requirement = 'zc.buildout' +version = options.buildout_version +if version is None and not options.accept_buildout_test_releases: + # Figure out the most recent final version of zc.buildout. 
+ import setuptools.package_index + _final_parts = '*final-', '*final' + + def _final_version(parsed_version): + try: + return not parsed_version.is_prerelease + except AttributeError: + # Older setuptools + for part in parsed_version: + if (part[:1] == '*') and (part not in _final_parts): + return False + return True + + index = setuptools.package_index.PackageIndex( + search_path=[setuptools_path]) + if find_links: + index.add_find_links((find_links,)) + req = pkg_resources.Requirement.parse(requirement) + if index.obtain(req) is not None: + best = [] + bestv = None + for dist in index[req.project_name]: + distv = dist.parsed_version + if _final_version(distv): + if bestv is None or distv > bestv: + best = [dist] + bestv = distv + elif distv == bestv: + best.append(dist) + if best: + best.sort() + version = best[-1].version +if version: + requirement = '=='.join((requirement, version)) +cmd.append(requirement) + +import subprocess +if subprocess.call(cmd) != 0: + raise Exception( + "Failed to execute command:\n%s" % repr(cmd)[1:-1]) + +###################################################################### +# Import and run buildout + +ws.add_entry(tmpeggs) +ws.require(requirement) +import zc.buildout.buildout + +if not [a for a in args if '=' not in a]: + args.append('bootstrap') + +# if -c was provided, we push it back into args for buildout' main function +if options.config_file is not None: + args[0:0] = ['-c', options.config_file] + +zc.buildout.buildout.main(args) +shutil.rmtree(tmpeggs) diff --git a/buildout.cfg b/buildout.cfg new file mode 100644 index 0000000..eefae2a --- /dev/null +++ b/buildout.cfg @@ -0,0 +1,137 @@ +[buildout] +parts = + instance + test + robot + code-analysis + ipzope + omelette + +extends = + http://dist.plone.org/release/4.3.15/versions.cfg + +find-links = + http://dist.plone.org/release/4.3.15 + http://dist.plone.org/thirdparty + +eggs = + Plone + Pillow + plone.reload + i18ndude + Products.PrintingMailHost + +zcml = + +versions = 
versions +extensions = mr.developer + +eggs-directory = ${buildout:directory}/eggs +download-cache = ~/.buildout/downloads + +sources = sources +auto-checkout = * + +develop = . + +[sources] +bika.lims = git https://github.com/bikalims/bika.lims.git branch=master +senaite.api = git https://github.com/senaite/senaite.api.git branch=master + +[instance] +recipe = plone.recipe.zope2instance +user = admin:admin +http-address = 0.0.0.0:8080 +eggs = + senaite.jsonapi + ${buildout:eggs} +zcml = + senaite.jsonapi + ${buildout:zcml} + +[test] +recipe = zc.recipe.testrunner +eggs = + senaite.jsonapi [test] +defaults = ['--auto-color', '--auto-progress'] + +[code-analysis] +recipe = plone.recipe.codeanalysis +directory = ${buildout:directory}/src/senaite/jsonapi +clean-lines = True +clean-lines-exclude = + **/node_modules/* +find-untranslated = False +i18ndude-bin = ${buildout:bin-directory}/i18ndude +utf8-header = True +deprecated-aliases = True +flake8-extensions = + flake8-blind-except + flake8-coding + flake8-debugger +flake8-ignore = E241,E301,E501 +multiprocessing = True +return-status-codes = False + +[robot] +recipe = zc.recipe.egg +eggs = + ${test:eggs} + plone.app.robotframework +scripts = + robot-server + robot + +# Please download ipy_profile_zope to ~/.ipython/profile_zope/startup/ipy_profile_zope.py +# https://raw.githubusercontent.com/collective/dotipython/master/ipy_profile_zope.py +[ipzope] +recipe = zc.recipe.egg +eggs = + ipython<=4 + ${instance:eggs} +initialization = + import sys, os + os.environ["SOFTWARE_HOME"] = "${instance:location}" + os.environ["INSTANCE_HOME"] = "${instance:location}" + sys.argv[1:1] = "--profile zope".split() +extra-paths = + ${instance:location}/lib/python +scripts = ipython=ipzope + +[omelette] +recipe = collective.recipe.omelette +eggs = ${buildout:eggs} + +[i18ndude] +unzip = true +recipe = zc.recipe.egg +eggs = i18ndude + +[update_gtags] +recipe = collective.recipe.template +output = ${buildout:directory}/bin/update_gtags 
+input = ${buildout:directory}/templates/update_gtags.in +mode = 755 + +[update_translations] +recipe = collective.recipe.template +output = ${buildout:directory}/bin/update_translations +input = ${buildout:directory}/templates/update_translations.in +mode = 755 + +[write_code_headers] +recipe = collective.recipe.template +output = ${buildout:directory}/bin/write_code_headers +input = ${buildout:directory}/templates/write_code_headers.py.in +mode = 755 + +[sphinxbuilder] +recipe = collective.recipe.sphinxbuilder +interpreter = ${buildout:directory}/bin/zopepy +build = ${buildout:directory}/docs/_build +source = ${buildout:directory}/docs + +[versions] +setuptools = +zc.buildout = +CairoSVG = 1.0.20 \ No newline at end of file diff --git a/docs/Changelog.rst b/docs/Changelog.rst new file mode 100644 index 0000000..a22d2b6 --- /dev/null +++ b/docs/Changelog.rst @@ -0,0 +1,4 @@ +1.0.0 (unreleased) +------------------ + +- initial package setup diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..3cf5636 --- /dev/null +++ b/setup.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- + +from setuptools import setup, find_packages + +version = '1.0.0' + + +setup( + name='senaite.jsonapi', + version=version, + description="SENAITE JSON API", + long_description=open("README.md").read() + "\n" + + open("docs/Changelog.rst").read() + "\n" + + "\n\n" + + "Authors and maintainers\n" + + "-----------------------\n" + + "- RIDING BYTES, http://ridingbytes.com", + # Get more strings from + # http://pypi.python.org/pypi?:action=list_classifiers + classifiers=[ + "Programming Language :: Python", + "Framework :: Plone", + "Framework :: Zope2", + ], + keywords='', + author='SENAITE Foundation', + author_email='hello@senaite.com', + url='https://github.com/senaite/senaite.jsonapi', + license='GPLv3', + packages=find_packages('src', exclude=['ez_setup']), + package_dir={'': 'src'}, + namespace_packages=['senaite'], + include_package_data=True, + zip_safe=False, + 
install_requires=[ + 'setuptools', + 'plone.api', + 'senaite.api', + 'bika.lims>=3.2.1rc3', + ], + extras_require={ + 'test': [ + 'Products.PloneTestCase', + 'Products.SecureMailHost', + 'plone.app.robotframework', + 'plone.app.testing', + 'robotframework-debuglibrary', + 'robotframework-selenium2library', + 'robotsuite', + 'unittest2', + ] + }, + entry_points=""" + # -*- Entry points: -*- + [z3c.autoinclude.plugin] + target = plone + """, +) diff --git a/src/senaite/__init__.py b/src/senaite/__init__.py new file mode 100644 index 0000000..7185b20 --- /dev/null +++ b/src/senaite/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2017 SENAITE + +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/src/senaite/docs/JSONAPIv1.rst b/src/senaite/docs/JSONAPIv1.rst new file mode 100644 index 0000000..7666f9a --- /dev/null +++ b/src/senaite/docs/JSONAPIv1.rst @@ -0,0 +1,63 @@ +SENAITE JSON API V1 +=================== + +Running this test from the buildout directory:: + + bin/test test_doctests -t JSONAPIv1 + + +Test Setup +---------- + +Needed Imports:: + + >>> import transaction + + >>> from plone.app.testing import TEST_USER_ID + >>> from plone.app.testing import TEST_USER_PASSWORD + + >>> from senaite.lims import api + +Functional Helpers:: + + >>> def start_server(): + ... from Testing.ZopeTestCase.utils import startZServer + ... ip, port = startZServer() + ... return "http://{}:{}/{}".format(ip, port, portal.id) + + >>> def login(user=TEST_USER_ID, password=TEST_USER_PASSWORD): + ... browser.open(portal_url + "/login_form") + ... browser.getControl(name='__ac_name').value = user + ... browser.getControl(name='__ac_password').value = password + ... browser.getControl(name='submit').click() + ... assert("__ac_password" not in browser.contents) + + >>> def logout(): + ... browser.open(portal_url + "/logout") + ... 
assert("You are now logged out" in browser.contents) + +Variables:: + + >>> portal = self.getPortal() + >>> portal_url = portal.absolute_url() + >>> bika_setup = portal.bika_setup + >>> bika_setup_url = portal_url + "/bika_setup" + >>> browser = self.getBrowser() + +JSON API:: + + >>> api_base_url = portal_url + "/@@API/senaite/v1" + + +Version +======= + +Ensure we are logged out:: + + >>> logout() + +The version route should be visible to unauthenticated users:: + + >>> browser.open(api_base_url + "/version") + >>> browser.contents + '{"url": "http://nohost/plone/@@API/v2/version", "date": "...", "version": ..., "_runtime": ...}' diff --git a/src/senaite/jsonapi/__init__.py b/src/senaite/jsonapi/__init__.py new file mode 100644 index 0000000..34e1ed3 --- /dev/null +++ b/src/senaite/jsonapi/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- + +from plone.jsonapi.core import router + +from senaite.lims import logger + + +def add_route(route, endpoint=None, **kw): + """Add a new JSON API route + """ + def wrapper(f): + try: + router.DefaultRouter.add_url_rule(route, + endpoint=endpoint, + view_func=f, + options=kw) + except AssertionError, e: + logger.warn("Failed to register route {}: {}".format(route, e)) + return f + return wrapper + + +def url_for(endpoint, default="senaite.lims.jsonapi.get", **values): + """Looks up the API URL for the given endpoint + + :param endpoint: The name of the registered route (aka endpoint) + :type endpoint: string + :returns: External URL for this endpoint + :rtype: string/None + """ + + try: + return router.url_for(endpoint, force_external=True, values=values) + except Exception: + # XXX plone.jsonapi.core should catch the BuildError of Werkzeug and + # throw another error which can be handled here. + logger.debug("Could not build API URL for endpoint '%s'. " + "No route provider registered?" 
% endpoint) + + # build generic API URL + return router.url_for(default, force_external=True, values=values) diff --git a/src/senaite/jsonapi/api.py b/src/senaite/jsonapi/api.py new file mode 100644 index 0000000..c817cd6 --- /dev/null +++ b/src/senaite/jsonapi/api.py @@ -0,0 +1,1401 @@ +# -*- coding: utf-8 -*- + +import json +import datetime + +from DateTime import DateTime +from AccessControl import Unauthorized +from Products.CMFPlone.PloneBatch import Batch +from Products.ZCatalog.Lazy import LazyMap +from Acquisition import ImplicitAcquisitionWrapper + +from zope.schema import getFields + +from plone import api as ploneapi +from plone.jsonapi.core import router +from plone.behavior.interfaces import IBehaviorAssignable + +from senaite.lims import api +from senaite.lims import logger +from senaite.lims.jsonapi import config +from senaite.lims.jsonapi import request as req +from senaite.lims.jsonapi import underscore as u +from senaite.lims.jsonapi.interfaces import IInfo +from senaite.lims.jsonapi.interfaces import IBatch +from senaite.lims.jsonapi.interfaces import ICatalog +from senaite.lims.jsonapi.exceptions import APIError +from senaite.lims.jsonapi.interfaces import IDataManager +from senaite.lims.jsonapi.interfaces import IFieldManager +from senaite.lims.jsonapi.interfaces import ICatalogQuery +from bika.lims.utils.analysisrequest import create_analysisrequest as create_ar + +_marker = object() + +DEFAULT_ENDPOINT = "senaite.lims.jsonapi.v1.get" + + +# ----------------------------------------------------------------------------- +# JSON API (CRUD) Functions (called by the route providers) +# ----------------------------------------------------------------------------- + +# GET RECORD +def get_record(uid=None): + """Get a single record + """ + obj = None + if uid is not None: + obj = get_object_by_uid(uid) + else: + obj = get_object_by_request() + if obj is None: + fail(404, "No object found") + complete = req.get_complete(default=_marker) + if complete 
is _marker: + complete = True + items = make_items_for([obj], complete=complete) + return u.first(items) + + +# GET BATCHED +def get_batched(portal_type=None, uid=None, endpoint=None, **kw): + """Get batched results + """ + + # fetch the catalog results + results = get_search_results(portal_type=portal_type, uid=uid, **kw) + + # fetch the batch params from the request + size = req.get_batch_size() + start = req.get_batch_start() + + # check for existing complete flag + complete = req.get_complete(default=_marker) + if complete is _marker: + # if the uid is given, get the complete information set + complete = uid and True or False + + # return a batched record + return get_batch(results, size, start, endpoint=endpoint, + complete=complete) + + +# CREATE +def create_items(portal_type=None, uid=None, endpoint=None, **kw): + """ create items + + 1. If the uid is given, get the object and create the content in there + (assumed that it is folderish) + 2. If the uid is 0, the target folder is assumed the portal. + 3. 
If there is no uid given, the payload is checked for either a key + - `parent_uid` specifies the *uid* of the target folder + - `parent_path` specifies the *physical path* of the target folder + """ + + # disable CSRF + req.disable_csrf_protection() + + # destination where to create the content + container = uid and get_object_by_uid(uid) or None + + # extract the data from the request + records = req.get_request_data() + + results = [] + for record in records: + + # get the portal_type + if portal_type is None: + # try to fetch the portal type out of the request data + portal_type = record.pop("portal_type", None) + + # check if it is allowed to create the portal_type + if not is_creation_allowed(portal_type): + fail(401, "Creation of '{}' is not allowed".format(portal_type)) + + if container is None: + # find the container for content creation + container = find_target_container(portal_type, record) + + # Check if we have a container and a portal_type + if not all([container, portal_type]): + fail(400, "Please provide a container path/uid and portal_type") + + # create the object and pass in the record data + obj = create_object(container, portal_type, **record) + results.append(obj) + + if not results: + fail(400, "No Objects could be created") + + return make_items_for(results, endpoint=endpoint) + + +# UPDATE +def update_items(portal_type=None, uid=None, endpoint=None, **kw): + """ update items + + 1. If the uid is given, the user wants to update the object with the data + given in request body + 2. If no uid is given, the user wants to update a bunch of objects. 
+ -> each record contains either an UID, path or parent_path + id + """ + + # disable CSRF + req.disable_csrf_protection() + + # the data to update + records = req.get_request_data() + + # we have an uid -> try to get an object for it + obj = get_object_by_uid(uid) + if obj: + record = records[0] # ignore other records if we got an uid + obj = update_object_with_data(obj, record) + return make_items_for([obj], endpoint=endpoint) + + # no uid -> go through the record items + results = [] + for record in records: + obj = get_object_by_record(record) + + # no object found for this record + if obj is None: + continue + + # update the object with the given record data + obj = update_object_with_data(obj, record) + results.append(obj) + + if not results: + fail(400, "No Objects could be updated") + + return make_items_for(results, endpoint=endpoint) + + +# DELETE +def delete_items(portal_type=None, uid=None, endpoint=None, **kw): + """ delete items + + 1. If the uid is given, we can ignore the request body and delete the + object with the given uid (if the uid was valid). + 2. If no uid is given, the user wants to delete more than one item. + => go through each item and extract the uid. Delete it afterwards. + // we should do this kind of transaction base. So if we can not get an + // object for an uid, no item will be deleted. + 3. we could check if the portal_type matches, just to be sure the user + wants to delete the right content. + """ + + # disable CSRF + req.disable_csrf_protection() + + # try to find the requested objects + objects = find_objects(uid=uid) + + # We don't want to delete the portal object + if filter(lambda o: is_root(o), objects): + fail(400, "Can not delete the portal object") + + results = [] + for obj in objects: + # We deactivate only! 
+ deactivate_object(obj) + info = IInfo(obj)() + results.append(info) + + if not results: + fail(404, "No Objects could be found") + + return results + + +def make_items_for(brains_or_objects, endpoint=None, complete=False): + """Generate API compatible data items for the given list of brains/objects + + :param brains_or_objects: List of objects or brains + :type brains_or_objects: list/Products.ZCatalog.Lazy.LazyMap + :param endpoint: The named URL endpoint for the root of the items + :type endpoint: str/unicode + :param complete: Flag to wake up the object and fetch all data + :type complete: bool + :returns: A list of extracted data items + :rtype: list + """ + + # check if the user wants to include children + include_children = req.get_children(False) + + def extract_data(brain_or_object): + info = get_info(brain_or_object, endpoint=endpoint, complete=complete) + if include_children and is_folderish(brain_or_object): + info.update(get_children_info(brain_or_object, complete=complete)) + return info + + return map(extract_data, brains_or_objects) + + +# ----------------------------------------------------------------------------- +# Info Functions (JSON compatible data representation) +# ----------------------------------------------------------------------------- + +def get_info(brain_or_object, endpoint=None, complete=False): + """Extract the data from the catalog brain or object + + :param brain_or_object: A single catalog brain or content object + :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain + :param endpoint: The named URL endpoint for the root of the items + :type endpoint: str/unicode + :param complete: Flag to wake up the object and fetch all data + :type complete: bool + :returns: Data mapping for the object/catalog brain + :rtype: dict + """ + + # extract the data from the initial object with the proper adapter + info = IInfo(brain_or_object).to_dict() + + # update with url info (always included) + url_info = 
get_url_info(brain_or_object, endpoint) + info.update(url_info) + + # include the parent url info + parent = get_parent_info(brain_or_object) + info.update(parent) + + # add the complete data of the object if requested + # -> requires to wake up the object if it is a catalog brain + if complete: + # ensure we have a full content object + obj = api.get_object(brain_or_object) + # get the compatible adapter + adapter = IInfo(obj) + # update the data set with the complete information + info.update(adapter.to_dict()) + + # update the data set with the workflow information + # -> only possible if `?complete=yes&workflow=yes` + if req.get_workflow(False): + info.update(get_workflow_info(obj)) + + # # add sharing data if the user requested it + # # -> only possible if `?complete=yes` + # if req.get_sharing(False): + # sharing = get_sharing_info(obj) + # info.update({"sharing": sharing}) + + return info + + +def get_url_info(brain_or_object, endpoint=None): + """Generate url information for the content object/catalog brain + + :param brain_or_object: A single catalog brain or content object + :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain + :param endpoint: The named URL endpoint for the root of the items + :type endpoint: str/unicode + :returns: URL information mapping + :rtype: dict + """ + + # If no endpoint was given, guess the endpoint by portal type + if endpoint is None: + endpoint = get_endpoint(brain_or_object) + + uid = get_uid(brain_or_object) + portal_type = get_portal_type(brain_or_object) + resource = portal_type_to_resource(portal_type) + + return { + "uid": uid, + "url": get_url(brain_or_object), + "api_url": url_for(endpoint, resource=resource, uid=uid), + } + + +def get_parent_info(brain_or_object, endpoint=None): + """Generate url information for the parent object + + :param brain_or_object: A single catalog brain or content object + :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain + :param endpoint: The 
named URL endpoint for the root of the items + :type endpoint: str/unicode + :returns: URL information mapping + :rtype: dict + """ + + # special case for the portal object + if is_root(brain_or_object): + return {} + + # get the parent object + parent = get_parent(brain_or_object) + portal_type = get_portal_type(parent) + resource = portal_type_to_resource(portal_type) + + # fall back if no endpoint specified + if endpoint is None: + endpoint = get_endpoint(parent) + + return { + "parent_id": get_id(parent), + "parent_uid": get_uid(parent), + "parent_url": url_for(endpoint, resource=resource, uid=get_uid(parent)) + } + + +def get_children_info(brain_or_object, complete=False): + """Generate data items of the contained contents + + :param brain_or_object: A single catalog brain or content object + :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain + :param complete: Flag to wake up the object and fetch all data + :type complete: bool + :returns: info mapping of contained content items + :rtype: list + """ + + # fetch the contents (if folderish) + children = get_contents(brain_or_object) + + def extract_data(brain_or_object): + return get_info(brain_or_object, complete=complete) + items = map(extract_data, children) + + return { + "children_count": len(items), + "children": items + } + + +def get_file_info(obj, fieldname, default=None): + """Extract file data from a file field + + :param obj: Content object + :type obj: ATContentType/DexterityContentType + :param fieldname: Schema name of the field + :type fieldname: str/unicode + :returns: File data mapping + :rtype: dict + """ + + # extract the file field from the object if omitted + field = get_field(obj, fieldname) + + # get the value with the fieldmanager + fm = IFieldManager(field) + + # return None if we have no file data + if fm.get_size(obj) == 0: + return None + + out = { + "content_type": fm.get_content_type(obj), + "filename": fm.get_filename(obj), + "download": 
fm.get_download_url(obj), + } + + # only return file data only if requested (?filedata=yes) + if req.get_filedata(False): + data = fm.get_data(obj) + out["data"] = data.encode("base64") + + return out + + +def get_workflow_info(brain_or_object, endpoint=None): + """Generate workflow information of the assigned workflows + + :param brain_or_object: A single catalog brain or content object + :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain + :param endpoint: The named URL endpoint for the root of the items + :type endpoint: str/unicode + :returns: Workflows info + :rtype: dict + """ + + # ensure we have a full content object + obj = get_object(brain_or_object) + + # get the portal workflow tool + wf_tool = get_tool("portal_workflow") + + # the assigned workflows of this object + workflows = wf_tool.getWorkflowsFor(obj) + + # no worfkflows assigned -> return + if not workflows: + return [] + + def to_transition_info(transition): + """ return the transition information + """ + return { + "title": transition["title"], + "value": transition["id"], + "display": transition["description"], + "url": transition["url"], + } + + out = [] + + for workflow in workflows: + + # get the status info of the current state (dictionary) + info = wf_tool.getStatusOf(workflow.getId(), obj) + + # get the current review_status + review_state = info.get("review_state", None) + inactive_state = info.get("inactive_state", None) + cancellation_state = info.get("cancellation_state", None) + worksheetanalysis_review_state = info.get("worksheetanalysis_review_state", None) + + state = review_state or \ + inactive_state or \ + cancellation_state or \ + worksheetanalysis_review_state + + if state is None: + logger.warn("No state variable found for {} -> {}".format( + repr(obj), info)) + continue + + # get the wf status object + status_info = workflow.states[state] + + # get the title of the current status + status = status_info.title + + # get the transition informations + 
transitions = map(to_transition_info, wf_tool.getTransitionsFor(obj)) + + out.append({ + "workflow": workflow.getId(), + "status": status, + "review_state": state, + "transitions": transitions, + }) + + return {"workflow_info": out} + + +# ----------------------------------------------------------------------------- +# API +# ----------------------------------------------------------------------------- + +def fail(status, msg): + """API Error + """ + if msg is None: + msg = "Reason not given." + raise APIError(status, "{}".format(msg)) + + +def search(**kw): + """Search the catalog adapter + + :returns: Catalog search results + :rtype: iterable + """ + portal = get_portal() + catalog = ICatalog(portal) + catalog_query = ICatalogQuery(catalog) + query = catalog_query.make_query(**kw) + return catalog(query) + + +def get_search_results(portal_type=None, uid=None, **kw): + """Search the catalog and return the results + + :returns: Catalog search results + :rtype: iterable + """ + + # If we have an UID, return the object immediately + if uid is not None: + logger.info("UID '%s' found, returning the object immediately" % uid) + return u.to_list(get_object_by_uid(uid)) + + # allow to search search for the Plone Site with portal_type + include_portal = False + if u.to_string(portal_type) == "Plone Site": + include_portal = True + + # The request may contain a list of portal_types, e.g. 
def get_portal():
    """Proxy to senaite.lims.api.get_portal
    """
    return api.get_portal()


def get_tool(name, default=_marker):
    """Proxy to senaite.lims.api.get_tool
    """
    return api.get_tool(name, default)


def get_object(brain_or_object):
    """Proxy to senaite.lims.api.get_object
    """
    return api.get_object(brain_or_object)


def is_brain(brain_or_object):
    """Proxy to senaite.lims.api.is_brain
    """
    return api.is_brain(brain_or_object)


def is_at_content(brain_or_object):
    """Proxy to senaite.lims.api.is_at_content
    """
    return api.is_at_content(brain_or_object)


def is_dexterity_content(brain_or_object):
    """Proxy to senaite.lims.api.is_dexterity_content
    """
    return api.is_dexterity_content(brain_or_object)


def get_schema(brain_or_object):
    """Get the schema of the content

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Schema object (None for the portal root, which has no schema)
    :raises: APIError (400) when no schema can be looked up
    """
    obj = get_object(brain_or_object)
    if is_root(obj):
        return None
    if is_dexterity_content(obj):
        pt = get_tool("portal_types")
        fti = pt.getTypeInfo(obj.portal_type)
        return fti.lookupSchema()
    if is_at_content(obj):
        return obj.Schema()
    fail(400, "{} has no Schema.".format(repr(brain_or_object)))


def get_fields(brain_or_object):
    """Get the mapping of field names to fields of the object

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Mapping of fieldname -> field
    :rtype: dict
    """
    obj = get_object(brain_or_object)
    # The portal object has no schema
    if is_root(obj):
        return {}
    schema = get_schema(obj)
    if is_dexterity_content(obj):
        names = schema.names()
        fields = map(lambda name: schema.get(name), names)
        schema_fields = dict(zip(names, fields))
        # update with behavior fields
        schema_fields.update(get_behaviors(obj))
        return schema_fields
    return dict(zip(schema.keys(), schema.fields()))
def get_field(brain_or_object, name, default=None):
    """Return the named schema field of the object

    :param brain_or_object: A single catalog brain or content object
    :param name: Name of the field to return
    :param default: Returned when no field with that name exists
    :returns: Field object or default
    """
    fields = get_fields(brain_or_object)
    return fields.get(name, default)


def get_behaviors(brain_or_object):
    """Iterate over all behaviors that are assigned to the object

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Mapping of fieldname -> field provided by behaviors
    :rtype: dict
    """
    obj = get_object(brain_or_object)
    if not is_dexterity_content(obj):
        fail(400, "Only Dexterity contents can have assigned behaviors")
    assignable = IBehaviorAssignable(obj, None)
    if not assignable:
        return {}
    out = {}
    for behavior in assignable.enumerateBehaviors():
        for name, field in getFields(behavior.interface).items():
            out[name] = field
    return out


def is_root(brain_or_object):
    """Proxy to senaite.lims.api.is_portal
    """
    return api.is_portal(brain_or_object)


def is_folderish(brain_or_object):
    """Proxy to senaite.lims.api.is_folderish
    """
    return api.is_folderish(brain_or_object)


def is_uid(uid):
    """Checks if the passed in uid is a valid UID

    :param uid: The uid to check
    :type uid: string
    :return: True if the uid is '0' or a 32 character string
        (NOTE: only the length is verified, not that the characters are
        actually alphanumeric)
    :rtype: bool
    """
    if not isinstance(uid, basestring):
        return False
    if uid != "0" and len(uid) != 32:
        return False
    return True


def is_path(path):
    """Checks if the passed in path is a valid Path within the portal

    :param path: The path to check
    :type path: string
    :return: True if the path is a valid path within the portal
    :rtype: bool
    """
    if not isinstance(path, basestring):
        return False
    portal_path = get_path(get_portal())
    if not path.startswith(portal_path):
        return False
    obj = get_object_by_path(path)
    if obj is None:
        return False
    return True
def is_json_serializable(thing):
    """Checks if the given thing can be serialized to JSON

    :param thing: The object to check if it can be serialized
    :type thing: arbitrary object
    :returns: True if it can be JSON serialized
    :rtype: bool
    """
    try:
        json.dumps(thing)
    except TypeError:
        return False
    return True


def to_json_value(obj, fieldname, value=_marker, default=None):
    """JSON safe value encoding

    Bridges the raw field value to a (possibly more complex) JSON
    structure suitable to return to the client.

    :param obj: Content object
    :type obj: ATContentType/DexterityContentType
    :param fieldname: Schema name of the field
    :type fieldname: str/unicode
    :param value: The field value
    :type value: depends on the field type
    :returns: JSON encoded field value
    :rtype: field dependent
    """

    # fetch the value through the data manager unless explicitly given
    if value is _marker:
        value = IDataManager(obj).json_data(fieldname)

    # Acquisition wrapped objects are rendered as URL info records
    if isinstance(value, ImplicitAcquisitionWrapper):
        return get_url_info(value)

    # dates are rendered in ISO format
    if is_date(value):
        return to_iso_date(value)

    # support computed values
    if callable(value):
        value = value()

    # only JSON serializable values may pass through
    if is_json_serializable(value):
        return value

    logger.warn("Output {} is not JSON serializable".format(repr(value)))
    return default
def is_date(thing):
    """Checks if the given thing represents a date

    :param thing: The object to check if it is a date
    :type thing: arbitrary object
    :returns: True if we have a date object
    :rtype: bool
    """
    # known date types
    date_types = (datetime.datetime,
                  datetime.date,
                  DateTime)
    return isinstance(thing, date_types)


def is_lazy_map(thing):
    """Checks if the passed in thing is a ZCatalog LazyMap

    :param thing: The thing to test
    :type thing: any
    :returns: True if the thing is a LazyMap
    :rtype: bool
    """
    return isinstance(thing, LazyMap)


def to_iso_date(date, default=None):
    """ISO representation for the date object

    :param date: A date object
    :type date: datetime/DateTime
    :param default: Returned when `date` is not a date object
    :returns: The ISO format of the date
    :rtype: str
    """

    # not a date
    if not is_date(date):
        return default

    # handle Zope DateTime objects
    if isinstance(date, (DateTime)):
        return date.ISO8601()

    # handle python datetime objects
    return date.isoformat()


def get_contents(brain_or_object, depth=1):
    """Lookup folder contents for this object

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :param depth: Search depth below the object's path (default: children)
    :returns: List of contained contents
    :rtype: list/Products.ZCatalog.Lazy.LazyMap
    """

    # Non-folderish objects cannot contain anything
    if not is_folderish(brain_or_object):
        return []

    query = {
        "path": {
            "query": get_path(brain_or_object),
            "depth": depth,
        }
    }

    return search(query=query)
def get_parent(brain_or_object):
    """Locate the parent object of the content/catalog brain

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: parent object
    :rtype: Parent content
    """

    # the portal is its own parent
    if is_root(brain_or_object):
        return get_portal()

    # brains are resolved over the (cheaper) catalog path
    if is_brain(brain_or_object):
        parent_path = get_parent_path(brain_or_object)
        return get_object_by_path(parent_path)

    return brain_or_object.aq_parent


def get_object_by_uid(uid, default=None):
    """Proxy to senaite.lims.api.get_object_by_uid
    """
    return api.get_object_by_uid(uid, default)


def get_path(brain_or_object):
    """Proxy to senaite.lims.api.get_path
    """
    return api.get_path(brain_or_object)


def get_parent_path(brain_or_object):
    """Proxy to senaite.lims.api.get_parent_path
    """
    return api.get_parent_path(brain_or_object)


def get_id(brain_or_object):
    """Proxy to senaite.lims.api.get_id
    """
    return api.get_id(brain_or_object)


def get_uid(brain_or_object):
    """Proxy to senaite.lims.api.get_uid
    """
    return api.get_uid(brain_or_object)


def get_url(brain_or_object):
    """Proxy to senaite.lims.api.get_url
    """
    return api.get_url(brain_or_object)


def get_portal_type(brain_or_object):
    """Proxy to senaite.lims.api.get_portal_type
    """
    return api.get_portal_type(brain_or_object)


def do_transition_for(brain_or_object, transition):
    """Proxy to senaite.lims.api.do_transition_for
    """
    return api.do_transition_for(brain_or_object, transition)


def get_portal_types():
    """Get a list of all portal types

    :returns: List of portal type names
    :rtype: list
    """
    types_tool = get_tool("portal_types")
    return types_tool.listContentTypes()


def get_resource_mapping():
    """Map resources used in the routes to portal types

    :returns: Mapping of resource->portal_type
    :rtype: dict
    """
    portal_types = get_portal_types()
    resources = map(portal_type_to_resource, portal_types)
    return dict(zip(resources, portal_types))
def portal_type_to_resource(portal_type):
    """Converts a portal type name to a resource name

    :param portal_type: Portal type name
    :type portal_type: string
    :returns: Resource name as it is used in the content route
        (lowercased, spaces removed)
    :rtype: string
    """
    resource = portal_type.lower()
    resource = resource.replace(" ", "")
    return resource


def resource_to_portal_type(resource):
    """Converts a resource to a portal type

    :param resource: Resource name as it is used in the content route
    :type resource: string
    :returns: Portal type name or None if the resource is unknown
    :rtype: string
    """
    if resource is None:
        return None

    resource_mapping = get_resource_mapping()
    portal_type = resource_mapping.get(resource.lower())

    if portal_type is None:
        logger.warn("Could not map the resource '{}' "
                    "to any known portal type".format(resource))

    return portal_type


def get_container_for(portal_type):
    """Returns the single holding container object of this content type

    :param portal_type: The portal type requested
    :type portal_type: string
    :returns: Folderish container where the portal type can be created,
        or None when no static container location is configured
    :rtype: AT content object
    """
    container_paths = config.CONTAINER_PATHS_FOR_PORTAL_TYPES
    container_path = container_paths.get(portal_type)

    if container_path is None:
        return None

    portal_path = get_path(get_portal())
    return get_object_by_path("/".join([portal_path, container_path]))


def is_creation_allowed(portal_type):
    """Checks if it is allowed to create the portal type

    :param portal_type: The portal type requested
    :type portal_type: string
    :returns: True if it is allowed to create this object
    :rtype: bool
    """
    allowed_portal_types = config.ALLOWED_PORTAL_TYPES_TO_CREATE
    return portal_type in allowed_portal_types
def url_for(endpoint, default=DEFAULT_ENDPOINT, **values):
    """Looks up the API URL for the given endpoint

    :param endpoint: The name of the registered route (aka endpoint)
    :type endpoint: string
    :returns: External URL for this endpoint
    :rtype: string/None
    """
    try:
        return router.url_for(endpoint, force_external=True, values=values)
    except Exception:
        logger.warn("Could not build API URL for endpoint '%s'. "
                    "No route provider registered?" % endpoint)
        # fall back to the generic API URL
        return router.url_for(default, force_external=True, values=values)


def get_endpoint(brain_or_object, default=DEFAULT_ENDPOINT):
    """Calculate the endpoint for this object

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Endpoint for this object
    :rtype: string
    """
    resource = portal_type_to_resource(get_portal_type(brain_or_object))

    # try to find the right namespaced endpoint
    registered = router.DefaultRouter.view_functions.keys()
    if resource in registered:
        # exact match
        return resource

    candidates = [name for name in registered if name.endswith(resource)]
    # only return the namespaced endpoint if we have an exact match
    if len(candidates) == 1:
        return candidates[0]

    return default


def get_catalog():
    """Get catalog adapter

    :returns: ICatalog adapter for the Portal
    :rtype: CatalogTool
    """
    return ICatalog(get_portal())


def get_object_by_request():
    """Find an object by request parameters

    Inspects request parameters to locate an object

    :returns: Found Object or None
    :rtype: object
    """
    data = req.get_form() or req.get_query_string()
    return get_object_by_record(data)


def get_object_by_record(record):
    """Find an object by a given record

    Inspects the record to locate an object

    :param record: A dictionary representation of an object
    :type record: dict
    :returns: Found Object or None
    :rtype: object
    """

    # nothing to do here
    if not record:
        return None

    uid = record.get("uid")
    if uid:
        return get_object_by_uid(uid)

    path = record.get("path")
    if path:
        return get_object_by_path(path)

    parent_path = record.get("parent_path")
    obj_id = record.get("id")
    if parent_path and obj_id:
        return get_object_by_path("/".join([parent_path, obj_id]))

    logger.warn("get_object_by_record::No object found! record='%r'" % record)
    return None
def get_object_by_path(path):
    """Find an object by a given physical path

    :param path: The physical path of the object to find
    :type path: string
    :returns: Found Object or None
    :rtype: object
    """

    # nothing to do here
    if not path:
        return None

    portal = get_portal()
    portal_path = get_path(portal)

    # the portal itself was requested
    if path == portal_path:
        return portal

    # strip the portal prefix to get a path traversable from the portal
    if path.startswith(portal_path):
        path = "/".join(path.split("/")[2:])

    try:
        return portal.restrictedTraverse(str(path))
    except (KeyError, AttributeError):
        fail(404, "No object could be found at {}".format(str(path)))


def is_anonymous():
    """Check if the current user is authenticated or not

    :returns: True if the current user is authenticated
    :rtype: bool
    """
    return ploneapi.user.is_anonymous()


def get_current_user():
    """Get the current logged in user

    :returns: Member
    :rtype: object
    """
    return ploneapi.user.get_current()


def get_member_ids():
    """Return all member ids of the portal.
    """
    pm = get_tool("portal_membership")
    # filter out potentially falsy member ids
    # How can it be possible to get member ids with None?
    return filter(lambda x: x, pm.listMemberIds())
def get_user(user_or_username=None):
    """Return Plone User

    :param user_or_username: Plone user or user id
    :type user_or_username: PloneUser/MemberData/str
    :returns: Plone MemberData or None
    :rtype: object
    """
    if user_or_username is None:
        return None
    # resolve user objects to their user id first
    if hasattr(user_or_username, "getUserId"):
        return ploneapi.user.get(user_or_username.getUserId())
    return ploneapi.user.get(userid=u.to_string(user_or_username))


def get_user_properties(user_or_username):
    """Return User Properties

    :param user_or_username: Plone user or user id
    :type user_or_username: PloneUser/MemberData/str
    :returns: Mapping of all property sheet items of the user
    :rtype: dict
    """
    user = get_user(user_or_username)
    if user is None:
        return {}
    if not callable(user.getUser):
        return {}
    out = {}
    plone_user = user.getUser()
    for sheet in plone_user.listPropertysheets():
        ps = plone_user.getPropertysheet(sheet)
        out.update(dict(ps.propertyItems()))
    return out


def find_objects(uid=None):
    """Find the object by its UID

    1. get the object from the given uid
    2. fetch objects specified in the request parameters
    3. fetch objects located in the request body

    :param uid: The UID of the object to find
    :type uid: string
    :returns: List of found objects
    :rtype: list
    """
    # The objects to cut
    objects = []

    # get the object by the given uid or try to find it by the request
    # parameters
    obj = get_object_by_uid(uid) or get_object_by_request()

    if obj:
        objects.append(obj)
    else:
        # no uid -> go through the record items
        records = req.get_request_data()
        for record in records:
            # try to get the object by the given record
            obj = get_object_by_record(record)

            # no object found for this record
            if obj is None:
                continue
            objects.append(obj)

    return objects
def find_target_container(portal_type, record):
    """Locates a target container for the given portal_type and record

    :param portal_type: The portal type of the object to create
    :type portal_type: string
    :param record: The dictionary representation of a content object
    :type record: dict
    :returns: folder which contains the object
    :rtype: object
    :raises: APIError (404) when no target container can be determined
    """
    portal_type = portal_type or record.get("portal_type")

    # statically configured container locations take precedence
    container = get_container_for(portal_type)
    if container:
        return container

    # NOTE: pop -> the parent information is consumed and not treated as a
    # field value later on
    parent_uid = record.pop("parent_uid", None)
    parent_path = record.pop("parent_path", None)

    target = None

    # Try to find the target object
    if parent_uid:
        target = get_object_by_uid(parent_uid)
    elif parent_path:
        target = get_object_by_path(parent_path)
    else:
        fail(404, "No target UID/PATH information found")

    if not target:
        fail(404, "No target container found")

    return target


def create_object(container, portal_type, **data):
    """Creates an object slug

    :param container: The folderish content where the object gets created
    :param portal_type: The portal type of the object to create
    :type portal_type: string
    :returns: The new created content object
    :rtype: object
    """

    if "id" in data:
        # always omit the id as senaite LIMS generates a proper one
        # NOTE: renamed local variable to not shadow the builtin `id`
        obj_id = data.pop("id")
        logger.warn("Passed in ID '{}' omitted! Senaite LIMS "
                    "generates a proper ID for you" .format(obj_id))

    try:
        # Special case for ARs
        # => return immediately w/o update
        if portal_type == "AnalysisRequest":
            obj = create_analysisrequest(container, **data)
            # Omit values which are already set through the helper
            data = u.omit(data, "SampleType", "Analyses")
            # Set the container as the client, as the AR lives in it
            data["Client"] = container
        # Standard content creation
        else:
            # we want just a minimum viable object and set the data later
            obj = api.create(container, portal_type)
    except Unauthorized:
        fail(401, "You are not allowed to create this content")

    # Update the object with the given data, but omit the id
    try:
        update_object_with_data(obj, data)
    except APIError:
        # Failure in creation process, delete the invalid object
        container.manage_delObjects(obj.id)
        # reraise the error
        raise

    return obj
def create_analysisrequest(container, **data):
    """Create a minimum viable AnalysisRequest

    :param container: A single folderish catalog brain or content object
    :type container: ATContentType/DexterityContentType/CatalogBrain
    :returns: The created AnalysisRequest object
    :raises: APIError (400) when no SampleType is provided
    """
    container = get_object(container)
    request = req.get_request()

    # we need to resolve the SampleType to a full object
    sample_type = data.get("SampleType", None)
    if sample_type is None:
        fail(400, "Please provide a SampleType")

    # TODO We should handle the same values as in the DataManager for this
    # field (UID, path, objects, dictionaries ...)
    results = search(portal_type="SampleType", title=sample_type)

    values = {
        "Analyses": data.get("Analyses", []),
        # take the first catalog match, or None if the lookup failed
        "SampleType": results and get_object(results[0]) or None,
    }

    return create_ar(container, request, values)
def update_object_with_data(content, record):
    """Update the content with the record data

    :param content: A single folderish catalog brain or content object
    :type content: ATContentType/DexterityContentType/CatalogBrain
    :param record: The data to update
    :type record: dict
    :returns: The updated content object
    :rtype: object
    :raises:
        APIError,
        :class:`~plone.jsonapi.routes.exceptions.APIError`
    """

    # ensure we have a full content object
    content = get_object(content)

    # get the proper data manager
    dm = IDataManager(content)

    if dm is None:
        fail(400, "Update for this object is not allowed")

    # Iterate through record items
    for k, v in record.items():
        try:
            success = dm.set(k, v, **record)
        except Unauthorized:
            fail(401, "Not allowed to set the field '%s'" % k)
        # NOTE: `as exc` syntax works in Python 2.6+ and 3.x alike
        # (was the py2-only form `except ValueError, exc`)
        except ValueError as exc:
            fail(400, str(exc))

        if not success:
            logger.warn("update_object_with_data::skipping key=%r", k)
            continue

        logger.debug("update_object_with_data::field %r updated", k)

    # Validate the entire content object
    invalid = validate_object(content, record)
    if invalid:
        fail(400, u.to_json(invalid))

    # do a wf transition
    if record.get("transition", None):
        t = record.get("transition")
        logger.debug(">>> Do Transition '%s' for Object %s", t, content.getId())
        do_transition_for(content, t)

    # reindex the object
    content.reindexObject()
    return content


def validate_object(brain_or_object, data):
    """Validate the entire object

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :param data: The sharing dictionary as returned from the API
    :type data: dict
    :returns: invalidity status (empty dict when valid or not validatable)
    :rtype: dict
    """
    obj = get_object(brain_or_object)

    # Call the validator of AT Content Types
    if is_at_content(obj):
        return obj.validate(data=data)

    return {}
def deactivate_object(brain_or_object):
    """Deactivate the given object

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Nothing
    :rtype: None
    """
    obj = get_object(brain_or_object)
    # deactivating the site root is never allowed
    if is_root(obj):
        fail(401, "Deactivating the Portal is not allowed")
    try:
        do_transition_for(brain_or_object, "deactivate")
    except Unauthorized:
        fail(401, "Not allowed to deactivate object '%s'" % obj.getId())


# -----------------------------------------------------------------------------
# Batching Helpers
# -----------------------------------------------------------------------------

def get_batch(sequence, size, start=0, endpoint=None, complete=False):
    """Create a batched result record out of a sequence (catalog brains)
    """
    batch = make_batch(sequence, size, start)
    items = make_items_for([brain for brain in batch.get_batch()],
                           endpoint, complete=complete)
    return {
        "pagesize": batch.get_pagesize(),
        "next": batch.make_next_url(),
        "previous": batch.make_prev_url(),
        "page": batch.get_pagenumber(),
        "pages": batch.get_numpages(),
        "count": batch.get_sequence_length(),
        "items": items,
    }


def make_batch(sequence, size=25, start=0):
    """Make a batch of the given size from the sequence
    """
    # we call an adapter here to allow backwards compatibility hooks
    return IBatch(Batch(sequence, size, start))
class Batch(object):
    """Adapter for Plone 4.3 batching functionality
    """
    interface.implements(IBatch)

    def __init__(self, batch):
        self.batch = batch

    def get_batch(self):
        return self.batch

    def get_pagesize(self):
        return self.batch.pagesize

    def get_pagenumber(self):
        return self.batch.pagenumber

    def get_numpages(self):
        return self.batch.numpages

    def get_sequence_length(self):
        return self.batch.sequence_length

    def make_next_url(self):
        """URL pointing to the next batch page, or None on the last page
        """
        if not self.batch.has_next:
            return None
        request = req.get_request()
        params = request.form
        params["b_start"] = self.batch.pagenumber * self.batch.pagesize
        return "%s?%s" % (request.URL, urllib.urlencode(params))

    def make_prev_url(self):
        """URL pointing to the previous batch page, or None on the first page
        """
        if not self.batch.has_previous:
            return None
        request = req.get_request()
        params = request.form
        pagesize = self.batch.pagesize
        pagenumber = self.batch.pagenumber
        params["b_start"] = max(pagenumber - 2, 0) * pagesize
        return "%s?%s" % (request.URL, urllib.urlencode(params))


class Batch42(object):
    """Adapter for Plone 4.2 batching functionality
    """
    interface.implements(IBatch)

    def __init__(self, batch):
        self.batch = batch

    def get_batch(self):
        return self.batch

    def get_pagesize(self):
        return self.batch.size

    def get_pagenumber(self):
        return self.batch.pagenumber

    def get_numpages(self):
        return self.batch.numpages

    def get_sequence_length(self):
        return self.batch.sequence_length

    def make_next_url(self):
        """URL pointing to the next batch page, or None on the last page
        """
        # BUGFIX: the check was inverted (`is not None`), so a next URL was
        # returned exactly when NO next page existed. In Plone 4.2 the
        # Batch `next` attribute is None on the last page.
        if self.batch.next is None:
            return None
        request = req.get_request()
        params = request.form
        # NOTE(review): mirrors the original `numpages * size`, but the 4.3
        # adapter uses `pagenumber` here — confirm against Plone 4.2 Batch
        params["b_start"] = self.batch.numpages * self.batch.size
        return "%s?%s" % (request.URL, urllib.urlencode(params))

    def make_prev_url(self):
        """URL pointing to the previous batch page, or None on the first page
        """
        # BUGFIX: inverted check, see make_next_url above
        if self.batch.previous is None:
            return None
        request = req.get_request()
        params = request.form
        params["b_start"] = max(self.batch.numpages - 2, 0) * self.batch.size
        return "%s?%s" % (request.URL, urllib.urlencode(params))
class Catalog(object):
    """Plone catalog adapter
    """
    interface.implements(ICatalog)

    def __init__(self, context):
        # NOTE: `context` is required by the adapter lookup but not used;
        # all queries go through the portal tools below
        self._catalog = api.get_tool("portal_catalog")
        self._bika_catalog = api.get_tool("bika_catalog")
        self._bika_analysis_catalog = api.get_tool("bika_analysis_catalog")
        self._bika_setup_catalog = api.get_tool("bika_setup_catalog")

        self._catalogs = {
            "portal_catalog": self._catalog,
            "bika_catalog": self._bika_catalog,
            "bika_analysis_catalog": self._bika_analysis_catalog,
            "bika_setup_catalog": self._bika_setup_catalog,
        }

    def search(self, query):
        """search the catalog
        """
        logger.info("Catalog query={}".format(query))

        # Support to set the catalog as a request parameter
        catalogs = _.to_list(req.get("catalog", None))
        if catalogs:
            return bikaapi.search(query, catalog=catalogs)
        # Delegate to the search API of Bika LIMS
        return bikaapi.search(query)

    def __call__(self, query):
        return self.search(query)

    def get_catalog(self, name="portal_catalog"):
        return self._catalogs[name]

    def get_schema(self):
        catalog = self.get_catalog()
        return catalog.schema()

    def get_indexes(self):
        """get all indexes managed by this catalog

        TODO: Combine indexes of relevant catalogs depending on the
        portal_type which is searched for.
        """
        catalog = self.get_catalog()
        return catalog.indexes()

    def get_index(self, name):
        """get an index by name

        TODO: Combine indexes of relevant catalogs depending on the
        portal_type which is searched for.
        """
        catalog = self.get_catalog()
        index = catalog._catalog.getIndex(name)
        logger.debug("get_index={} of catalog '{}' --> {}".format(
            name, catalog.__name__, index))
        return index

    def to_index_value(self, value, index):
        """Convert the value for a given index
        """

        # ZPublisher records can be passed to the catalog as is.
        if isinstance(value, HTTPRequest.record):
            return value

        # allow the index to be passed in by name
        if isinstance(index, basestring):
            index = self.get_index(index)

        if index.id == "portal_type":
            # filter out empty entries
            return filter(lambda x: x, _.to_list(value))
        if index.meta_type == "DateIndex":
            return DateTime(value)
        if index.meta_type == "BooleanIndex":
            return bool(value)
        if index.meta_type == "KeywordIndex":
            # NOTE(review): assumes `value` is a comma separated string
            # here — confirm list values never reach this branch
            return value.split(",")

        return value


class CatalogQuery(object):
    """Catalog query adapter
    """
    interface.implements(ICatalogQuery)

    def __init__(self, catalog):
        self.catalog = catalog

    def make_query(self, **kw):
        """create a query suitable for the catalog
        """
        # NOTE: copy to avoid mutating a caller-supplied query dictionary
        query = dict(kw.pop("query", {}))

        query.update(self.get_request_query())
        query.update(self.get_custom_query())
        query.update(self.get_keyword_query(**kw))

        sort_on, sort_order = self.get_sort_spec()
        if sort_on and "sort_on" not in query:
            query.update({"sort_on": sort_on})
        if sort_order and "sort_order" not in query:
            query.update({"sort_order": sort_order})

        logger.info("make_query:: query={} | catalog={}".format(
            query, self.catalog))

        return query

    def get_request_query(self):
        """Checks the request for known catalog indexes and converts the
        values to fit the type of the catalog index.

        :returns: Catalog query
        :rtype: dict
        """
        query = {}

        # only known indexes get observed
        indexes = self.catalog.get_indexes()

        for index in indexes:
            # Check if the request contains a parameter named like the index
            value = req.get(index)
            # No value found, continue
            if value is None:
                continue
            # Convert the found value to format understandable by the index
            index_value = self.catalog.to_index_value(value, index)
            # Conversion returned None, continue
            if index_value is None:
                continue
            # Append the found value to the query
            query[index] = index_value

        return query

    def get_custom_query(self):
        """Extracts custom query keys from the request.

        Parameters which get extracted from the request:

            `q`: Passes the value to the `SearchableText`
            `path`: Creates a path query
            `recent_created`: Creates a date query
            `recent_modified`: Creates a date query

        :returns: Catalog query
        :rtype: dict
        """
        query = {}

        # searchable text queries
        q = req.get_query()
        if q:
            query["SearchableText"] = q

        # physical path queries
        path = req.get_path()
        if path:
            query["path"] = {'query': path, 'depth': req.get_depth()}

        # special handling for recent created/modified
        recent_created = req.get_recent_created()
        if recent_created:
            date = api.calculate_delta_date(recent_created)
            query["created"] = {'query': date, 'range': 'min'}

        recent_modified = req.get_recent_modified()
        if recent_modified:
            date = api.calculate_delta_date(recent_modified)
            query["modified"] = {'query': date, 'range': 'min'}

        return query

    def get_keyword_query(self, **kw):
        """Generates a query from the given keywords.
        Only known indexes make it into the generated query.

        :returns: Catalog query
        :rtype: dict
        """
        query = dict()

        # Only known indexes get observed
        indexes = self.catalog.get_indexes()

        # Handle additional keyword parameters
        for k, v in kw.iteritems():
            # handle uid in keywords
            if k.lower() == "uid":
                k = "UID"
            # handle portal_type in keywords
            if k.lower() == "portal_type":
                if v:
                    v = _.to_list(v)
            if k not in indexes:
                logger.warn("Skipping unknown keyword parameter '%s=%s'" % (k, v))
                continue
            if v is None:
                logger.warn("Skip None value in kw parameter '%s=%s'" % (k, v))
                continue
            logger.debug("Adding '%s=%s' to query" % (k, v))
            query[k] = v

        return query

    def get_sort_spec(self):
        """Build sort specification
        """
        all_indexes = self.catalog.get_indexes()
        si = req.get_sort_on(allowed_indexes=all_indexes)
        so = req.get_sort_order()
        return si, so


# The locations mentioned here take precedence over any given container path
# defined in the request.
+CONTAINER_PATHS_FOR_PORTAL_TYPES = { + # 'ARImport': '', + # 'ARPriorities': '', + 'ARPriority': 'bika_setup/bika_arpriorities', + # 'ARReport': '', + 'ARTemplate': 'bika_setup/bika_artemplates', + # 'ARTemplates': '', + # 'ATBooleanCriterion': '', + # 'ATCurrentAuthorCriterion': '', + # 'ATDateCriteria': '', + # 'ATDateRangeCriterion': '', + # 'ATListCriterion': '', + # 'ATPathCriterion': '', + # 'ATPortalTypeCriterion': '', + # 'ATReferenceCriterion': '', + # 'ATRelativePathCriterion': '', + # 'ATSelectionCriterion': '', + # 'ATSimpleIntCriterion': '', + # 'ATSimpleStringCriterion': '', + # 'ATSortCriterion': '', + # 'Analysis': '', + # 'AnalysisCategories': '', + 'AnalysisCategory': 'bika_setup/bika_analysiscategories', + 'AnalysisProfile': 'bika_setup/bika_analysisprofiles', + # 'AnalysisProfiles': '', + # 'AnalysisRequest': '', + # 'AnalysisRequestsFolder': '', + 'AnalysisService': 'bika_setup/bika_analysisservices', + # 'AnalysisServices': '', + 'AnalysisSpec': 'bika_setup/bika_analysisspecs', + # 'AnalysisSpecs': '', + # 'Attachment': '', + 'AttachmentType': 'bika_setup/bika_attachmenttypes', + # 'AttachmentTypes': '', + # 'Batch': '', + # 'BatchFolder': '', + 'BatchLabel': 'bika_setup/bika_batchlabels', + # 'BatchLabels': '', + # 'BikaSetup': '', + 'Calculation': 'bika_setup/bika_calculations', + # 'Calculations': '', + 'Client': 'clients', + # 'ClientFolder': '', + # 'Collection': '', + # 'Contact': '', + 'Container': 'bika_setup/bika_containers', + # 'ContainerType': '', + # 'ContainerTypes': '', + # 'Containers': '', + 'Department': 'bika_setup/bika_departments', + # 'Departments': '', + # 'Discussion Item': '', + # 'Document': '', + # 'DuplicateAnalysis': '', + # 'Event': '', + # 'File': '', + # 'Folder': '', + 'IdentifierType': 'bika_setup/bika_identifiertypes', + # 'IdentifierTypes': '', + # 'Image': '', + 'Instrument': 'bika_setup/bika_instruments', + # 'InstrumentCalibration': '', + # 'InstrumentCertification': '', + 'InstrumentLocation': 
'bika_setup/bika_instrumentlocations', + # 'InstrumentLocations': '', + # 'InstrumentMaintenanceTask': '', + # 'InstrumentScheduledTask': '', + 'InstrumentType': 'bika_setup/bika_instrumenttypes', + # 'InstrumentTypes': '', + # 'InstrumentValidation': '', + # 'Instruments': '', + # 'Invoice': '', + # 'InvoiceBatch': '', + # 'InvoiceFolder': '', + 'LabContact': 'bika_setup/bika_labcontacts', + # 'LabContacts': '', + 'LabProduct': 'bika_setup/bika_labproducts', + # 'LabProducts': '', + # 'Laboratory': '', + # 'Link': '', + 'Manufacturer': 'bika_setup/bika_manufacturers', + # 'Manufacturers': '', + 'Method': 'methods', + # 'Methods': '', + # 'Multifile': '', + # 'News Item': '', + # 'Plone Site': '', + 'Preservation': 'bika_setup/bika_preservations', + # 'Preservations': '', + # 'Pricelist': '', + # 'PricelistFolder': '', + # 'ReferenceAnalysis': '', + 'ReferenceDefinition': 'bika_setup/bika_referencedefinitions', + # 'ReferenceDefinitions': '', + # 'ReferenceSample': '', + # 'ReferenceSamplesFolder': '', + # 'RejectAnalysis': '', + # 'Report': '', + # 'ReportFolder': '', + 'SRTemplate': 'bika_setup/bika_srtemplates', + # 'SRTemplates': '', + # 'Sample': '', + 'SampleCondition': 'bika_setup/bika_sampleconditions', + # 'SampleConditions': '', + # 'SampleMatrices': '', + 'SampleMatrix': 'bika_setup/bika_samplematrices', + # 'SamplePartition': '', + 'SamplePoint': 'bika_setup/bika_samplepoints', + # 'SamplePoints': '', + 'SampleType': 'bika_setup/bika_sampletypes', + # 'SampleTypes': '', + # 'SamplesFolder': '', + 'SamplingDeviation': 'bika_setup/bika_samplingdeviations', + # 'SamplingDeviations': '', + 'SamplingRound': 'bika_setup/bika_samplingrounds', + # 'SamplingRounds': '', + 'StorageLocation': 'bika_setup/bika_storagelocations', + # 'StorageLocations': '', + 'SubGroup': 'bika_setup/bika_subgroups', + # 'SubGroups': '', + 'Supplier': 'bika_setup/bika_suppliers', + # 'SupplierContact': '', + # 'Suppliers': '', + # 'SupplyOrder': '', + # 'SupplyOrderFolder': '', + # 
'TempFolder': '', + # 'Topic': '', + # 'Worksheet': '', + # 'WorksheetFolder': '', + 'WorksheetTemplate': 'bika_setup/bika_worksheettemplates', + # 'WorksheetTemplates': '', +} + +ALLOWED_PORTAL_TYPES_TO_CREATE = [ + # 'ARImport', + # 'ARPriorities', + 'ARPriority', + # 'ARReport', + 'ARTemplate', + # 'ARTemplates', + # 'ATBooleanCriterion', + # 'ATCurrentAuthorCriterion', + # 'ATDateCriteria', + # 'ATDateRangeCriterion', + # 'ATListCriterion', + # 'ATPathCriterion', + # 'ATPortalTypeCriterion', + # 'ATReferenceCriterion', + # 'ATRelativePathCriterion', + # 'ATSelectionCriterion', + # 'ATSimpleIntCriterion', + # 'ATSimpleStringCriterion', + # 'ATSortCriterion', + 'Analysis', + # 'AnalysisCategories', + 'AnalysisCategory', + 'AnalysisProfile', + # 'AnalysisProfiles', + 'AnalysisRequest', + # 'AnalysisRequestsFolder', + 'AnalysisService', + # 'AnalysisServices', + 'AnalysisSpec', + # 'AnalysisSpecs', + 'Attachment', + 'AttachmentType', + # 'AttachmentTypes', + 'Batch', + # 'BatchFolder', + 'BatchLabel', + # 'BatchLabels', + # 'BikaSetup', + 'Calculation', + # 'Calculations', + 'Client', + # 'ClientFolder', + # 'Collection', + 'Contact', + 'Container', + 'ContainerType', + # 'ContainerTypes', + # 'Containers', + 'Department', + # 'Departments', + # 'Discussion Item', + 'Document', + # 'DuplicateAnalysis', + # 'Event', + 'File', + # 'Folder', + 'IdentifierType', + # 'IdentifierTypes', + 'Image', + 'Instrument', + 'InstrumentCalibration', + # 'InstrumentCertification', + 'InstrumentLocation', + # 'InstrumentLocations', + # 'InstrumentMaintenanceTask', + # 'InstrumentScheduledTask', + 'InstrumentType', + # 'InstrumentTypes', + # 'InstrumentValidation', + # 'Instruments', + 'Invoice', + 'InvoiceBatch', + # 'InvoiceFolder', + 'LabContact', + # 'LabContacts', + 'LabProduct', + # 'LabProducts', + # 'Laboratory', + # 'Link', + 'Manufacturer', + # 'Manufacturers', + 'Method', + # 'Methods', + 'Multifile', + # 'News Item', + # 'Plone Site', + 'Preservation', + # 
'Preservations', + 'Pricelist', + # 'PricelistFolder', + # 'ReferenceAnalysis', + 'ReferenceDefinition', + # 'ReferenceDefinitions', + 'ReferenceSample', + # 'ReferenceSamplesFolder', + 'RejectAnalysis', + 'Report', + # 'ReportFolder', + 'SRTemplate', + # 'SRTemplates', + 'Sample', + 'SampleCondition', + # 'SampleConditions', + # 'SampleMatrices', + 'SampleMatrix', + # 'SamplePartition', + 'SamplePoint', + # 'SamplePoints', + 'SampleType', + # 'SampleTypes', + # 'SamplesFolder', + 'SamplingDeviation', + # 'SamplingDeviations', + 'SamplingRound', + # 'SamplingRounds', + 'StorageLocation', + # 'StorageLocations', + 'SubGroup', + # 'SubGroups', + 'Supplier', + 'SupplierContact', + # 'Suppliers', + 'SupplyOrder', + # 'SupplyOrderFolder', + # 'TempFolder', + # 'Topic', + 'Worksheet', + # 'WorksheetFolder', + 'WorksheetTemplate', + # 'WorksheetTemplates', +] diff --git a/src/senaite/jsonapi/configure.zcml b/src/senaite/jsonapi/configure.zcml new file mode 100644 index 0000000..590f4d5 --- /dev/null +++ b/src/senaite/jsonapi/configure.zcml @@ -0,0 +1,200 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/senaite/jsonapi/datamanagers.py b/src/senaite/jsonapi/datamanagers.py new file mode 100644 index 0000000..0cc0a76 --- /dev/null +++ b/src/senaite/jsonapi/datamanagers.py @@ -0,0 +1,256 @@ +# -*- coding: utf-8 -*- + +import Missing + +from zope import interface + +from AccessControl import Unauthorized +from AccessControl import getSecurityManager + +from Products.CMFCore import permissions + +from senaite.lims import logger +from senaite.lims.jsonapi import api +from senaite.lims.jsonapi.interfaces import IDataManager +from senaite.lims.jsonapi.interfaces import IFieldManager + + +class BaseDataManager(object): + """Base Data Manager + """ + interface.implements(IDataManager) + + def __init__(self, 
class BaseDataManager(object):
    """Abstract base class for data managers.

    A data manager encapsulates how named values are read from and written
    to a given context (catalog brain, portal root, AT or DX content).
    """
    interface.implements(IDataManager)

    def __init__(self, context):
        self.context = context

    def get(self, name):
        """Get the value for name
        """
        # NOTE: fixed from `raise NotImplemented(...)` -- NotImplemented is
        # a singleton, not an exception class, and raised a TypeError.
        raise NotImplementedError("Getter must be implemented by subclass")

    def set(self, name, value, **kw):
        """Set the value for name
        """
        raise NotImplementedError("Setter must be implemented by subclass")

    def json_data(self, name, default=None):
        """Get a JSON compatible value of the named field
        """
        raise NotImplementedError("Get Info must be implemented by subclass")


class BrainDataManager(BaseDataManager):
    """Data Adapter for Catalog Brains
    """

    def get(self, name):
        """Get the value of the named metadata column
        """
        # read the attribute
        attr = getattr(self.context, name, None)
        if callable(attr):
            return attr()
        return attr

    def set(self, name, value, **kw):
        """Setter is not used for catalog brains
        """
        logger.warn("Setting is not allowed on catalog brains")

    def json_data(self, name, default=None):
        """Get a JSON compatible value of the field
        """
        value = self.get(name)
        # Missing.Value marks absent catalog metadata
        if value is Missing.Value:
            return default
        return value


class PortalDataManager(BaseDataManager):
    """Data Adapter for the Portal Object
    """

    def get(self, name):
        """Get the value by name

        :raises Unauthorized: if the user lacks the View permission
        """

        # check read permission
        sm = getSecurityManager()
        permission = permissions.View
        if not sm.checkPermission(permission, self.context):
            raise Unauthorized("Not allowed to view the Plone portal")

        # read the attribute
        attr = getattr(self.context, name, None)
        if callable(attr):
            return attr()

        # XXX not really nice, but we want the portal to behave like an
        # ordinary content type. Therefore we need to inject the necessary
        # data.
        if name == "uid":
            return "0"
        if name == "path":
            return "/%s" % self.context.getId()
        return attr

    def set(self, name, value, **kw):
        """Set the attribute to the given value.

        The keyword arguments represent the other attribute values
        to integrate constraints to other values.

        :raises Unauthorized: if the user lacks the ManagePortal permission
        :returns: True on success, False if the attribute does not exist
        """

        # check write permission
        sm = getSecurityManager()
        permission = permissions.ManagePortal
        if not sm.checkPermission(permission, self.context):
            raise Unauthorized("Not allowed to modify the Plone portal")

        # set the attribute
        if not hasattr(self.context, name):
            return False
        # NOTE(review): item assignment on the portal object -- presumably
        # relies on OFS item protocol; confirm setattr is not intended
        self.context[name] = value
        return True

    def json_data(self, name, default=None):
        """Get a JSON compatible structure for the named attribute
        """
        value = self.get(name)
        return value


class ATDataManager(BaseDataManager):
    """Data Adapter for AT Content Types
    """

    def get(self, name):
        """Get the value of the field by name
        """

        # fetch the field by name
        field = api.get_field(self.context, name)

        # bail out if we have no field
        if not field:
            return None

        # delegate to the field manager adapter of the field
        fieldmanager = IFieldManager(field)
        return fieldmanager.get(self.context)

    def set(self, name, value, **kw):
        """Set the field to the given value.

        The keyword arguments represent the other field values
        to integrate constraints to other values.
        """

        # fetch the field by name
        field = api.get_field(self.context, name)

        # bail out if we have no field
        if not field:
            return False

        # call the field adapter and set the value
        fieldmanager = IFieldManager(field)
        return fieldmanager.set(self.context, value, **kw)

    def json_data(self, name):
        """Get a JSON compatible structure for the named attribute
        """

        # fetch the field by name
        field = api.get_field(self.context, name)

        # bail out if we have no field
        if not field:
            return None

        fieldmanager = IFieldManager(field)
        return fieldmanager.json_data(self.context)


class DexterityDataManager(BaseDataManager):
    """Data Adapter for Dexterity Content Types
    """

    def get(self, name):
        """Get the value of the field by name
        """

        # Check the read permission of the context
        # XXX: This should be done on field level by the field manager adapter
        # NOTE: fixed -- this previously called can_write() and raised
        # "not allowed to modify" for a *read* access, denying users with
        # View but without ModifyPortalContent permission.
        if not self.can_read():
            raise Unauthorized("You are not allowed to view this content")

        # fetch the field by name
        field = api.get_field(self.context, name)

        # bail out if we have no field
        if field is None:
            return None

        # delegate to the field manager adapter of the field
        fieldmanager = IFieldManager(field)
        return fieldmanager.get(self.context)

    def set(self, name, value, **kw):
        """Set the field to the given value.

        The keyword arguments represent the other field values
        to integrate constraints to other values.
        """

        # Check the write permission of the context
        # XXX: This should be done on field level by the field manager adapter
        if not self.can_write():
            raise Unauthorized("You are not allowed to modify this content")

        # fetch the field by name
        field = api.get_field(self.context, name)

        # bail out if we have no field
        if not field:
            return False

        # call the field adapter and set the value
        fieldmanager = IFieldManager(field)
        return fieldmanager.set(self.context, value, **kw)

    def json_data(self, name):
        """Get a JSON compatible structure for the named attribute
        """

        # Check the read permission of the context (fixed from can_write,
        # see get() above)
        # XXX: This should be done on field level by the field manager adapter
        if not self.can_read():
            raise Unauthorized("You are not allowed to view this content")

        # fetch the field by name
        field = api.get_field(self.context, name)

        # bail out if we have no field
        if not field:
            return None

        fieldmanager = IFieldManager(field)
        return fieldmanager.json_data(self.context)

    def can_write(self):
        """Check if the current user may modify the context
        """
        sm = getSecurityManager()
        permission = permissions.ModifyPortalContent

        if not sm.checkPermission(permission, self.context):
            return False
        return True

    def can_read(self):
        """Check if the current user may view the context
        """
        sm = getSecurityManager()
        if not sm.checkPermission(permissions.View, self.context):
            return False
        return True


# --- src/senaite/jsonapi/dataproviders.py ---

from zope import interface
from zope import component

from plone.dexterity.interfaces import IDexterityContent

from AccessControl import Unauthorized
from Products.CMFCore.interfaces import ISiteRoot
from Products.ZCatalog.interfaces import ICatalogBrain
from Products.ATContentTypes.interfaces import IATContentType

from senaite.lims import logger
from senaite.lims.jsonapi import api
from senaite.lims.jsonapi.interfaces import IInfo
from senaite.lims.jsonapi.interfaces import ICatalog
from senaite.lims.jsonapi.interfaces import IDataManager
# Sentinel used to detect "key not present" in dict lookups.
# NOTE: fixed from `_marker = object` (the *type* itself); the intent was
# clearly a unique instance.
_marker = object()


class Base(object):
    """ Base Adapter
    """
    interface.implements(IInfo)

    def __init__(self, context):
        self.context = context
        self.keys = []
        self.ignore = []

        # Mapped attributes to extract from the object besides the schema keys.
        # These keys are always included
        self.attributes = {
            "id": "getId",
            "uid": "UID",
            "title": "Title",
            "description": "Description",
            "created": "created",
            "modified": "modified",
            "effective": "effective",
            "portal_type": "portal_type",
            "tags": "Subject",
            "author": "Creator",
        }

    def to_dict(self):
        """ extract the data of the content and return it as a dictionary
        """

        # 1. extract the schema fields
        data = self.extract_fields()

        # 2. include custom key-value pairs listed in the mapping dictionary
        for key, attr in self.attributes.iteritems():
            # key already extracted in the first step
            if data.get(key, _marker) is not _marker:
                continue  # don't overwrite
            if key in self.ignore:
                continue  # skip ignores
            # fetch the mapped attribute
            value = getattr(self.context, attr, None)
            # handle function calls
            if callable(value):
                value = value()
            # map the value to the given key from the mapping
            data[key] = api.to_json_value(self.context, key, value)
        return data

    def extract_fields(self):
        """Extract the given fieldnames from the object

        :returns: Schema name/value mapping
        :rtype: dict
        """

        # get the proper data manager for the object
        dm = IDataManager(self.context)

        # filter out ignored fields
        fieldnames = filter(lambda name: name not in self.ignore, self.keys)

        # schema mapping
        out = dict()

        for fieldname in fieldnames:
            try:
                # get the field value with the data manager
                fieldvalue = dm.json_data(fieldname)
            # https://github.com/collective/plone.jsonapi.routes/issues/52
            # -> skip restricted fields
            except Unauthorized:
                logger.debug("Skipping restricted field '%s'" % fieldname)
                continue
            except ValueError:
                logger.debug("Skipping invalid field '%s'" % fieldname)
                continue

            out[fieldname] = api.to_json_value(self.context, fieldname, fieldvalue)

        return out

    def __call__(self):
        return self.to_dict()


class ZCDataProvider(Base):
    """ Catalog Brain Adapter
    """
    interface.implements(IInfo)
    component.adapts(ICatalogBrain)

    def __init__(self, context):
        super(ZCDataProvider, self).__init__(context)
        catalog_adapter = ICatalog(context)
        # extract the metadata
        self.keys = catalog_adapter.get_schema()

        # add specific catalog brain mappings
        self.attributes.update({
            "path": "getPath",
        })

        # ignore some metadata values, which we already mapped
        self.ignore = [
            'CreationDate',
            'Creator',
            'Date',
            'Description',
            'EffectiveDate',
            'ExpirationDate',
            'ModificationDate',
            'Subject',
            'Title',
            'Type',
            'UID',
            'cmf_uid',
            'getIcon',
            'getId',
            'getObjSize',
            'getRemoteUrl',
            'listCreators',
            'meta_type',
        ]


class DexterityDataProvider(Base):
    """ Data Provider for Dexterity based content types
    """
    interface.implements(IInfo)
    component.adapts(IDexterityContent)

    def __init__(self, context):
        super(DexterityDataProvider, self).__init__(context)

        # get the behavior and schema fields from the data manager
        schema = api.get_schema(context)
        behaviors = api.get_behaviors(context)
        self.keys = schema.names() + behaviors.keys()


class ATDataProvider(Base):
    """ Archetypes Adapter
    """
    interface.implements(IInfo)
    component.adapts(IATContentType)

    def __init__(self, context):
        super(ATDataProvider, self).__init__(context)

        # get the schema fields from the data manager
        schema = api.get_schema(context)
        self.keys = schema.keys()
class SiteRootDataProvider(Base):
    """ Site Root Adapter
    """
    interface.implements(IInfo)
    component.adapts(ISiteRoot)

    def __init__(self, context):
        super(SiteRootDataProvider, self).__init__(context)
        # virtual keys, which are handled by the data manager
        self.keys = ["uid", "path"]


# --- src/senaite/jsonapi/exceptions.py ---

from senaite.lims.jsonapi import request as req


class APIError(Exception):
    """Exception Class for API Errors

    NOTE: instantiating this error already writes the HTTP status to the
    current response -- creating an APIError has a side effect.
    """

    def __init__(self, status, message):
        self.message = message
        self.status = status
        self.setStatus(status)

    def setStatus(self, status):
        # propagate the status code to the current Zope response
        request = req.getRequest()
        request.response.setStatus(status)

    def __str__(self):
        return self.message


# --- src/senaite/jsonapi/fieldmanagers.py ---

import mimetypes

from zope import interface

from DateTime import DateTime
from AccessControl import Unauthorized
from Products.Archetypes.utils import mapply

from senaite.lims import logger
from senaite.lims.jsonapi import api
from senaite.lims.jsonapi import underscore as u
from senaite.lims.jsonapi.interfaces import IFieldManager


class ZopeSchemaFieldManager(object):
    """Adapter to get/set the value of Zope Schema Fields
    """
    interface.implements(IFieldManager)

    def __init__(self, field):
        self.field = field

    def get_field_name(self):
        return self.field.getName()

    def get(self, instance, **kw):
        """Get the value of the field
        """
        return self._get(instance, **kw)

    def set(self, instance, value, **kw):
        """Set the value of the field
        """
        return self._set(instance, value, **kw)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value
        """
        value = self.get(instance)
        return value or default

    def _set(self, instance, value, **kw):
        """Set the value of the field

        :raises Unauthorized: if the field is read only
        """
        # NOTE: log labels fixed; they previously referred to a
        # "DexterityFieldManager" which does not exist in this module.
        logger.debug("ZopeSchemaFieldManager::set: value=%r" % value)

        # Check if the field is read only
        if self.field.readonly:
            raise Unauthorized("Field is read only")

        # Validate
        self.field.validate(value)

        # TODO: Check security on the field level
        return self.field.set(instance, value)

    def _get(self, instance, **kw):
        """Get the value of the field
        """
        logger.debug("ZopeSchemaFieldManager::get: instance={} field={}"
                     .format(instance, self.field))

        # TODO: Check security on the field level
        return self.field.get(instance)


class RichTextFieldManager(ZopeSchemaFieldManager):
    """Adapter to get/set the value of Rich Text Fields
    """
    interface.implements(IFieldManager)

    def set(self, instance, value, **kw):
        # deferred import: plone.app.textfield is only needed when a rich
        # text value is actually written
        from plone.app.textfield.value import RichTextValue
        value = RichTextValue(raw=value,
                              outputMimeType=self.field.output_mime_type)
        return self._set(instance, value, **kw)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value
        """
        value = self.get(instance)
        if value:
            return value.output
        return value
class NamedFileFieldManager(ZopeSchemaFieldManager):
    """Adapter to get/set the value of Named File Fields
    """
    interface.implements(IFieldManager)

    def get_size(self, instance):
        """Return the file size of the file
        """
        value = self.get(instance)
        return getattr(value, "size", 0)

    def get_data(self, instance):
        """Return the file data
        """
        value = self.get(instance)
        return getattr(value, "data", "")

    def get_filename(self, instance):
        """Get the filename
        """
        value = self.get(instance)
        return getattr(value, "filename", "")

    def get_content_type(self, instance):
        """Get the content type of the file object
        """
        value = self.get(instance)
        return getattr(value, "contentType", "")

    def get_download_url(self, instance, default=None):
        """Calculate the download url
        """
        download = default
        # calculate the download url
        # NOTE: fixed -- the format string previously never referenced the
        # `filename` keyword it was given, so the URL ended in a literal
        # placeholder instead of the stored filename.
        download = "{url}/@@download/{fieldname}/{filename}".format(
            url=api.get_url(instance),
            fieldname=self.get_field_name(),
            filename=self.get_filename(instance),
        )
        return download

    def set(self, instance, value, **kw):
        """Decode the base64 value and store a NamedFile/NamedImage
        """
        logger.debug("NamedFileFieldManager::set:File field"
                     "detected ('%r'), base64 decoding value", self.field)

        data = str(value).decode("base64")
        filename = kw.get("filename") or kw.get("id") or kw.get("title")
        contentType = kw.get("mimetype") or kw.get("content_type")

        if contentType:
            # create NamedFile with content type information
            value = self.field._type(data=data,
                                     contentType=contentType,
                                     filename=filename)
        else:
            # create NamedFile w/o content type information
            # -> will be guessed by the extension of the filename
            value = self.field._type(data=data, filename=filename)

        return self.field.set(instance, value)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value
        """
        return api.get_file_info(instance, self.get_field_name())


class NamedImageFieldManager(NamedFileFieldManager):
    """Adapter to get/set the value of Named Image Fields
    """
    interface.implements(IFieldManager)


class RelationListFieldManager(ZopeSchemaFieldManager):
    """Adapter to get/set the value of Z3C Relation Lists
    """
    interface.implements(IFieldManager)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value
        """
        value = self.get(instance)

        out = []
        for rel in value:
            # skip relations whose target no longer exists
            if rel.isBroken():
                logger.warn("Skipping broken relation {}".format(repr(rel)))
                continue
            obj = rel.to_object
            out.append(api.get_url_info(obj))
        return out


class ATFieldManager(object):
    """Adapter to get/set the value of AT Fields
    """
    interface.implements(IFieldManager)

    def __init__(self, field):
        self.field = field
        self.name = self.get_field_name()

    def get_field(self):
        """Get the adapted field
        """
        return self.field

    def get_field_name(self):
        """Get the field name
        """
        return self.field.getName()

    def get(self, instance, **kw):
        """Get the value of the field
        """
        return self._get(instance, **kw)

    def set(self, instance, value, **kw):
        """Set the value of the field
        """
        return self._set(instance, value, **kw)

    def _set(self, instance, value, **kw):
        """Set the value of the field

        :raises Unauthorized: if the field is not writable by the user
        """
        logger.debug("ATFieldManager::set: value=%r" % value)

        # check field permission
        if not self.field.checkPermission("write", instance):
            raise Unauthorized("You are not allowed to write the field {}"
                               .format(self.name))

        # check if field is writable
        if not self.field.writeable(instance):
            raise Unauthorized("Field {} is read only."
                               .format(self.name))

        # id fields take only strings
        if self.name == "id":
            value = str(value)

        # get the field mutator
        mutator = self.field.getMutator(instance)

        # Inspect function and apply *args and **kwargs if possible.
        mapply(mutator, value, **kw)

        return True

    def _get(self, instance, **kw):
        """Get the value of the field

        :raises Unauthorized: if the field is not readable by the user
        """
        logger.debug("ATFieldManager::get: instance={} field={}"
                     .format(instance, self.field))

        # check the field permission
        if not self.field.checkPermission("read", instance):
            raise Unauthorized("You are not allowed to read the field {}"
                               .format(self.name))

        # return the field value
        return self.field.get(instance)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value
        """
        value = self.get(instance)
        return value or default


class ComputedFieldManager(ATFieldManager):
    """Adapter to get/set the value of Computed Fields

    NOTE: docstring fixed -- it previously said "Text Fields".
    """
    interface.implements(IFieldManager)

    def set(self, instance, value, **kw):
        """Not applicable for Computed Fields
        """
        logger.warn("Setting is not allowed for computed fields")

    def get(self, instance, **kw):
        """Get the value of the field
        """
        # Gracefully avoid programming errors in Computed fields
        try:
            return self._get(instance, **kw)
        except AttributeError:
            logger.error("Could not get the value of the computed field '{}'"
                         .format(self.get_field_name()))
            return None


class TextFieldManager(ATFieldManager):
    """Adapter to get/set the value of Text Fields
    """
    interface.implements(IFieldManager)
class DateTimeFieldManager(ATFieldManager):
    """Adapter to get/set the value of DateTime Fields
    """
    interface.implements(IFieldManager)

    def set(self, instance, value, **kw):
        """Converts the value into a DateTime object before setting.

        :returns: False if the value is not parseable as a DateTime
        """
        try:
            value = DateTime(value)
        except SyntaxError:
            logger.warn("Value '{}' is not a valid DateTime string"
                        .format(value))
            return False

        self._set(instance, value, **kw)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value
        """
        value = self.get(instance)
        return api.to_iso_date(value) or default


class FileFieldManager(ATFieldManager):
    """Adapter to get/set the value of File Fields
    """
    interface.implements(IFieldManager)

    def get_size(self, instance):
        """Return the file size of the file
        """
        return self.field.get_size(instance)

    def get_data(self, instance):
        """Return the file data
        """
        value = self.get(instance)
        return getattr(value, "data", "")

    def get_filename(self, instance):
        """Get the filename
        """
        filename = self.field.getFilename(instance)
        if filename:
            return filename

        # no filename stored -> derive one from the field name and the
        # extension guessed from the content type
        fieldname = self.get_field_name()
        content_type = self.get_content_type(instance)
        extension = mimetypes.guess_extension(content_type)

        return fieldname + extension

    def get_content_type(self, instance):
        """Get the content type of the file object
        """
        return self.field.getContentType(instance)

    def get_download_url(self, instance, default=None):
        """Calculate the download url
        """
        download = default
        # calculate the download url
        download = "{url}/at_download/{fieldname}".format(
            url=instance.absolute_url(), fieldname=self.get_field_name())
        return download

    def set(self, instance, value, **kw):
        """Decodes base64 value and set the file object
        """
        value = str(value).decode("base64")

        # handle the filename
        if "filename" not in kw:
            # NOTE: log message typo fixed ("FielFieldManager")
            logger.debug("FileFieldManager::set: No Filename detected "
                         "-> using title or id")
            kw["filename"] = kw.get("id") or kw.get("title")

        self._set(instance, value, **kw)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value
        """
        return api.get_file_info(instance, self.get_field_name())


class ReferenceFieldManager(ATFieldManager):
    """Adapter to get/set the value of Reference Fields
    """
    interface.implements(IFieldManager)

    def __init__(self, field):
        super(ReferenceFieldManager, self).__init__(field)
        self.allowed_types = field.allowed_types
        self.multi_valued = field.multiValued

    def is_multi_valued(self):
        return self.multi_valued

    def set(self, instance, value, **kw):  # noqa
        """Set the value of the reference field

        Accepts a UID, an object, a physical path, a catalog query dict,
        or a list mixing any of these (plus plain title strings).

        :raises ValueError: when multiple targets resolve for a single
            valued field
        """
        ref = []

        # The value is an UID
        if api.is_uid(value):
            ref.append(api.get_object_by_uid(value))

        # The value is already an object
        if api.is_at_content(value):
            ref.append(value)

        # The value is a dictionary
        # -> handle it like a catalog query
        if u.is_dict(value):
            results = api.search(portal_type=self.allowed_types, **value)
            ref = map(api.get_object, results)

        # The value is a list
        if u.is_list(value):
            for item in value:
                # uid
                if api.is_uid(item):
                    ref.append(api.get_object_by_uid(item))
                    continue

                # object
                if api.is_at_content(item):
                    ref.append(api.get_object(item))
                    continue

                # path
                if api.is_path(item):
                    ref.append(api.get_object_by_path(item))
                    continue

                # dict (catalog query)
                if u.is_dict(item):
                    results = api.search(portal_type=self.allowed_types, **item)
                    objs = map(api.get_object, results)
                    ref.extend(objs)
                    continue

                # Plain string
                # -> do a catalog query for title
                if isinstance(item, basestring):
                    results = api.search(portal_type=self.allowed_types, title=item)
                    objs = map(api.get_object, results)
                    ref.extend(objs)
                    continue

        # The value is a physical path
        if api.is_path(value):
            ref.append(api.get_object_by_path(value))

        # Handle non multi valued fields
        if not self.multi_valued and len(ref) > 1:
            raise ValueError("Multiple values given for single valued field {}"
                             .format(self.field))

        return self._set(instance, ref, **kw)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value
        """
        value = self.get(instance)
        if value and self.is_multi_valued():
            return map(api.get_url_info, value)
        elif value and not self.is_multi_valued():
            return api.get_url_info(value)
        return value or default


class ProxyFieldManager(ATFieldManager):
    """Adapter to get/set the value of Proxy Fields
    """
    interface.implements(IFieldManager)

    def __init__(self, field):
        super(ProxyFieldManager, self).__init__(field)
        self.proxy_object = None
        self.proxy_field = None

    def get_proxy_object(self, instance):
        """Get the proxy object of the field
        """
        return self.field._get_proxy(instance)

    def get_proxy_field(self, instance):
        """Get the proxied field of this field
        """
        proxy_object = self.get_proxy_object(instance)
        if not proxy_object:
            return None
        return proxy_object.getField(self.name)

    def set(self, instance, value, **kw):
        """Set the value of the (proxy) field
        """
        proxy_field = self.get_proxy_field(instance)
        if proxy_field is None:
            return None
        # set the field with the proper field manager of the proxy field
        fieldmanager = IFieldManager(proxy_field)
        return fieldmanager.set(instance, value, **kw)


class ARAnalysesFieldManager(ATFieldManager):
    """Adapter to get/set the value of Bika AR Analyses Fields
    """
    interface.implements(IFieldManager)

    def json_data(self, instance, default=None):
        """Get a JSON compatible value

        NOTE: fixed mutable default argument (was `default=[]`); behavior
        is unchanged -- an empty result still yields an empty list.
        """
        if default is None:
            default = []
        value = self.get(instance)
        out = map(api.get_url_info, value)
        return out or default


# --- src/senaite/jsonapi/interfaces.py ---

from zope import interface


class IInfo(interface.Interface):
    """ JSON Info Interface
    """

    def to_dict():
        """ return the dictionary representation of the object
        """

    def __call__():
        """ return the dictionary representation of the object
        """
IDataManager(interface.Interface): + """ Field Interface + """ + + def get(name): + """ Get the value of the named field with + """ + + def set(name, value): + """ Set the value of the named field + """ + + def json_data(name, default=None): + """ Get a JSON compatible structure from the value + """ + + +class IFieldManager(interface.Interface): + """A Field Manager is able to set/get the values of a single field. + """ + + def get(instance, **kwargs): + """Get the value of the field + """ + + def set(instance, value, **kwargs): + """Set the value of the field + """ + + def json_data(instance, default=None): + """Get a JSON compatible structure from the value + """ + + +class ICatalog(interface.Interface): + """ Plone catalog interface + """ + + def search(query): + """ search the catalog and return the results + """ + + def get_catalog(): + """ get the used catalog tool + """ + + def get_indexes(): + """ get all indexes managed by this catalog + """ + + def get_index(name): + """ get an index by name + """ + + def to_index_value(value, index): + """ Convert the value for a given index + """ + + +class ICatalogQuery(interface.Interface): + """ Plone catalog query interface + """ + + def make_query(**kw): + """ create a new query or augment an given query + """ + + +class IBatch(interface.Interface): + """ Batch Interface + """ + + def get_batch(): + """ return the wrapped batch object + """ + + def get_pagesize(): + """ return the current page size + """ + + def get_pagenumber(): + """ return the current page number + """ + + def get_numpages(): + """ return the current number of pages + """ + + def get_sequence_length(): + """ return the length + """ + + def make_next_url(): + """ build and return the next url + """ + + def make_prev_url(): + """ build and return the previous url + """ diff --git a/src/senaite/jsonapi/request.py b/src/senaite/jsonapi/request.py new file mode 100644 index 0000000..8067cf7 --- /dev/null +++ b/src/senaite/jsonapi/request.py @@ -0,0 
+1,241 @@ +# -*- coding: utf-8 -*- + +import json +import urlparse +import pkg_resources + +from zope import interface + +from zope.globalrequest import getRequest + +try: + pkg_resources.get_distribution('plone.protect') + from plone.protect.interfaces import IDisableCSRFProtection +except (pkg_resources.DistributionNotFound, ImportError): + HAS_PLONE_PROTECT = False +else: + HAS_PLONE_PROTECT = True + +from senaite.lims import logger +from senaite.lims.jsonapi import underscore as _ + + +# These values evaluate to True +TRUE_VALUES = ["y", "yes", "1", "true", True] + + +def get_request(): + """ return the request object + """ + return getRequest() + + +def disable_csrf_protection(): + """ disables the CSRF protection + https://pypi.python.org/pypi/plone.protect + """ + if not HAS_PLONE_PROTECT: + logger.warn( + "Can not disable CSRF protection – please install plone.protect" + ) + return False + request = get_request() + interface.alsoProvides(request, IDisableCSRFProtection) + return True + + +def get_form(): + """ return the request form dictionary + """ + return get_request().form + + +def get_query_string(): + """ return the parsed query string + """ + qs = get_request().get("QUERY_STRING") + return dict(urlparse.parse_qsl(qs)) + + +def get(key, default=None): + """ return the key from the request + """ + data = get_form() or get_query_string() + return data.get(key, default) + + +def is_true(key, default=False): + """ Check if the value is in TRUE_VALUES + """ + value = get(key, default) + if isinstance(value, list): + value = value[0] + if isinstance(value, bool): + return value + if value is default: + return default + return value.lower() in TRUE_VALUES + + +def get_cookie(key, default=None): + """ return the key from the request + """ + return get_request().cookies.get(key, default) + + +def get_complete(default=None): + """ returns the 'complete' from the request + """ + return is_true("complete", default) + + +def get_children(default=None): + """ 
returns the 'children' from the request + """ + return is_true("children", default) + + +def get_filedata(default=None): + """ returns the 'filedata' from the request + """ + return is_true('filedata') + + +def get_workflow(default=None): + """ returns the 'workflow' from the request + """ + return is_true("workflow", default) + + +def get_sharing(default=None): + """ returns the 'sharing' from the request + """ + return is_true("sharing", default) + + +def get_sort_limit(): + """ returns the 'sort_limit' from the request + """ + limit = _.convert(get("sort_limit"), _.to_int) + if (limit < 1): + limit = None # catalog raises IndexError if limit < 1 + return limit + + +def get_batch_size(): + """ returns the 'limit' from the request + """ + return _.convert(get("limit"), _.to_int) or 25 + + +def get_batch_start(): + """ returns the 'start' from the request + """ + return _.convert(get("b_start"), _.to_int) or 0 + + +def get_sort_on(allowed_indexes=None): + """ returns the 'sort_on' from the request + """ + sort_on = get("sort_on") + if allowed_indexes and sort_on not in allowed_indexes: + logger.warn("Index '{}' is not in allowed_indexes".format(sort_on)) + return None + return sort_on + + +def get_sort_order(): + """ returns the 'sort_order' from the request + """ + sort_order = get("sort_order", "ascending") + if sort_order in ["ASC", "ascending", "a", "asc", "up", "high"]: + return "ascending" + if sort_order in ["DESC", "descending", "d", "desc", "down", "low"]: + return "descending" + return sort_order + + +def get_query(): + """ returns the 'query' from the request + """ + q = get("q", "") + + qs = q.lstrip("*.!$%&/()=#-+:'`´^") + if qs and not qs.endswith("*"): + qs += "*" + return qs + + +def get_path(): + """ returns the 'path' from the request + """ + return get("path", "") + + +def get_depth(): + """ returns the 'depth' from the request + """ + return _.convert(get("depth", 0), _.to_int) + + +def get_recent_created(): + """ returns the 'recent_created' 
from the request + """ + return get("recent_created", None) + + +def get_recent_modified(): + """ returns the 'recent_modified' from the request + """ + return get("recent_modified", None) + + +def get_request_data(): + """ extract and convert the json data from the request + + returns a list of dictionaries + """ + request = get_request() + data = request.get("BODY", "{}") + if not is_json_deserializable(data): + from plone.jsonapi.routes.exceptions import APIError + raise APIError(400, "Request Data is not JSON deserializable – Check JSON Syntax!") + return _.convert(json.loads(data), _.to_list) + + +def get_json(): + """ get the request json payload + """ + data = get_request_data().pop() + return data or dict() + + +def get_json_key(key, default=None): + """ return the key from the json payload + """ + return get_json().get(key, default) + + +def set_json_item(key, value): + """ manipulate json data on the fly + """ + data = get_json() + data[key] = value + + request = get_request() + request["BODY"] = json.dumps(data) + + +def is_json_deserializable(thing): + """Checks if the given thing can be deserialized from JSON + + :param thing: The object to check if it can be serialized + :type thing: arbitrary object + :returns: True if it can be JSON deserialized + :rtype: bool + """ + try: + json.loads(thing) + return True + except (ValueError): + return False diff --git a/src/senaite/jsonapi/underscore.py b/src/senaite/jsonapi/underscore.py new file mode 100644 index 0000000..e4039bb --- /dev/null +++ b/src/senaite/jsonapi/underscore.py @@ -0,0 +1,344 @@ +# -*- coding: utf-8 -*- + +import ast +import json +import types + + +def fail(error): + """ Raises a RuntimeError with the given error Message + + >>> fail("This failed badly") + Traceback (most recent call last): + ... 
+ RuntimeError: This failed badly + """ + raise RuntimeError(error) + + +def is_string(thing): + """ checks if an object is a string/unicode type + + >>> is_string("") + True + >>> is_string(u"") + True + >>> is_string(str()) + True + >>> is_string(unicode()) + True + >>> is_string(1) + False + """ + return type(thing) in types.StringTypes + + +def is_list(thing): + """ checks if an object is a list type + + >>> is_list([]) + True + >>> is_list(list()) + True + >>> is_list("[]") + False + >>> is_list({}) + False + """ + return isinstance(thing, types.ListType) + + +def is_tuple(thing): + """ checks if an object is a tuple type + + >>> is_tuple(()) + True + >>> is_tuple(tuple()) + True + >>> is_tuple("()") + False + >>> is_tuple([]) + False + """ + return isinstance(thing, types.TupleType) + + +def is_dict(thing): + """ checks if an object is a dictionary type + + >>> is_dict({}) + True + >>> is_dict(dict()) + True + >>> is_dict("{}") + False + >>> is_dict([]) + False + """ + return isinstance(thing, types.DictType) + + +def is_digit(thing): + """ checks if an object is a digit + + >>> is_digit(1) + True + >>> is_digit("1") + True + >>> is_digit("a") + False + >>> is_digit([]) + False + """ + return str(thing).isdigit() + + +def to_int(thing): + """ coverts an object to int + + >>> to_int("0") + 0 + >>> to_int(1) + 1 + >>> to_int("1") + 1 + >>> to_int("a") + + """ + if is_digit(thing): + return int(thing) + return None + + +def to_string(thing): + """ coverts an object to string + + >>> to_string(1) + '1' + >>> to_string([]) + '[]' + >>> to_string(u"a") + 'a' + >>> to_string(None) + 'None' + >>> to_string(object()) + '' + """ + try: + return str(thing) + except UnicodeEncodeError: + return thing.encode('ascii', 'replace') + + +def to_list(thing): + """ converts an object to a list + + >>> to_list(1) + [1] + >>> to_list([1,2,3]) + [1, 2, 3] + >>> to_list(("a", "b", "c")) + ['a', 'b', 'c'] + >>> to_list(dict(a=1, b=2)) + [{'a': 1, 'b': 2}] + >>> to_list(None) + [] + 
>>> to_list("['a', 'b', 'c']") + ['a', 'b', 'c'] + >>> to_list("") + [''] + >>> to_list([]) + [] + >>> to_list("['[]']") + ['[]'] + >>> sorted(to_list(set(["a", "b", "c"]))) + ['a', 'b', 'c'] + """ + if thing is None: + return [] + if isinstance(thing, set): + return list(thing) + if isinstance(thing, types.StringTypes): + if thing.startswith("["): + # handle a list inside a string coming from the batch navigation + return ast.literal_eval(thing) + if not (is_list(thing) or is_tuple(thing)): + return [thing] + return list(thing) + + +def convert(value, converter): + """ Converts a value with a given converter function. + + >>> convert("1", to_int) + 1 + >>> convert("0", to_int) + 0 + >>> convert("a", to_int) + + """ + if not callable(converter): + fail("Converter must be a function") + return converter(value) + + +def pluck(col, key, default=None): + """ Extracts a list of values from a collection of dictionaries + + >>> stooges = [{"name": "moe", "age": 40}, + ... {"name": "larry", "age": 50}, + ... {"name": "curly", "age": 60}] + >>> pluck(stooges, "name") + ['moe', 'larry', 'curly'] + + It only works with collections + + >>> curly = stooges.pop() + >>> pluck(curly, "age") + Traceback (most recent call last): + ... 
+ RuntimeError: First argument must be a list or tuple + """ + if not (is_list(col) or is_tuple(col)): + fail("First argument must be a list or tuple") + + def _block(dct): + if not is_dict(dct): + return [] + return dct.get(key, default) + + return map(_block, col) + + +def pick(dct, *keys): + """ Returns a copy of the dictionary filtered to only have values for the + whitelisted keys (or list of valid keys) + + >>> pick({"name": "moe", "age": 50, "userid": "moe1"}, "name", "age") + {'age': 50, 'name': 'moe'} + + """ + copy = dict() + for key in keys: + if key in dct.keys(): + copy[key] = dct[key] + return copy + + +def omit(dct, *keys): + """ Returns a copy of the dictionary filtered to omit the blacklisted keys + (or list of keys) + + >>> omit({"name": "moe", "age": 50, "userid": "moe1"}, "userid", "age") + {'name': 'moe'} + """ + copy = dict() + for key in dct: + if key not in keys: + copy[key] = dct[key] + return copy + + +def rename(dct, mapping): + """ Rename the keys of a dictionary with the given mapping + + >>> rename({"a": 1, "BBB": 2}, {"a": "AAA"}) + {'AAA': 1, 'BBB': 2} + """ + + def _block(memo, key): + if key in dct: + memo[mapping[key]] = dct[key] + return memo + else: + return memo + return reduce(_block, mapping, omit(dct, *mapping.keys())) + + +def alias(col, mapping): + """ Returns a collection of dictionaries with the keys renamed according to + the mapping + + >>> libraries = [{"isbn": 1, "ed": 1}, {"isbn": 2, "ed": 2}] + >>> alias(libraries, {"ed": "edition"}) + [{'edition': 1, 'isbn': 1}, {'edition': 2, 'isbn': 2}] + + >>> alias({"a": 1}, {"a": "b"}) + [{'b': 1}] + """ + if not is_list(col): + col = [col] + + def _block(dct): + return rename(dct, mapping) + + return map(_block, col) + + +def first(thing, n=0): + """ get the first element of a list + + >>> lst = [1, 2, 3, 4, 5] + >>> first(lst) + 1 + >>> first(lst, 3) + [1, 2, 3] + >>> first(lst, 10) + [1, 2, 3, 4, 5] + >>> first({"key": "value"}) + {'key': 'value'} + >>> first(("a", "b", 
"c")) + 'a' + >>> first(['']) + '' + >>> first([''], 5) + [''] + >>> first(['', '']) + '' + >>> first(False) + False + >>> first("") + '' + >>> first(None) + >>> first([]) + """ + n = to_int(n) + if is_list(thing) or is_tuple(thing): + if len(thing) == 0: + return None + if n > 0: + return thing[0:n] + return thing[0] + return thing + + +def to_json(thing): + """ parse to JSON + + >>> data = {} + >>> to_json(data) + '{}' + >>> data = None + >>> to_json(data) + 'null' + >>> data = object() + >>> to_json(data) + '' + >>> data = {"format": "json"} + >>> to_json(data) + '{"format": "json"}' + >>> data = 1 + >>> to_json(data) + '1' + """ + try: + return json.dumps(thing) + except TypeError: + return "" + + +if __name__ == '__main__': + import doctest + doctest.testmod(raise_on_error=False, + optionflags=doctest.ELLIPSIS | + doctest.NORMALIZE_WHITESPACE) diff --git a/src/senaite/jsonapi/v1/__init__.py b/src/senaite/jsonapi/v1/__init__.py new file mode 100644 index 0000000..def0ffd --- /dev/null +++ b/src/senaite/jsonapi/v1/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +import pkgutil + +from senaite.lims import logger +from senaite.lims.jsonapi.v1 import routes +from senaite.lims.jsonapi import add_route as add_senaite_route + +__version__ = 1 +__date__ = "2017-08-08" + +BASE_URL = "/senaite/v1" + + +def add_route(route, endpoint=None, **kw): + """Add a new JSON API route + """ + + # ensure correct amout of slashes + def apiurl(route): + return '/'.join(s.strip('/') for s in ["", BASE_URL, route]) + + return add_senaite_route(apiurl(route), endpoint, **kw) + + +prefix = routes.__name__ + "." 
+for importer, modname, ispkg in pkgutil.iter_modules( + routes.__path__, prefix): + module = __import__(modname, fromlist="dummy") + logger.info("INITIALIZED SENAITE JSONAPI V1 ROUTE ---> %s" % module.__name__) diff --git a/src/senaite/jsonapi/v1/configure.zcml b/src/senaite/jsonapi/v1/configure.zcml new file mode 100644 index 0000000..c8efd4d --- /dev/null +++ b/src/senaite/jsonapi/v1/configure.zcml @@ -0,0 +1,5 @@ + + + diff --git a/src/senaite/jsonapi/v1/routes/__init__.py b/src/senaite/jsonapi/v1/routes/__init__.py new file mode 100644 index 0000000..40a96af --- /dev/null +++ b/src/senaite/jsonapi/v1/routes/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/src/senaite/jsonapi/v1/routes/content.py b/src/senaite/jsonapi/v1/routes/content.py new file mode 100644 index 0000000..453f11a --- /dev/null +++ b/src/senaite/jsonapi/v1/routes/content.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- + +from senaite.lims.jsonapi import api +from senaite.lims.jsonapi.v1 import add_route +from senaite.lims.jsonapi.exceptions import APIError + +ACTIONS = "create,update,delete" + + +# / +@add_route("/", + "senaite.lims.jsonapi.v1.get", methods=["GET"]) +# +# // +@add_route("//", + "senaite.lims.jsonapi.v1.get", methods=["GET"]) +def get(context, request, resource=None, uid=None): + """GET + """ + # we have a UID as resource, return the record + if api.is_uid(resource): + return api.get_record(resource) + + portal_type = api.resource_to_portal_type(resource) + if portal_type is None: + raise APIError(404, "Not Found") + return api.get_batched(portal_type=portal_type, uid=uid, endpoint="senaite.lims.jsonapi.v1.get") + + +# http://werkzeug.pocoo.org/docs/0.11/routing/#builtin-converters +# http://werkzeug.pocoo.org/docs/0.11/routing/#custom-converters +# +# / +@add_route("/", + "senaite.lims.jsonapi.v1.action", methods=["POST"]) +# +# // +@add_route("//", + "senaite.lims.jsonapi.v1.action", methods=["POST"]) +# +# // +@add_route("//", + "senaite.lims.jsonapi.v1.action", 
methods=["POST"]) +# +# /// +@add_route("///", + "senaite.lims.jsonapi.v1.action", methods=["POST"]) +def action(context, request, action=None, resource=None, uid=None): + """Various HTTP POST actions + + Case 1: / + -> Return the full object immediately in the root of the JSON API response + /@@API/senaite/v1/ + + Case 2: // + -> The actions (update, delete) will performed on the object identified by + -> The actions (create) will use the as the parent folder + /@@API/senaite/v1// + + Case 3: / + -> The "target" object will be located by a location given in the request body (uid, path, parent_path + id) + -> The actions (cut, copy, update, delete) will performed on the target object + -> The actions (create) will use the target object as the container + /@@API/senaite/v1// + + Case 4: // + -> The actions (cut, copy, update, delete) will performed on the object identified by + -> The actions (create) will use the as the parent folder + /@@API/senaite/v1// + """ + + # Fetch and call the action function of the API + func_name = "{}_items".format(action) + action_func = getattr(api, func_name, None) + if action_func is None: + api.fail(500, "API has no member named '{}'".format(func_name)) + + portal_type = api.resource_to_portal_type(resource) + items = action_func(portal_type=portal_type, uid=uid) + + return { + "count": len(items), + "items": items, + "url": api.url_for("senaite.lims.jsonapi.v1.action", action=action), + } + + +@add_route("/search", + "senaite.lims.jsonapi.v1.search", methods=["GET"]) +def search(context, request): + """Generic search route + + /@@API/v2/search -> returns all contents of the portal + /@@API/v2/search?portal_type=Folder -> returns only folders + ... 
+ """ + return api.get_batched() diff --git a/src/senaite/jsonapi/v1/routes/users.py b/src/senaite/jsonapi/v1/routes/users.py new file mode 100644 index 0000000..fb6e997 --- /dev/null +++ b/src/senaite/jsonapi/v1/routes/users.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- + +from plone import api as ploneapi + +from senaite.lims.jsonapi import api +from senaite.lims.jsonapi import logger +from senaite.lims.jsonapi import request as req +from senaite.lims.jsonapi.v1 import add_route + + +def get_user_info(user): + """Get the user information + """ + user = api.get_user(user) + current = api.get_current_user() + + if api.is_anonymous(): + return { + "username": current.getUserName(), + "authenticated": False, + "roles": current.getRoles(), + "api_url": api.url_for("plone.jsonapi.routes.users", username="current"), + } + + # nothing to do + if user is None: + logger.warn("No user found for {}".format(user)) + return None + + # plone user + pu = user.getUser() + + info = { + "username": user.getUserName(), + "roles": user.getRoles(), + "groups": pu.getGroups(), + "authenticated": current == user, + "api_url": api.url_for("senaite.lims.jsonapi.v1.users", username=user.getId()), + } + + for k, v in api.get_user_properties(user).items(): + if api.is_date(v): + v = api.to_iso_date(v) + if not api.is_json_serializable(v): + logger.warn("User property '{}' is not JSON serializable".format(k)) + continue + info[k] = v + + return info + + +# ----------------------------------------------------------------------------- +# API ROUTES +# ----------------------------------------------------------------------------- + +@add_route("/users", "senaite.lims.jsonapi.v1.users", methods=["GET"]) +@add_route("/users/", "senaite.lims.jsonapi.v1.users", methods=["GET"]) +def get(context, request, username=None): + """Plone users route + """ + user_ids = [] + + # Don't allow anonymous users to query a user other than themselves + if api.is_anonymous(): + username = "current" + + # query all 
users if no username was given + if username is None: + user_ids = api.get_member_ids() + elif username == "current": + current_user = api.get_current_user() + user_ids = [current_user.getId()] + else: + user_ids = [username] + + # Prepare batch + size = req.get_batch_size() + start = req.get_batch_start() + batch = api.make_batch(user_ids, size, start) + + # get the user info for the user ids in the current batch + users = map(get_user_info, batch.get_batch()) + + return { + "pagesize": batch.get_pagesize(), + "next": batch.make_next_url(), + "previous": batch.make_prev_url(), + "page": batch.get_pagenumber(), + "pages": batch.get_numpages(), + "count": batch.get_sequence_length(), + "items": users, + } + + +@add_route("/auth", "senaite.lims.jsonapi.v1.auth", methods=["GET"]) +@add_route("/users/auth", "senaite.lims.jsonapi.v1.users.auth", methods=["GET"]) +def auth(context, request): + """ Basic Authentication + """ + + if ploneapi.user.is_anonymous(): + request.response.setStatus(401) + request.response.setHeader('WWW-Authenticate', + 'basic realm="JSONAPI AUTH"', 1) + + logger.info("*** BASIC AUTHENTICATE ***") + return {} + + +@add_route("/login", "senaite.lims.jsonapi.v1.login", methods=["GET", "POST"]) +@add_route("/users/login", "senaite.lims.jsonapi.v1.users.login", methods=["GET", "POST"]) +def login(context, request): + """ Login Route + + Login route to authenticate a user against Plone. 
+ """ + # extract the data + __ac_name = request.get("__ac_name", None) + __ac_password = request.get("__ac_password", None) + + logger.info("*** LOGIN %s ***" % __ac_name) + + if __ac_name is None: + api.fail(400, "__ac_name is missing") + if __ac_password is None: + api.fail(400, "__ac_password is missing") + + acl_users = api.get_tool("acl_users") + + # XXX hard coded + acl_users.credentials_cookie_auth.login() + + # XXX amin user won't be logged in if I use this approach + # acl_users.login() + # response = request.response + # acl_users.updateCredentials(request, response, __ac_name, __ac_password) + + if api.is_anonymous(): + api.fail(401, "Invalid Credentials") + + # return the JSON in the same format like the user route + return get(context, request, username=__ac_name) + + +@add_route("/logout", "senaite.lims.jsonapi.v1.logout", methods=["GET"]) +@add_route("/users/logout", "senaite.lims.jsonapi.v1.users.logout", methods=["GET"]) +def logout(context, request): + """ Logout Route + """ + logger.info("*** LOGOUT ***") + + acl_users = api.get_tool("acl_users") + acl_users.logout(request) + + return { + "url": api.url_for("senaite.lims.jsonapi.v1.users"), + "success": True + } diff --git a/src/senaite/jsonapi/v1/routes/version.py b/src/senaite/jsonapi/v1/routes/version.py new file mode 100644 index 0000000..4a4d29c --- /dev/null +++ b/src/senaite/jsonapi/v1/routes/version.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- + +from senaite.lims.jsonapi import url_for +from senaite.lims.jsonapi import add_route + +from senaite.lims.jsonapi.v1 import __version__ +from senaite.lims.jsonapi.v1 import __date__ + + +@add_route("/senaite/v1", "senaite.lims.jsonapi.v1.version", methods=["GET"]) +@add_route("/senaite/v1/version", "senaite.lims.jsonapi.v1.version", methods=["GET"]) +def version(context, request): + """get the version, build number and date of this API + """ + return { + "url": url_for("senaite.lims.jsonapi.v1.version"), + "version": __version__, + "date": 
__date__, + } diff --git a/src/senaite/tests/__init__.py b/src/senaite/tests/__init__.py new file mode 100644 index 0000000..40a96af --- /dev/null +++ b/src/senaite/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/src/senaite/tests/base.py b/src/senaite/tests/base.py new file mode 100644 index 0000000..332e191 --- /dev/null +++ b/src/senaite/tests/base.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- + +import unittest2 as unittest + +from plone.testing import z2 + +from plone.app.testing import setRoles +from plone.app.testing import applyProfile +from plone.app.testing import TEST_USER_ID +from plone.app.testing import PLONE_FIXTURE +from plone.app.testing import PloneSandboxLayer +from plone.app.testing import FunctionalTesting + +from bika.lims.testing import BIKA_SIMPLE_FIXTURE +from bika.lims.testing import BIKA_FUNCTIONAL_FIXTURE + + +class SimpleTestLayer(PloneSandboxLayer): + """Setup Plone with installed AddOn only + """ + defaultBases = (BIKA_SIMPLE_FIXTURE, PLONE_FIXTURE,) + + def setUpZope(self, app, configurationContext): + super(SimpleTestLayer, self).setUpZope(app, configurationContext) + + # Load ZCML + import senaite.lims + + self.loadZCML(package=senaite.lims) + + # Install product and call its initialize() function + z2.installProduct(app, 'senaite.lims') + + def setUpPloneSite(self, portal): + super(SimpleTestLayer, self).setUpPloneSite(portal) + + # Apply Setup Profile (portal_quickinstaller) + applyProfile(portal, 'senaite.lims:default') + + +class FunctionalTestLayer(SimpleTestLayer): + """Setup Plone for functional Tests + """ + defaultBases = (BIKA_FUNCTIONAL_FIXTURE, ) + + def setUpZope(self, app, configurationContext): + super(FunctionalTestLayer, self).setUpZope(app, configurationContext) + + def setUpPloneSite(self, portal): + super(FunctionalTestLayer, self).setUpPloneSite(portal) + + +### +# Use for simple tests (w/o contents) +### +SIMPLE_FIXTURE = SimpleTestLayer() +SIMPLE_TESTING = FunctionalTesting( + 
bases=(SIMPLE_FIXTURE, ), + name="senaite.lims:SimpleTesting" +) + +### +# Use for functional tests (w/ contents) +# Using this Layer takes approx. 1 minute to load +### +FUNCTIONAL_FIXTURE = FunctionalTestLayer() +FUNCTIONAL_TESTING = FunctionalTesting( + bases=(FUNCTIONAL_FIXTURE, ), + name="senaite.lims:FunctionalTesting" +) + + +class SimpleTestCase(unittest.TestCase): + layer = SIMPLE_TESTING + + def setUp(self): + super(SimpleTestCase, self).setUp() + + self.app = self.layer['app'] + self.portal = self.layer['portal'] + self.request = self.layer['request'] + self.request['ACTUAL_URL'] = self.portal.absolute_url() + setRoles(self.portal, TEST_USER_ID, ['LabManager', 'Manager']) + + +class FunctionalTestCase(unittest.TestCase): + layer = FUNCTIONAL_TESTING + + def setUp(self): + super(FunctionalTestCase, self).setUp() + + self.app = self.layer['app'] + self.portal = self.layer['portal'] + self.request = self.layer['request'] + self.request['ACTUAL_URL'] = self.portal.absolute_url() + setRoles(self.portal, TEST_USER_ID, ['LabManager', 'Member']) diff --git a/src/senaite/tests/test_doctests.py b/src/senaite/tests/test_doctests.py new file mode 100644 index 0000000..d4e1481 --- /dev/null +++ b/src/senaite/tests/test_doctests.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2016-2017 BECHEM GmbH. 
+ +import doctest + +import unittest2 as unittest + +from Testing import ZopeTestCase as ztc + +from .base import SimpleTestCase + + +def test_suite(): + suite = unittest.TestSuite() + suite.addTests([ + ztc.ZopeDocFileSuite( + '../docs/API.rst', + test_class=SimpleTestCase, + optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE, + ), + ]) + return suite diff --git a/src/senaite/tests/test_setup.py b/src/senaite/tests/test_setup.py new file mode 100644 index 0000000..10c50a6 --- /dev/null +++ b/src/senaite/tests/test_setup.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- + +from senaite.lims.tests.base import SimpleTestCase + + +class TestSetup(SimpleTestCase): + """ Test Setup + """ + + def test_is_bika_lims_installed(self): + qi = self.portal.portal_quickinstaller + self.assertTrue(qi.isProductInstalled("bika.lims")) + + def test_is_senaite_lims_installed(self): + qi = self.portal.portal_quickinstaller + self.assertTrue(qi.isProductInstalled("senaite.lims")) + + +def test_suite(): + from unittest import TestSuite, makeSuite + suite = TestSuite() + suite.addTest(makeSuite(TestSetup)) + return suite diff --git a/static/logo.png b/static/logo.png new file mode 100644 index 0000000..7696a0e Binary files /dev/null and b/static/logo.png differ