diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..0349f255 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,4 @@ +* text=auto +*.py text +*.yaml text +*.md text diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..fecb95be --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +**/__pycache__ +build/ +dist/ +*egg-info/ +.coverage +*,cover +*.xml +htmlcov/ +.pylint.d/ diff --git a/Dockerfile-builder b/Dockerfile-builder new file mode 100644 index 00000000..bffe78c1 --- /dev/null +++ b/Dockerfile-builder @@ -0,0 +1,15 @@ +FROM python:3.6-buster + +COPY requirements.txt requirements-dev.txt /tmp/ + +RUN pip3 install -r /tmp/requirements-dev.txt \ + && rm -f /tmp/requirements-dev.txt \ + && rm -f /tmp/requirements.txt + +ENV USER=builder +ENV HOME=/home/${USER} +ENV PATH=${HOME}/.local/bin:${PATH} +RUN mkdir -p ${HOME} +WORKDIR ${HOME} + + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..db551701 --- /dev/null +++ b/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2021 Jitsuin Inc + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md index e69de29b..3b28a550 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,65 @@ +# Jitsuin Archivist Client + +The standard Jitsuin Archivist python client. + +Please note that the canonical API for Jitsuin Archivist is always the REST API +documented at https://jitsuin-archivist.readthedocs.io + +# Development + +## Pre-requisites + +Required tools for this repo are task-runner and docker-ce. + +Install task runner: https://github.com/go-task/task +Install docker-ce: https://docs.docker.com/get-docker/ + +## Workflow + +To see what options are available simply execute: + +```bash +task +``` + +All development is done using a docker image. To create the image execute +the following command. This command only has to be repeated if requirements.txt +or requirements-dev.txt change. + +Dependencies are defined in requirements.txt for the archivist package and +requirements-dev.txt for the tools used to build, test and publish the +archivist package. + +To build the docker builder image: +```bash +task builder +``` + +Make a change to the code and validate the changes: + +```bash +task check +``` + +If ok run the unittests: + +```bash +task unittests +``` + +If 100% coverage and no test failures generate the wheel: + +```bash +task wheel +``` + +Lastly to publish the package to PyPi: + +```bash +task publish +``` + +Note that this requires credentials and will only normally be done by a Jitsuin +representative. 
"""access_policies interface

    Wraps the generic Archivist CRUDL verbs with the constants for the
    IAM access-policies endpoints.

    NOT TESTED
"""

from .constants import (
    SEP,
    ACCESS_POLICIES_SUBPATH,
    ACCESS_POLICIES_LABEL,
    ASSETS_LABEL,
)

DEFAULT_PAGE_SIZE = 500


class _AccessPoliciesClient:
    """Access to the iam/v1 access_policies endpoint of an Archivist service."""

    def __init__(self, archivist):
        """archivist: Archivist connection that issues the actual requests."""
        self._archivist = archivist

    def create(self, request):
        """Create an access policy from a complete request dictionary."""
        return AccessPolicy(**self._archivist.post(
            f"{ACCESS_POLICIES_SUBPATH}/{ACCESS_POLICIES_LABEL}",
            request,
        ))

    def read(self, identity):
        """Read an access policy by identity e.g. access_policies/xxxx..."""
        return AccessPolicy(**self._archivist.get(
            ACCESS_POLICIES_SUBPATH,
            identity,
        ))

    def update(self, identity, request):
        """Partially update an access policy by identity."""
        return AccessPolicy(**self._archivist.patch(
            ACCESS_POLICIES_SUBPATH,
            identity,
            request,
        ))

    def delete(self, identity):
        """Delete an access policy by identity."""
        return self._archivist.delete(ACCESS_POLICIES_SUBPATH, identity)

    @staticmethod
    def __query(props):
        """Normalize optional filter properties to a dict."""
        query = props or {}
        return query

    def count(self, *, query=None):
        """Count access policies matching the optional query."""
        return self._archivist.count(
            f"{ACCESS_POLICIES_SUBPATH}/{ACCESS_POLICIES_LABEL}",
            query=self.__query(query)
        )

    def list(self, *, page_size=DEFAULT_PAGE_SIZE, query=None):
        """Generate access policies matching the optional query."""
        return (
            AccessPolicy(**a) for a in self._archivist.list(
                f"{ACCESS_POLICIES_SUBPATH}/{ACCESS_POLICIES_LABEL}",
                ACCESS_POLICIES_LABEL,
                page_size=page_size,
                query=self.__query(query)
            )
        )

    # additional queries on different endpoints
    def count_matching_assets(self, access_policy_id, *, query=None):
        """Count assets matching an access policy.

        Fixed: Archivist.count() takes only the path plus keyword
        arguments, so the stray positional label argument (which raised
        TypeError) has been removed.
        """
        return self._archivist.count(
            SEP.join((ACCESS_POLICIES_SUBPATH, access_policy_id, ASSETS_LABEL)),
            query=self.__query(query)
        )

    def list_matching_assets(self, access_policy_id, *, page_size=DEFAULT_PAGE_SIZE, query=None):
        """Generate assets matching an access policy.

        NOTE(review): the returned records are assets but are wrapped in
        AccessPolicy — confirm whether these should be Asset objects.
        """
        return (
            AccessPolicy(**a) for a in self._archivist.list(
                SEP.join((ACCESS_POLICIES_SUBPATH, access_policy_id, ASSETS_LABEL)),
                ASSETS_LABEL,
                page_size=page_size,
                query=self.__query(query)
            )
        )

    def count_matching_access_policies(self, asset_id, *, query=None):
        """Count access policies matching an asset.

        Fixed: stray positional label argument removed — see
        count_matching_assets above.
        """
        return self._archivist.count(
            SEP.join((ACCESS_POLICIES_SUBPATH, asset_id, ACCESS_POLICIES_LABEL)),
            query=self.__query(query)
        )

    def list_matching_access_policies(self, asset_id, *, page_size=DEFAULT_PAGE_SIZE, query=None):
        """Generate access policies matching an asset.

        Fixed: the path now uses ACCESS_POLICIES_LABEL, consistent with
        count_matching_access_policies; it previously used ASSETS_LABEL.
        """
        return (
            AccessPolicy(**a) for a in self._archivist.list(
                SEP.join((ACCESS_POLICIES_SUBPATH, asset_id, ACCESS_POLICIES_LABEL)),
                ACCESS_POLICIES_LABEL,
                page_size=page_size,
                query=self.__query(query)
            )
        )


class AccessPolicy(dict):
    """AccessPolicy object — a plain dict of the JSON response."""
class Archivist:  # pylint: disable=too-many-instance-attributes
    """Connection to an Archivist service.

    Provides the generic CRUDL verbs (get/post/patch/delete/list/count)
    used by the per-endpoint clients attached as attributes (assets,
    events, locations, attachments).

    Args:
        url: base url of the Archivist service.
        auth: string representing a JWT token (bearer auth).
        cert: filepath containing both private key and certificate.
        verify: enable/disable TLS certificate verification.

    Exactly one of auth or cert must be specified.

    Raises:
        ArchivistIllegalArgumentError: neither or both of auth/cert given.
        ArchivistNotFoundError: cert filepath does not exist.
    """

    def __init__(self, url, *, auth=None, cert=None, verify=True):
        """See class docstring for argument semantics."""
        self._headers = {'content-type': 'application/json'}
        if auth is not None:
            self._headers['authorization'] = 'Bearer ' + auth.strip()

        self._url = url
        self._verify = verify
        if not cert and not auth:
            raise ArchivistIllegalArgumentError(
                "Either auth or cert must be specified"
            )

        if cert and auth:
            raise ArchivistIllegalArgumentError(
                "Either auth or cert must be specified but not both"
            )

        if cert:
            if not os_path_isfile(cert):
                raise ArchivistNotFoundError(
                    f"Cert file {cert} does not exist"
                )

        # Assigned unconditionally (None for token auth) so the cert
        # property is always defined; requests accepts cert=None.
        self._cert = cert

        self.assets = _AssetsClient(self)
        self.events = _EventsClient(self)
        self.locations = _LocationsClient(self)
        self.attachments = _AttachmentsClient(self)

    @property
    def headers(self):
        """Default headers sent with every request."""
        return self._headers

    @property
    def url(self):
        """Base url of the Archivist service."""
        return self._url

    @property
    def verify(self):
        """Whether TLS certificates are verified."""
        return self._verify

    @property
    def cert(self):
        """Filepath of client key/certificate, or None for token auth."""
        return self._cert

    def __add_headers(self, headers):
        """Merge optional extra headers over the default headers."""
        if headers is not None:
            newheaders = {**self.headers, **headers}
        else:
            newheaders = self.headers

        return newheaders

    def get(self, subpath, identity, *, headers=None):
        """GET an entity and return the decoded JSON body.

        subpath: e.g. v2 or iam/v1
        identity: e.g. assets/xxxxxxxxxxxxxxxxxxxxxxxxxxxx`
        """
        response = requests.get(
            SEP.join((self.url, ROOT, subpath, identity)),
            headers=self.__add_headers(headers),
            verify=self.verify,
            cert=self.cert,
        )

        error = parse_response(response)
        if error is not None:
            raise error

        return response.json()

    def get_file(self, subpath, identity, fd, *, headers=None):
        """GET an entity, streaming its body into fd in 4KiB chunks.

        subpath: e.g. v2 or iam/v1
        identity: e.g. assets/xxxxxxxxxxxxxxxxxxxxxxxxxxxx`
        fd: a writable file object (usually from open())
        """
        response = requests.get(
            SEP.join((self.url, ROOT, subpath, identity)),
            headers=self.__add_headers(headers),
            verify=self.verify,
            cert=self.cert,
            stream=True,
        )
        error = parse_response(response)
        if error is not None:
            raise error

        for chunk in response.iter_content(chunk_size=4096):
            if chunk:
                fd.write(chunk)

        # NOTE(review): after the body has been consumed by iter_content,
        # response.json() raises in requests for a binary blob — confirm
        # whether callers actually expect the Response object here.
        return response.json()

    def post(self, path, request, *, headers=None):
        """POST a JSON request body and return the decoded JSON response.

        path: e.g. v2/assets
        """

        response = requests.post(
            SEP.join((self.url, ROOT, path)),
            data=json.dumps(request),
            headers=self.__add_headers(headers),
            verify=self.verify,
            cert=self.cert,
        )

        error = parse_response(response)
        if error is not None:
            raise error

        return response.json()

    def post_file(self, path, fd, mtype):
        """Upload a file to an endpoint as multipart/form-data.

        path: e.g. v1/blobs
        fd: a readable file object (usually from open())
        mtype: mime type (image/jpg)
        """

        multipart = MultipartEncoder(
            fields={
                'file': ('filename', fd, mtype),
            }
        )
        headers = {
            'content-type': multipart.content_type,
        }
        response = requests.post(
            SEP.join((self.url, ROOT, path)),
            data=multipart,
            headers=self.__add_headers(headers),
            verify=self.verify,
            cert=self.cert,
        )

        error = parse_response(response)
        if error is not None:
            raise error

        return response.json()

    def delete(self, subpath, identity, *, headers=None):
        """DELETE an entity and return the decoded JSON response.

        subpath: e.g. v2 or iam/v1
        identity: e.g. assets/xxxxxxxxxxxxxxxxxxxxxxxxxxxx`
        """
        response = requests.delete(
            SEP.join((self.url, ROOT, subpath, identity)),
            headers=self.__add_headers(headers),
            verify=self.verify,
            cert=self.cert,
        )

        error = parse_response(response)
        if error is not None:
            raise error

        return response.json()

    def patch(self, subpath, identity, request, *, headers=None):
        """PATCH an entity and return the decoded JSON response.

        subpath: e.g. v2 or iam/v1
        identity: e.g. assets/xxxxxxxxxxxxxxxxxxxxxxxxxxxx`
        """

        response = requests.patch(
            SEP.join((self.url, ROOT, subpath, identity)),
            data=json.dumps(request),
            headers=self.__add_headers(headers),
            verify=self.verify,
            cert=self.cert,
        )

        error = parse_response(response)
        if error is not None:
            raise error

        return response.json()

    def __list(self, path, args, *, headers=None):
        """GET a list endpoint, appending args as the query string."""
        if args:
            path = '?'.join((path, args))

        response = requests.get(
            SEP.join((self.url, ROOT, path)),
            headers=self.__add_headers(headers),
            verify=self.verify,
            cert=self.cert,
        )
        error = parse_response(response)
        if error is not None:
            raise error

        return response

    @staticmethod
    def __query(query):
        """Flatten a nested query dict into a sorted k=v query string."""
        return query and '&'.join(
            sorted(
                f"{k}={v}" for k, v in flatten(query, reducer='dot').items()
            )
        )

    def get_by_signature(self, path, field, query, *, headers=None):
        """Read an entity indirectly by searching for its signature.

        It is expected that the query parameters will result in only a
        single entity being returned.

        path: e.g. v2/assets
        field: e.g. assets - collective noun of entity
        query: query dictionary e.g. {"attributes": {
                   "arc_display_name": "waste container no. 1", },
               }

        Raises:
            ArchivistBadFieldError: field not present in the response.
            ArchivistNotFoundError: no matching entity.
            ArchivistDuplicateError: more than one matching entity.
        """

        # page_size=2 is enough to detect duplicates without paging.
        paging = "page_size=2"
        qry = self.__query(query)

        response = self.__list(
            path,
            '&'.join(
                (a for a in (paging, qry) if a)
            ),
            headers=headers,
        )

        data = response.json()

        try:
            records = data[field]
        except KeyError as ex:
            raise ArchivistBadFieldError(f"No {field} found") from ex

        if len(records) == 0:
            raise ArchivistNotFoundError("No entity found")

        if len(records) > 1:
            raise ArchivistDuplicateError(f"{len(records)} found")

        return records[0]

    def count(self, path, *, query=None):
        """Return the count of objects that meet query.

        Uses the x-request-total-count header so only one record is
        actually fetched.

        path: e.g. v2/assets
        query: query dictionary e.g. {"confirmation_status": "CONFIRMED", }
        """

        paging = "page_size=1"
        qry = self.__query(query)
        headers = {HEADERS_REQUEST_TOTAL_COUNT: 'true'}

        # v2/assets?page_size=10&something=something...

        response = self.__list(
            path,
            '&'.join(
                (a for a in (paging, qry) if a)
            ),
            headers=headers,
        )

        return int(response.headers[HEADERS_TOTAL_COUNT])

    def list(self, path, field, *, page_size=None, query=None, headers=None):
        """Return generator that lists objects.

        path: e.g. v2/assets
        field: e.g. assets - collective noun of entity
        page_size: optional number of items per request e.g. 50
        query: query dictionary e.g. {"confirmation_status": "CONFIRMED", }

        If page size is specified return the list of records in batches
        of page_size until next_page_token in response is null.

        If page size is unspecified return up to the internal limit of
        records (different for each endpoint).
        """

        paging = page_size and f"page_size={page_size}"
        qry = self.__query(query)

        # v2/assets?page_size=10&something=something...

        while True:
            response = self.__list(
                path,
                '&'.join(
                    (a for a in (paging, qry) if a)
                ),
                headers=headers,
            )
            data = response.json()

            try:
                records = data[field]
            except KeyError as ex:
                raise ArchivistBadFieldError(f"No {field} found") from ex

            for record in records:
                yield record

            token = data.get("next_page_token")
            if not token:
                break

            # subsequent requests continue from the server-issued token
            paging = f"page_token={token}"
+""" + +from copy import deepcopy + +from .constants import ( + ASSETS_SUBPATH, + ASSETS_LABEL, +) +from .confirm import wait_for_confirmation, wait_for_confirmed + +DEFAULT_PAGE_SIZE=500 + + +class _AssetsClient: + """docstring + """ + def __init__(self, archivist): + """docstring + """ + self._archivist = archivist + + def create(self, behaviours, attrs, confirm=False): + """docstring + """ + return self.create_from_data( + { + 'behaviours': behaviours, + 'attributes': attrs, + }, + confirm=confirm, + ) + + def create_from_data(self, data, confirm=False): + """docstring + + read request from data stream + suitable for reading data from a file using json.load or yaml.load + """ + asset = Asset(**self._archivist.post( + f"{ASSETS_SUBPATH}/{ASSETS_LABEL}", + data, + )) + if not confirm: + return asset + + return wait_for_confirmation(self, asset['identity']) + + def read(self, identity): + """docstring + """ + return Asset(**self._archivist.get(ASSETS_SUBPATH, identity)) + + @staticmethod + def __query(props, attrs): + """docstring + """ + query = deepcopy(props) if props else {} + if attrs: + query['attributes'] = attrs + + return query + + def count(self, *, props=None, attrs=None): + """docstring + """ + return self._archivist.count( + f"{ASSETS_SUBPATH}/{ASSETS_LABEL}", + query=self.__query(props, attrs) + ) + + def wait_for_confirmed(self, *, props=None, attrs=None): + """docstring + """ + return wait_for_confirmed(self, props=props, attrs=attrs) + + def list(self, *, page_size=DEFAULT_PAGE_SIZE, props=None, attrs=None): + """docstring + """ + return ( + Asset(**a) for a in self._archivist.list( + f"{ASSETS_SUBPATH}/{ASSETS_LABEL}", + ASSETS_LABEL, + page_size=page_size, + query=self.__query(props, attrs) + ) + ) + + def read_by_signature(self, *, props=None, attrs=None): + """docstring + """ + return Asset(**self._archivist.get_by_signature( + f"{ASSETS_SUBPATH}/{ASSETS_LABEL}", + ASSETS_LABEL, + query=self.__query(props, attrs) + )) + + +class Asset(dict): + 
"""docstring + """ + @property + def primary_image(self): + """docstring + """ + try: + attachments = self['attributes']['arc_attachments'] + except (KeyError, TypeError): + pass + else: + return next( # pragma: no cover + (a for a in attachments + if 'arc_display_name' in a + if a['arc_display_name'] == "arc_primary_image"), + None + ) + + return None + + @property + def name(self): + """docstring + """ + try: + name = self['attributes']['arc_display_name'] + except (KeyError, TypeError): + pass + else: + return name + + return None diff --git a/archivist/attachments.py b/archivist/attachments.py new file mode 100644 index 00000000..bf520aae --- /dev/null +++ b/archivist/attachments.py @@ -0,0 +1,38 @@ +"""attachments interface + +""" + +# pylint:disable=too-few-public-methods + +from .constants import ATTACHMENTS_SUBPATH, ATTACHMENTS_LABEL + + +class _AttachmentsClient: + + def __init__(self, archivist): + """docstring + """ + self._archivist = archivist + + def upload(self, fd, *, mtype='image/jpg'): + """docstring + """ + return Attachment(**self._archivist.post_file( + f"{ATTACHMENTS_SUBPATH}/{ATTACHMENTS_LABEL}", + fd, + mtype, + )) + + def download(self, identity, fd): + """docstring + """ + return Attachment(**self._archivist.get_file( + ATTACHMENTS_SUBPATH, + identity, + fd, + )) + + +class Attachment(dict): + """Attachment object + """ diff --git a/archivist/confirm.py b/archivist/confirm.py new file mode 100644 index 00000000..9b5c537b --- /dev/null +++ b/archivist/confirm.py @@ -0,0 +1,105 @@ +"""assets interface + + Wrap base methods with constants for assets (path, etc... 
+""" + +from copy import deepcopy + +import backoff + +from .constants import ( + CONFIRMATION_CONFIRMED, + CONFIRMATION_FAILED, + CONFIRMATION_PENDING, + CONFIRMATION_STATUS, +) +from .errors import ArchivistUnconfirmedError +from .logger import LOGGER + +MAX_TIME=1200 + + +def __lookup_max_time(): + return MAX_TIME + + +def __backoff_handler(details): + LOGGER.debug("MAX_TIME %s", MAX_TIME) + LOGGER.debug("Backing off {wait:0.1f} seconds afters {tries} tries " + "calling function {target} with args {args} and kwargs " + "{kwargs}".format(**details)) + + +def __on_giveup_confirmation(details): + identity = details['args'][1] + elapsed = details['elapsed'] + raise ArchivistUnconfirmedError( + f"confirmation for {identity} timed out after {elapsed} seconds" + ) + + +@backoff.on_predicate( + backoff.expo, + logger=LOGGER, + max_time=__lookup_max_time, + on_backoff=__backoff_handler, + on_giveup=__on_giveup_confirmation, +) +def wait_for_confirmation(self, identity): + """docstring + """ + entity = self.read(identity) + + if CONFIRMATION_STATUS not in entity: + raise ArchivistUnconfirmedError( + f"cannot confirm {identity} as confirmation_status is not present" + ) + + if entity[CONFIRMATION_STATUS] == CONFIRMATION_FAILED: + raise ArchivistUnconfirmedError( + f"confirmation for {identity} FAILED - this is unusable" + ) + + if entity[CONFIRMATION_STATUS] == CONFIRMATION_CONFIRMED: + return entity + + return None + + +def __on_giveup_confirmed(details): + self = details['args'][0] + count = self.pending_count + elapsed = details['elapsed'] + raise ArchivistUnconfirmedError( + f"{count} pending assets still present after {elapsed} seconds" + ) + + +@backoff.on_predicate( + backoff.expo, + logger=LOGGER, + max_time=__lookup_max_time, + on_backoff=__backoff_handler, + on_giveup=__on_giveup_confirmed, +) +def wait_for_confirmed(self, *, props=None, **kwargs): + """docstring + """ + newprops = deepcopy(props) if props else {} + newprops[CONFIRMATION_STATUS] = 
"""Archivist exceptions

"""

import json


class ArchivistError(Exception):
    """Base exception for archivist package
    """


class ArchivistBadFieldError(ArchivistError):
    """Incorrect field name in list() method
    """


class ArchivistUnconfirmedError(ArchivistError):
    """Asset or event failed to confirm after fixed timeout
    """


class ArchivistIllegalArgumentError(ArchivistError):
    """Optional keyword arguments are inconsistent
    """


class ArchivistBadRequestError(ArchivistError):
    """Ill-formed request or validation error (400)
    """


class ArchivistDuplicateError(ArchivistError):
    """Read by signature returns more than one asset
    """


class ArchivistUnauthenticatedError(ArchivistError):
    """User is unknown (401)
    """


class ArchivistForbiddenError(ArchivistError):
    """User does not have permission (403)
    """


class ArchivistNotFoundError(ArchivistError):
    """Entity does not exist (404)
    """


class Archivist4xxError(ArchivistError):
    """Any other 4xx error
    """


class ArchivistNotImplementedError(ArchivistError):
    """Illegal REST verb (501)
    """


class ArchivistUnavailableError(ArchivistError):
    """Service is unavailable (503)
    """


class Archivist5xxError(ArchivistError):
    """Any other 5xx error
    """


def __identity(response):
    """Best-effort extraction of the entity identity from the request body."""
    identity = "unknown"
    if response.request:
        req = response.request
        body = getattr(req, 'body', None)
        if body:
            body = json.loads(body)
            identity = body.get('identity', "unknown")

    return identity


def parse_response(response):
    """Map an HTTP response to an exception, or None if status < 400.

    The returned exception is not raised here — the caller decides.
    """

    status_code = response.status_code
    if status_code < 400:
        return None

    text = response.text or ''

    if 400 <= status_code < 500:
        return {
            400: ArchivistBadRequestError(f"{text} ({status_code})"),
            401: ArchivistUnauthenticatedError(f"{text} ({status_code})"),
            403: ArchivistForbiddenError(f"{text} ({status_code})"),
            404: ArchivistNotFoundError(f"{__identity(response)} not found ({status_code})"),
        }.get(status_code, Archivist4xxError(f"{text} ({status_code})"))

    if 500 <= status_code < 600:
        return {
            501: ArchivistNotImplementedError(f"{text} ({status_code})"),
            503: ArchivistUnavailableError(f"{text} ({status_code})"),
        }.get(status_code, Archivist5xxError(f"{text} ({status_code})"))

    return ArchivistError(f"{text} ({status_code})")
"""events interface

    Wraps the generic Archivist CRUDL verbs with the constants for the
    events endpoints (nested under assets).
"""

from copy import deepcopy

from .constants import (
    SEP,
    ASSETS_SUBPATH,
    ASSETS_WILDCARD,
    EVENTS_LABEL,
)
from .confirm import wait_for_confirmation, wait_for_confirmed

DEFAULT_PAGE_SIZE = 500


class _EventsClient:
    """Access to the v2/assets/.../events endpoints of an Archivist service."""

    def __init__(self, archivist):
        """archivist: Archivist connection that issues the actual requests."""
        self._archivist = archivist

    def create(self, asset_id, props, attrs, *, asset_attrs=None, confirm=False):
        """Create an event on an asset from properties and attributes.

        confirm: when True, block until the event is CONFIRMED on the
        service before returning.
        """
        return self.create_from_data(
            asset_id,
            self.__query(props, attrs, asset_attrs),
            confirm=confirm,
        )

    def create_from_data(self, asset_id, data, *, confirm=False):
        """Create an event from a complete request body.

        Suitable for data read from a file using json.load or yaml.load.
        """
        event = Event(**self._archivist.post(
            SEP.join((ASSETS_SUBPATH, asset_id, EVENTS_LABEL)),
            data,
        ))
        if not confirm:
            return event

        return wait_for_confirmation(self, event['identity'])

    def read(self, identity):
        """Read an event by identity e.g. assets/xxxx.../events/yyyy..."""
        return Event(**self._archivist.get(
            ASSETS_SUBPATH,
            identity,
        ))

    @staticmethod
    def __query(props, attrs, asset_attrs):
        """Merge properties, event attributes and asset attributes."""
        query = deepcopy(props) if props else {}
        if attrs:
            query['event_attributes'] = attrs
        if asset_attrs:
            query['asset_attributes'] = asset_attrs

        return query

    def count(self, *, asset_id=ASSETS_WILDCARD, props=None, attrs=None, asset_attrs=None):
        """Count events matching the query (all assets by default)."""
        return self._archivist.count(
            SEP.join((ASSETS_SUBPATH, asset_id, EVENTS_LABEL)),
            query=self.__query(props, attrs, asset_attrs)
        )

    def wait_for_confirmed(self, *, asset_id=ASSETS_WILDCARD, props=None, attrs=None):
        """Block until no matching events remain PENDING."""
        return wait_for_confirmed(self, asset_id=asset_id, props=props, attrs=attrs)

    def list(
            self,
            *,
            asset_id=ASSETS_WILDCARD,
            page_size=DEFAULT_PAGE_SIZE,
            props=None,
            attrs=None,
            asset_attrs=None,
    ):
        """Generate events matching the query (all assets by default)."""
        return (
            Event(**a) for a in self._archivist.list(
                SEP.join((ASSETS_SUBPATH, asset_id, EVENTS_LABEL)),
                EVENTS_LABEL,
                page_size=page_size,
                query=self.__query(props, attrs, asset_attrs)
            )
        )

    def read_by_signature(
            self,
            *,
            asset_id=ASSETS_WILDCARD,
            props=None,
            attrs=None,
            asset_attrs=None,
    ):
        """Read exactly one event matching the query.

        Raises if zero or more than one event matches.
        """
        return Event(**self._archivist.get_by_signature(
            SEP.join((ASSETS_SUBPATH, asset_id, EVENTS_LABEL)),
            EVENTS_LABEL,
            query=self.__query(props, attrs, asset_attrs)
        ))
class Event(dict):
    """Event object — a plain dict of the JSON response with helper
    properties for commonly used fields.
    """

    @property
    def when(self):
        """The declared timestamp, falling back to the accepted one.

        Returns None if neither is present.
        """
        for key in ('timestamp_declared', 'timestamp_accepted'):
            try:
                return self[key]
            except KeyError:
                continue

        return None

    @property
    def who(self):
        """The declared principal's display name, falling back to the
        accepted principal's.

        Returns None if neither is present.
        """
        for key in ('principal_declared', 'principal_accepted'):
            try:
                return self[key]['display_name']
            except (KeyError, TypeError):
                continue

        return None
"""
Set up logging
"""
import logging

# pragma: no cover

# Base (root) logger from which all loggers are propagated.
LOGGER = logging.getLogger()

# timestamp.millis|thread|level|logger-name|message
LOGFMT = (
    '%(asctime)s.%(msecs)03d|%(threadName)s'
    '|%(levelname)s|%(name)s|%(message)s'
)
DATEFMT = '%Y-%m-%d %H:%M:%S'


def set_logger(level):
    """Set the root logger's level and attach a stream handler.

    A handler is only added if none exists yet, so repeated calls
    (e.g. from unittests) do not duplicate output.
    """
    LOGGER.setLevel(level)
    if not LOGGER.hasHandlers():
        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter(
                fmt=LOGFMT,
                datefmt=DATEFMT,
            )
        )
        LOGGER.addHandler(handler)
make_timestamp(date_object): + """Format a datetime object into an Archivist format + timestamp string""" + return rfc3339.rfc3339(date_object) diff --git a/examples/create_asset/create_asset.py b/examples/create_asset/create_asset.py new file mode 100644 index 00000000..f89469a9 --- /dev/null +++ b/examples/create_asset/create_asset.py @@ -0,0 +1,86 @@ +# Copyright 2019-2021 Jitsuin, inc + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is API SAMPLE CODE, not for production use. + +"""Create an asset given url to Archivist and user Token. + +The module contains two functions: main and create_asset. Main function parses in +a url to the Archivist and a token, which is a user authorization. +The main function would initialize an archivist connection using the url and +the token, called "arch", then call assets.create and the asset will be created. +""" + +from archivist.archivist import Archivist + + +def create_asset(arch): + """Create an asset using Archivist Connection. + + Args: + arch: archivist connection. + + Returns: + newasset: a new asset created. + """ + attrs = { + "arc_display_name": "display_name", # Asset's display name in the user interface + "arc_description": "display_description", # Asset's description in the user interface + "arc_display_type": "desplay_type", # Arc_display_type is a free text field + # allowing the creator of + # an asset to specify the asset + # type or class. 
Be careful when setting this: + # assets are grouped by type and + # sharing policies can be + # configured to share assets based on + # their arc_display_type. + # So a mistake here can result in asset data being + # under- or over-shared. + "some_custom_attribute": "value" # You can add any custom value as long as + # it does not start with arc_ + } + behaviours = ["Attachments", "Firmware", "LocationUpdate", "Maintenance", "RecordEvidence"] + + # The first argument is the behaviours of the asset + # The second argument is the attributes of the asset + # The third argument is wait for confirmation: + # If @confirm@ is True then this function will not + # return until the asset is confirmed on the blockchain and ready + # to accept events (or an error occurs) + # After an asset is submitted to the blockchain (submitted), + # it will be in the "Pending" status. + # Once it is added to the blockchain, the status will be changed to "Confirmed" + return arch.assets.create(behaviours, attrs=attrs, confirm=True) + + +def main(): + """ Main function of create asset. + + Parse in user input of url and auth token and use them to + create an example archivist connection and create an asset. + """ + with open(".auth_token", mode='r') as tokenfile: + authtoken = tokenfile.read().strip() + + # Initialize connection to Archivist + arch = Archivist( + "https://soak-0-avid.engineering-k8s-stage-2.dev.wild.jitsuin.io", + auth=authtoken, + ) + # Create a new asset + unused_asset = create_asset(arch) + + +if __name__ == "__main__": + main() diff --git a/examples/create_event/create_event.py b/examples/create_event/create_event.py new file mode 100644 index 00000000..a5089a06 --- /dev/null +++ b/examples/create_event/create_event.py @@ -0,0 +1,136 @@ +# Copyright 2019-2021 Jitsuin, inc + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is API SAMPLE CODE, not for production use. + +"""Create an event for an asset given url to Archivist and user Token. + +The module contains four functions: main, create_asset, create_event and fetch_event. +Main function parses in a url to the Archivist and a token, which is a user authorization. +The main function would initialize an archivist connection using the url and +the token, called "aconn", then call create_assets and pass in "aconn" and +create_assets will build create_asset, which is a archivist connection function +to create a new asset for the archivist through archivist connection. The main funciton then +calls create_event and pass in "aconn" and the created asset to create a new event for the asset. +Main function calls fetch_event and pass in "aconn" and the identity of the event to get the event. +""" + +from archivist.archivist import Archivist + + +def create_event(arch, asset): + """Create an event for the passed-in asset. + + Args: + arch: archivist connection. + asset: an asset created using aconn + + Returns: + new_event: a new event for the asset. + """ + # props can be defined for different behaviours and the attributes associated with + # different behaviours are also different. 
More details can be found here: + # https://jitsuin-archivist.readthedocs.io/en/latest/assetv2/index.html + props = { + "operation": "Record", + # This event is used to record evidence, more behaviour explanation can be found here: + # https://jitsuin-archivist.readthedocs.io/en/latest/assetv2/index.html + "behaviour": "RecordEvidence", + # Optional Client-claimed time at which the maintenance was performed + "timestamp_declared": "2019-11-27T14:44:19Z", + # Optional Client-claimed identity of person performing the operation + "principal_declared": { + "issuer": "idp.synsation.io/1234", + "subject": "phil.b", + "email": "phil.b@synsation.io" + } + } + attrs = { + # Required Details of the RecordEvidence request + "arc_description": "Safety conformance approved for version 1.6.", + # Required The evidence to be retained in the asset history + "arc_evidence": "DVA Conformance Report attached", + # Example Client can add any additional information in further attributes, + # including free text or attachments + "conformance_report": "blobs/e2a1d16c-03cd-45a1-8cd0-690831df1273" + } + + return arch.events.create(asset['identity'], props=props, attrs=attrs) + + +def create_asset(arch): + """Create an asset using Archivist Connection. + + Args: + arch: archivist connection. + + Returns: + newasset: a new asset created. + """ + attrs = { + "arc_display_name": "display_name", # Asset's display name in the user interface + "arc_description": "display_description", # Asset's description in the user interface + "arc_display_type": "desplay_type", # Arc_display_type is a free text field + # allowing the creator of + # an asset to specify the asset + # type or class. Be careful when setting this: + # assets are grouped by type and + # sharing policies can be + # configured to share assets based on + # their arc_display_type. + # So a mistake here can result in asset data being + # under- or over-shared. 
+ "some_custom_attribute": "value" # You can add any custom value as long as + # it does not start with arc_ + } + behaviours = ["Attachments", "Firmware", "LocationUpdate", "Maintenance", "RecordEvidence"] + + # The first argument is the behaviours of the asset + # The second argument is the attributes of the asset + # The third argument is wait for confirmation: + # If @confirm@ is True then this function will not + # return until the asset is confirmed on the blockchain and ready + # to accept events (or an error occurs) + # After an asset is submitted to the blockchain (submitted), + # it will be in the "Pending" status. + # Once it is added to the blockchain, the status will be changed to "Confirmed" + return arch.assets.create(behaviours, attrs, confirm=True) + + +def main(): + """ Main function of create event. + + Parse in user input of url and auth token and use them to + create an example archivist connection and create an asset. + The main function then uses the asset to create an event for + the asset and fetch the event. + """ + with open(".auth_token", mode='r') as tokenfile: + authtoken = tokenfile.read().strip() + + # Initialize connection to Archivist + arch = Archivist( + "https://soak-0-avid.engineering-k8s-stage-2.dev.wild.jitsuin.io", + auth=authtoken, + ) + # Create a new asset + new_asset = create_asset(arch) + # Create a new event + new_event = create_event(arch, new_asset) + # Fetch the event + unused_event = arch.events.read(new_event['identity']) + + +if __name__ == "__main__": + main() diff --git a/examples/filter_assets/filter_assets.py b/examples/filter_assets/filter_assets.py new file mode 100644 index 00000000..64584fc8 --- /dev/null +++ b/examples/filter_assets/filter_assets.py @@ -0,0 +1,63 @@ +# Copyright 2019-2021 Jitsuin, inc + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is API SAMPLE CODE, not for production use. + +"""Filter assets of a archivist connection given url to Archivist and user Token. + +The module contains three functions: main, get_matching_assets and _get_matching_resources. +Main function parses in a url to the Archivist and a token, which is a user authorization. +The main function would initialize an archivist connection using the url and +the token, called "aconn", then call get_matching_assets and pass in "aconn", properties and +attribtues. "get_matching_assets" filters assets of certain properties and attributes from all +assets and return the filted assets. +""" + + +from archivist.archivist import Archivist + + +def main(): + """ Main function of filtering assets. + + Parse in user input of url and auth token and use them to + create an example archivist connection and passed-in properties + attributes to filter all assets of the selected properties and + attributes through function get_matching_assets. + """ + with open(".auth_token", mode='r') as tokenfile: + authtoken = tokenfile.read().strip() + + # Initialize connection to Archivist + arch = Archivist( + "https://soak-0-avid.engineering-k8s-stage-2.dev.wild.jitsuin.io", + auth=authtoken, + ) + + # list all assets with required attributes and properties + props = {"confirmation_status": "CONFIRMED"} + attrs = {"arc_display_type": "Traffic light"} + + # iterate through the generator.... + for asset in arch.assets.list(props=props, attrs=attrs): + print("asset", asset) + + # alternatively one could pull the list and cache locally... 
+ assets = list(arch.assets.list(props=props, attrs=attrs)) + for asset in assets: + print("asset", asset) + + +if __name__ == "__main__": + main() diff --git a/examples/filter_events/filter_event.py b/examples/filter_events/filter_event.py new file mode 100644 index 00000000..b6fdbc7d --- /dev/null +++ b/examples/filter_events/filter_event.py @@ -0,0 +1,60 @@ +# Copyright 2019-2021 Jitsuin, inc + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is API SAMPLE CODE, not for production use. + +"""Filter assets of a archivist connection given url to Archivist and user Token. + +The module contains three functions: main, get_matching_events and _get_matching_resources. +Main function parses in a url to the Archivist and a token, which is a user authorization. +The main function would initialize an archivist connection using the url and +the token, called "aconn", then call get_matching_events and pass in "aconn", properties and +attribtues. "get_matching_assets" filters assets of certain properties and attributes from all +assets and return the filted assets. +""" + +from archivist.archivist import Archivist + + +def main(): + """ Main function of filtering events. + + Parse in user input of url and auth token and use them to + create an example archivist connection and passed-in properties + attributes to filter all events of the selected properties and + attributes through function get_matching_events. 
+ """ + + with open(".auth_token", mode='r') as tokenfile: + authtoken = tokenfile.read().strip() + + # Initialize connection to Archivist + aconn = Archivist( + "https://soak-0-avid.engineering-k8s-stage-2.dev.wild.jitsuin.io", + auth=authtoken, + ) + # Get all assets with required attributes and properties + props = {"confirmation_status": "CONFIRMED"} + attrs = {"arc_display_type": "Traffic light"} + for event in aconn.events.list(asset_id="assets/-", props=props, attrs=attrs): + print("event", event) + + # alternatively one could pull the list and cache locally... + events = aconn.events.list(asset_id="assets/-", props=props, attrs=attrs) + for event in events: + print("event", event) + + +if __name__ == "__main__": + main() diff --git a/examples/get_asset/get_asset.py b/examples/get_asset/get_asset.py new file mode 100644 index 00000000..7813223f --- /dev/null +++ b/examples/get_asset/get_asset.py @@ -0,0 +1,49 @@ +# Copyright 2019-2021 Jitsuin, inc + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is API SAMPLE CODE, not for production use. + +"""Get all assets from a instance of Archivist + +The module contains two functions: main and fetch_all_assets. Main function parses in +a url, which is an instance of Archivist and a token, which is a user authorization. +The main function would then call fetch_all_assets and pass in the two variables and +fetch_all_assets will build a connection to the instance and fetch all assets from +the instances. 
+""" + + +from archivist.archivist import Archivist + + +def main(): + """ Main function of get_asset. + + Parse in user input of url and auth token and use them to + create an example archivist connection and fetch all assets. + """ + with open(".auth_token", mode='r') as tokenfile: + authtoken = tokenfile.read().strip() + + # Initialize connection to Archivist + arch = Archivist( + "https://soak-0-avid.engineering-k8s-stage-2.dev.wild.jitsuin.io", + auth=authtoken, + ) + for asset in arch.assets.list(): + print("asset id:", asset['identity']) + + +if __name__ == "__main__": + main() diff --git a/pylintrc b/pylintrc new file mode 100644 index 00000000..f8dcdca5 --- /dev/null +++ b/pylintrc @@ -0,0 +1,585 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. 
+#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". 
+disable=print-statement, + parameter-unpacking, + unpacking-in-except, + old-raise-syntax, + backtick, + long-suffix, + old-ne-operator, + old-octal-literal, + import-star-module-level, + non-ascii-bytes-literal, + raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + apply-builtin, + basestring-builtin, + buffer-builtin, + cmp-builtin, + coerce-builtin, + execfile-builtin, + file-builtin, + long-builtin, + raw_input-builtin, + reduce-builtin, + standarderror-builtin, + unicode-builtin, + xrange-builtin, + coerce-method, + delslice-method, + getslice-method, + setslice-method, + no-absolute-import, + old-division, + dict-iter-method, + dict-view-method, + next-method-called, + metaclass-assignment, + indexing-exception, + raising-string, + reload-builtin, + oct-method, + hex-method, + nonzero-method, + cmp-method, + input-builtin, + round-builtin, + intern-builtin, + unichr-builtin, + map-builtin-not-iterating, + zip-builtin-not-iterating, + range-builtin-not-iterating, + filter-builtin-not-iterating, + using-cmp-argument, + eq-without-hash, + div-method, + idiv-method, + rdiv-method, + exception-message-attribute, + invalid-str-codec, + sys-max-int, + bad-python3-import, + deprecated-string-function, + deprecated-str-translate-call, + deprecated-itertools-function, + deprecated-types-field, + next-method-defined, + dict-items-not-iterating, + dict-keys-not-iterating, + dict-values-not-iterating, + deprecated-operator-function, + deprecated-urllib-function, + xreadlines-attribute, + deprecated-sys-function, + exception-escape, + comprehension-escape, + invalid-name, + bad-classmethod-argument, + bad-mcs-classmethod-argument, + no-self-argument + +# Enable the message, report, category or checker with the given id(s). 
You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'error', 'warning', 'refactor', and 'convention' +# which contain the number of messages in each category, as well as 'statement' +# which is the total number of statements analyzed. This score is used by the +# global evaluation report (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit + + +[LOGGING] + +# Format style used to check logging format string. `old` means using % +# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. 
+logging-modules=logging + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. 
+ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. 
+ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma, + dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=100 + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. 
+#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. 
Overrides module-naming- +# style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +#variable-rgx= + + +[STRING] + +# This flag controls whether the implicit-str-concat-in-sequence should +# generate a warning on implicit string concatenation in sequences defined over +# several lines. +check-str-concat-over-line-jumps=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled). +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). 
+int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + + +[DESIGN] + +# Maximum number of arguments for function / method. +max-args=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=8 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=15 + +# Maximum number of locals for function / method body. +max-locals=20 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=7 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "BaseException, Exception". 
+overgeneral-exceptions=BaseException, + Exception diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..9a481200 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,10 @@ +-r requirements.txt + +# code quality +autopep8==1.5.5 +coverage==5.4 +pycodestyle==2.6.0 +pylint==2.6.0 + +# uploading to pypi +twine==3.4.1 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..a17552cf --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +backoff==1.10.0 +certifi==2020.12.5 +flatten-dict==0.3.0 +iso8601==0.1.13 +requests==2.22.0 +requests-toolbelt==0.9.1 +rfc3339==6.2 diff --git a/scripts/builder.sh b/scripts/builder.sh new file mode 100755 index 00000000..2a4b45b6 --- /dev/null +++ b/scripts/builder.sh @@ -0,0 +1,16 @@ +#!/bin/sh +# +# Executes a command inside the builder container +# +# Usage Examples +# +# ./scripts/builder.sh /bin/bash # for shell +# ./scripts/builder.sh # enters python REPL +# ./scripts/builder.sh autopep8 -i -r python # autopep8s all code + +docker run \ + --rm -it \ + -v $(pwd):/home/builder \ + -u $(id -u):$(id -g) \ + jitsuin-archivist-python-builder \ + "$@" diff --git a/scripts/unittests.sh b/scripts/unittests.sh new file mode 100755 index 00000000..d9e037f7 --- /dev/null +++ b/scripts/unittests.sh @@ -0,0 +1,13 @@ +#!/bin/sh +# +# run unittests +# + +rm -f coverage.xml +rm -rf htmlcov +COVERAGE="coverage" +${COVERAGE} run --branch --source archivist -m unittest discover -v +${COVERAGE} annotate +${COVERAGE} html +${COVERAGE} xml +${COVERAGE} report diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..54e5db15 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,3 @@ +[pycodestyle] +ignore = E128, E225, E265, E266, E402, E501, E713, E722, E741, W504, +statistics = True \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..7a1c5c44 --- /dev/null +++ b/setup.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 + +''' +Setup script for 
python build system +''' +import os +from setuptools import setup, find_packages + +REPO_URL = 'https://github.com/jitsuin-inc/archivist-python/' +NAME = "jitsuin-archivist" + +with open('README.md') as FF: + DESC = FF.read() + +with open('requirements.txt') as FF: + requirements=[f"{line.strip()}" for line in FF] + +setup( + name=NAME, + version="0.1.0alpha3", + author="Jitsuin Inc.", + author_email="support@jitsuin.com", + description="Jitsuin Archivist Client", + long_description=DESC, + long_description_content_type="text/markdown", + url=REPO_URL, + packages=find_packages(exclude=( "examples", "unittests", )), + platforms=['any'], + classifiers=[ + 'Development Status :: 3 - Alpha', #pre-delivery + 'Environment :: Console', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: MIT License', # MIT + 'Operating System :: POSIX :: Linux', # https://pypi.org/classifiers/ # on anything + 'Programming Language :: Python :: 3.6', + 'Topic :: Utilities' # https://pypi.org/classifiers/ # check another option client-sdk + ], + install_requires=requirements, + python_requires='>=3.6', + entry_points={ + 'console_scripts': [ + 'create_asset = archivist:entry.create_asset', + 'create_event = archivist:entry.create_event', + ], + }, +) diff --git a/unittests/__init__.py b/unittests/__init__.py new file mode 100644 index 00000000..0518c6a8 --- /dev/null +++ b/unittests/__init__.py @@ -0,0 +1,3 @@ +''' +Unit tests +''' diff --git a/unittests/mock_response.py b/unittests/mock_response.py new file mode 100644 index 00000000..19a80301 --- /dev/null +++ b/unittests/mock_response.py @@ -0,0 +1,34 @@ +''' +Mock response object +''' + +import json + +# pylint: disable=missing-docstring + + +class MockResponse(dict): + def __init__(self, status_code, request=None, headers=None, iter_content=None, **kwargs): + super().__init__(**kwargs) + self.status_code = status_code + self._headers = headers + self._request = request + self._iter_content = iter_content + + @property + 
def request(self): + return self._request + + @property + def headers(self): + return self._headers + + @property + def text(self): + return json.dumps(self) + + def json(self): + return self + + def iter_content(self, chunk_size=4096): + return self._iter_content(chunk_size=chunk_size) diff --git a/unittests/testarchivist.py b/unittests/testarchivist.py new file mode 100644 index 00000000..c90671a2 --- /dev/null +++ b/unittests/testarchivist.py @@ -0,0 +1,909 @@ +''' +Test archivist +''' + +from io import BytesIO +from unittest import TestCase, mock + +from archivist.archivist import Archivist +from archivist.constants import ROOT, HEADERS_TOTAL_COUNT +from archivist.errors import ( + ArchivistBadFieldError, + ArchivistBadRequestError, + ArchivistDuplicateError, + ArchivistIllegalArgumentError, + ArchivistNotFoundError, +) + +from .mock_response import MockResponse + + +# pylint: disable=unused-variable +# pylint: disable=missing-docstring +# pylint: disable=unnecessary-comprehension + + +class TestArchivist(TestCase): + ''' + Test Archivist class + ''' + def test_archivist(self): + ''' + Test default archivist creation + ''' + arch = Archivist("url", auth="authauthauth") + self.assertEqual( + arch.url, + "url", + msg="Incorrect url", + ) + self.assertEqual( + arch.headers, + { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + msg="Incorrect auth headers", + ) + self.assertTrue( + arch.verify, + msg="verify must be True", + ) + + def test_archivist_no_verify(self): + ''' + Test archivist creation with no verify + ''' + arch = Archivist("url", auth="authauthauth", verify=False) + self.assertFalse( + arch.verify, + msg="verify must be False", + ) + + def test_archivist_with_neither_auth_and_cert(self): + ''' + Test archivist creation with neither auth nor cert + ''' + with self.assertRaises(ArchivistIllegalArgumentError): + arch = Archivist("url") + + def test_archivist_with_both_auth_and_cert(self): + ''' + Test archivist creation 
with both auth and cert + ''' + with self.assertRaises(ArchivistIllegalArgumentError): + arch = Archivist("url", auth="authauthauth", cert="/path/to/file") + + @mock.patch('archivist.archivist.os_path_isfile') + def test_archivist_with_nonexistent_cert(self, mock_isfile): + ''' + Test archivist creation with nonexistent cert + ''' + mock_isfile.return_value = False + with self.assertRaises(ArchivistNotFoundError): + arch = Archivist("url", cert="/path/to/file") + + @mock.patch('archivist.archivist.os_path_isfile') + def test_archivist_with_existent_cert(self, mock_isfile): + ''' + Test archivist creation with cert + ''' + mock_isfile.return_value = True + arch = Archivist("url", cert="/path/to/file") + self.assertEqual( + arch.cert, + "/path/to/file", + msg="verify must be False", + ) + + +class TestArchivistMethods(TestCase): + ''' + Test Archivist base method class + ''' + def setUp(self): + self.arch = Archivist("url", auth="authauthauth") + + +class TestArchivistPost(TestArchivistMethods): + ''' + Test Archivist POST method + ''' + @mock.patch('requests.post') + def test_post(self, mock_post): + ''' + Test default post method + ''' + request = {"field1": "value1"} + mock_post.return_value = MockResponse(200, request=request) + resp = self.arch.post("path/path", request) + self.assertEqual( + tuple(mock_post.call_args), + ( + (f"url/{ROOT}/path/path", ), + { + 'data': '{"field1": "value1"}', + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="POST method called incorrectly", + ) + + @mock.patch('requests.post') + def test_post_with_error(self, mock_post): + ''' + Test post method with error + ''' + request = {"field1": "value1"} + mock_post.return_value = MockResponse(400, request=request, field1="value1") + with self.assertRaises(ArchivistBadRequestError): + resp = self.arch.post("path/path", request) + + @mock.patch('requests.post') + def 
test_post_with_headers(self, mock_post): + ''' + Test default post method + ''' + request = {"field1": "value1"} + mock_post.return_value = MockResponse(200, request=request) + resp = self.arch.post( + "path/path", + request, + headers={"headerfield1": "headervalue1"}, + ) + self.assertEqual( + tuple(mock_post.call_args), + ( + (f"url/{ROOT}/path/path", ), + { + 'data': '{"field1": "value1"}', + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + "headerfield1": "headervalue1", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="POST method called incorrectly", + ) + + @mock.patch('requests.post') + def test_post_file(self, mock_post): + ''' + Test default post_file method + ''' + mock_post.return_value = MockResponse(200) + resp = self.arch.post_file( + "path/path", + BytesIO(b"lotsofbytes"), + "image/jpg", + ) + args, kwargs = mock_post.call_args + self.assertEqual( + len(args), + 1, + msg="Incorrect number of arguments", + ) + self.assertEqual( + args[0], + f"url/{ROOT}/path/path", + msg="Incorrect first argument", + ) + self.assertEqual( + len(kwargs), + 4, + msg="Incorrect number of keyword arguments", + ) + headers = kwargs.get('headers') + self.assertNotEqual( + headers, + None, + msg="Header does not exist", + ) + self.assertTrue( + headers['content-type'].startswith('multipart/form-data'), + msg="Incorrect content-type", + ) + data = kwargs.get('data') + self.assertIsNotNone( + data, + msg="Incorrect data", + ) + fields = data.fields + self.assertIsNotNone( + fields, + msg="Incorrect fields", + ) + myfile = fields.get('file') + self.assertIsNotNone( + myfile, + msg="Incorrect file key", + ) + self.assertEqual( + myfile[0], + 'filename', + msg="Incorrect filename", + ) + self.assertEqual( + myfile[2], + 'image/jpg', + msg="Incorrect mimetype", + ) + + @mock.patch('requests.post') + def test_post_file_with_error(self, mock_post): + ''' + Test post method with error + ''' + mock_post.return_value = 
MockResponse(400) + with self.assertRaises(ArchivistBadRequestError): + resp = self.arch.post_file( + "path/path", + BytesIO(b"lotsofbytes"), + "image/jpg", + ) + + +class TestArchivistGet(TestArchivistMethods): + ''' + Test Archivist Get method + ''' + @mock.patch('requests.get') + def test_get(self, mock_get): + ''' + Test default get method + ''' + mock_get.return_value = MockResponse(200) + resp = self.arch.get("path/path", "entity/xxxxxxxx") + self.assertEqual( + tuple(mock_get.call_args), + ( + (f"url/{ROOT}/path/path/entity/xxxxxxxx", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_get_with_error(self, mock_get): + ''' + Test get method with error + ''' + mock_get.return_value = MockResponse(404, identity="entity/xxxxxxxx") + with self.assertRaises(ArchivistNotFoundError): + resp = self.arch.get("path/path", "entity/xxxxxxxx") + + @mock.patch('requests.get') + def test_get_file(self, mock_get): + ''' + Test default get method + ''' + def iter_content(): + i = 0 + + def filedata(chunk_size=4096): # pylint: disable=unused-argument + nonlocal i + while i < 4: + i += 1 + + if i == 2: + yield None + + yield b"chunkofbytes" + + return filedata + + mock_get.return_value = MockResponse( + 200, + identity="entity/xxxxxxxx", + iter_content=iter_content(), + ) + with BytesIO() as fd: + resp = self.arch.get_file("path/path", "entity/xxxxxxxx", fd) + self.assertEqual( + tuple(mock_get.call_args), + ( + (f"url/{ROOT}/path/path/entity/xxxxxxxx", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + 'stream': True, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_get_file_with_error(self, mock_get): + ''' + Test get method with error + ''' + 
mock_get.return_value = MockResponse(404, identity="entity/xxxxxxxx") + with self.assertRaises(ArchivistNotFoundError): + with BytesIO() as fd: + resp = self.arch.get_file("path/path", "entity/xxxxxxxx", fd) + + @mock.patch('requests.get') + def test_get_with_headers(self, mock_get): + ''' + Test default get method + ''' + mock_get.return_value = MockResponse(200) + resp = self.arch.get( + "path/path", + "id/xxxxxxxx", + headers={"headerfield1": "headervalue1"}, + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + (f"url/{ROOT}/path/path/id/xxxxxxxx", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + "headerfield1": "headervalue1", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + +class TestArchivistDelete(TestArchivistMethods): + ''' + Test Archivist Delete method + ''' + @mock.patch('requests.delete') + def test_delete(self, mock_delete): + ''' + Test default delete method + ''' + mock_delete.return_value = MockResponse(200) + resp = self.arch.delete("path/path", "entity/xxxxxxxx") + self.assertEqual( + tuple(mock_delete.call_args), + ( + (f"url/{ROOT}/path/path/entity/xxxxxxxx", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="DELETE method called incorrectly", + ) + + @mock.patch('requests.delete') + def test_delete_with_error(self, mock_delete): + ''' + Test delete method with error + ''' + mock_delete.return_value = MockResponse(404, identity="entity/xxxxxxxx") + with self.assertRaises(ArchivistNotFoundError): + resp = self.arch.delete("path/path", "entity/xxxxxxxx") + + @mock.patch('requests.delete') + def test_delete_with_headers(self, mock_delete): + ''' + Test default delete method + ''' + mock_delete.return_value = MockResponse(200) + resp = self.arch.delete( + "path/path", + "id/xxxxxxxx", + headers={"headerfield1": "headervalue1"}, + ) + 
self.assertEqual( + tuple(mock_delete.call_args), + ( + (f"url/{ROOT}/path/path/id/xxxxxxxx", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + "headerfield1": "headervalue1", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="DELETE method called incorrectly", + ) + + +class TestArchivistPatch(TestArchivistMethods): + ''' + Test Archivist PATCH method + ''' + @mock.patch('requests.patch') + def test_patch(self, mock_patch): + ''' + Test default patch method + ''' + request = {"field1": "value1"} + mock_patch.return_value = MockResponse(200, request=request) + resp = self.arch.patch("path/path", "entity/xxxx", request) + self.assertEqual( + tuple(mock_patch.call_args), + ( + (f"url/{ROOT}/path/path/entity/xxxx", ), + { + 'data': '{"field1": "value1"}', + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="PATCH method called incorrectly", + ) + + @mock.patch('requests.patch') + def test_patch_with_error(self, mock_patch): + ''' + Test patch method with error + ''' + request = {"field1": "value1"} + mock_patch.return_value = MockResponse(400, request=request, field1="value1") + with self.assertRaises(ArchivistBadRequestError): + resp = self.arch.patch("path/path", "entity/xxxx", request) + + @mock.patch('requests.patch') + def test_patch_with_headers(self, mock_patch): + ''' + Test default patch method + ''' + request = {"field1": "value1"} + mock_patch.return_value = MockResponse(200, request=request) + resp = self.arch.patch( + "path/path", + "entity/xxxx", + request, + headers={"headerfield1": "headervalue1"}, + ) + self.assertEqual( + tuple(mock_patch.call_args), + ( + (f"url/{ROOT}/path/path/entity/xxxx", ), + { + 'data': '{"field1": "value1"}', + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + "headerfield1": "headervalue1", + }, + 'verify': True, + 'cert': None, 
+ }, + ), + msg="PATCH method called incorrectly", + ) + + +class TestArchivistCount(TestArchivistMethods): + ''' + Test Archivist count method + ''' + @mock.patch('requests.get') + def test_count(self, mock_get): + ''' + Test default count method + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + things=[ + { + "field1": "value1", + }, + ], + ) + count = self.arch.count("path/path") + self.assertEqual( + count, + 1, + msg="incorrect count", + ) + + @mock.patch('requests.get') + def test_count_with_error(self, mock_get): + ''' + Test default count method with error + ''' + mock_get.return_value = MockResponse( + 400, + things=[ + { + "field1": "value1", + }, + ], + ) + with self.assertRaises(ArchivistBadRequestError): + count = self.arch.count("path/path") + + +class TestArchivistList(TestArchivistMethods): + ''' + Test Archivist list method + ''' + @mock.patch('requests.get') + def test_list(self, mock_get): + ''' + Test default list method + ''' + mock_get.return_value = MockResponse( + 200, + things=[ + { + "field1": "value1", + }, + ], + ) + listing = self.arch.list("path/path", "things") + responses = [r for r in listing] + self.assertEqual( + len(responses), + 1, + msg="incorrect number of responses", + ) + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + (f"url/{ROOT}/path/path", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_list_with_error(self, mock_get): + ''' + Test default list method with error + ''' + mock_get.return_value = MockResponse( + 400, + things=[ + { + "field1": "value1", + }, + ], + ) + listing = self.arch.list("path/path", "things") + with self.assertRaises(ArchivistBadRequestError): + responses = [r for r in listing] + + @mock.patch('requests.get') + def test_list_with_bad_field(self, 
mock_get): + ''' + Test default list method with error + ''' + mock_get.return_value = MockResponse( + 200, + things=[ + { + "field1": "value1", + }, + ], + ) + listing = self.arch.list("path/path", "badthings") + with self.assertRaises(ArchivistBadFieldError): + responses = [r for r in listing] + + @mock.patch('requests.get') + def test_list_with_headers(self, mock_get): + ''' + Test default list method + ''' + mock_get.return_value = MockResponse( + 200, + things=[ + { + "field1": "value1", + }, + ], + ) + listing = self.arch.list( + "path/path", + "things", + headers={"headerfield1": "headervalue1"}, + ) + responses = [r for r in listing] + self.assertEqual( + len(responses), + 1, + msg="incorrect number of responses", + ) + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + (f"url/{ROOT}/path/path", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + "headerfield1": "headervalue1", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_list_with_query(self, mock_get): + ''' + Test default list method + ''' + mock_get.return_value = MockResponse( + 200, + things=[ + { + "field1": "value1", + }, + ], + ) + listing = self.arch.list( + "path/path", + "things", + query={"queryfield1": "queryvalue1"}, + ) + responses = [r for r in listing] + self.assertEqual( + len(responses), + 1, + msg="incorrect number of responses", + ) + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + (f"url/{ROOT}/path/path?queryfield1=queryvalue1", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_list_with_page_size(self, mock_get): + ''' + Test default list method + ''' + values = ("value10", "value11") + mock_get.return_value = 
MockResponse( + 200, + things=[ + { + "field1": values[0], + }, + { + "field1": values[1], + }, + ], + ) + listing = self.arch.list( + "path/path", + "things", + page_size=2, + ) + responses = [r for r in listing] + self.assertEqual( + len(responses), + 2, + msg="incorrect number of responses", + ) + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + (f"url/{ROOT}/path/path?page_size=2", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + for i, r in enumerate(responses): + self.assertEqual( + r['field1'], + values[i], + msg="Incorrect response body value", + ) + + @mock.patch('requests.get') + def test_list_with_multiple_pages(self, mock_get): + ''' + Test default list method + ''' + values = ("value10", "value11", "value12", "value13") + paging = ("page_size=2", "page_token=token") + mock_get.side_effect =[ + MockResponse( + 200, + things=[ + { + "field1": values[0], + }, + { + "field1": values[1], + }, + ], + next_page_token="token", + ), + MockResponse( + 200, + things=[ + { + "field1": values[2], + }, + { + "field1": values[3], + }, + ], + ), + ] + listing = self.arch.list( + "path/path", + "things", + page_size=2, + ) + responses = [r for r in listing] + self.assertEqual( + len(responses), + 4, + msg="incorrect number of responses", + ) + for i, a in enumerate(mock_get.call_args_list): + self.assertEqual( + tuple(a), + ( + (f"url/{ROOT}/path/path?{paging[i]}", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + for i, r in enumerate(responses): + self.assertEqual( + r['field1'], + values[i], + msg="Incorrect response body value", + ) + + +class TestArchivistSignature(TestArchivistMethods): + ''' + Test Archivist get_by_signature method + ''' + 
@mock.patch('requests.get') + def test_get_by_signature(self, mock_get): + ''' + Test default get_by_signature method + ''' + mock_get.return_value = MockResponse( + 200, + things=[ + { + "field1": "value1", + }, + ], + ) + entity = self.arch.get_by_signature("path/path", "things", {"field1": "value1"}) + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + (f"url/{ROOT}/path/path?page_size=2&field1=value1", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_get_by_signature_not_found(self, mock_get): + ''' + Test default get_by_signature method + ''' + mock_get.return_value = MockResponse( + 200, + things=[], + ) + with self.assertRaises(ArchivistNotFoundError): + entity = self.arch.get_by_signature("path/path", "things", {"field1": "value1"}) + + @mock.patch('requests.get') + def test_get_by_signature_duplicate(self, mock_get): + ''' + Test default get_by_signature method + ''' + mock_get.return_value = MockResponse( + 200, + things=[ + { + "field1": "value1", + }, + { + "field1": "value1", + }, + ], + ) + with self.assertRaises(ArchivistDuplicateError): + entity = self.arch.get_by_signature("path/path", "things", {"field1": "value1"}) + + @mock.patch('requests.get') + def test_get_by_signature_with_bad_field(self, mock_get): + ''' + Test default list method with error + ''' + mock_get.return_value = MockResponse( + 200, + things=[ + { + "field1": "value1", + }, + ], + ) + with self.assertRaises(ArchivistBadFieldError): + entity = self.arch.get_by_signature("path/path", "badthings", {"field1": "value1"}) diff --git a/unittests/testassets.py b/unittests/testassets.py new file mode 100644 index 00000000..5b99b4f3 --- /dev/null +++ b/unittests/testassets.py @@ -0,0 +1,680 @@ +''' +Test archivist +''' + +import json + +from unittest import TestCase, mock + +from 
archivist.archivist import Archivist +from archivist.assets import DEFAULT_PAGE_SIZE +from archivist import confirm +from archivist.constants import ( + ASSETS_LABEL, + ASSETS_SUBPATH, + HEADERS_REQUEST_TOTAL_COUNT, + HEADERS_TOTAL_COUNT, + ROOT, +) +from archivist.errors import ArchivistUnconfirmedError + +from .mock_response import MockResponse + +# pylint: disable=missing-docstring +# pylint: disable=unnecessary-comprehension +# pylint: disable=unused-variable + + +BEHAVIOURS = [ + "Firmware", + "Maintenance", + "RecordEvidence", "LocationUpdate", "Attachments", +] +PRIMARY_IMAGE = { + "arc_display_name": "arc_primary_image", + "arc_attachment_identity": "blobs/87b1a84c-1c6f-442b-923e-a97516f4d275", + "arc_hash_alg": "SHA256", + "arc_hash_value": "246c316e2cd6971ce5c83a3e61f9880fa6e2f14ae2976ee03500eb282fd03a60" +} +SECONDARY_IMAGE = { + "arc_display_name": "arc_secondary_image", + "arc_attachment_identity": "blobs/87b1a84c-1c6f-442b-923e-a97516f4d275", + "arc_hash_alg": "SHA256", + "arc_hash_value": "246c316e2cd6971ce5c83a3e61f9880fa6e2f14ae2976ee03500eb282fd03a60" +} +TERTIARY_IMAGE = { + "arc_attachment_identity": "blobs/87b1a84c-1c6f-442b-923e-a97516f4d275", + "arc_hash_alg": "SHA256", + "arc_hash_value": "246c316e2cd6971ce5c83a3e61f9880fa6e2f14ae2976ee03500eb282fd03a60" +} +ASSET_NAME = "tcl.ppj.003" +BASE_ATTRS = { + "arc_firmware_version": "1.0", + "arc_serial_number": "vtl-x4-07", + "arc_description": "Traffic flow control light at A603 North East", + "arc_home_location_identity": "locations/115340cf-f39e-4d43-a2ee-8017d672c6c6", + "arc_display_type": "Traffic light with violation camera", + "some_custom_attribute": "value", +} +ATTRS_WITH_NAME = { + **BASE_ATTRS, + "arc_display_name": ASSET_NAME, +} + +ATTRS = { + **ATTRS_WITH_NAME, + "arc_attachments": [ + TERTIARY_IMAGE, + SECONDARY_IMAGE, + PRIMARY_IMAGE, + ], +} +# also has no arc_display_name +ATTRS_NO_ATTACHMENTS = { + **BASE_ATTRS, +} + +IDENTITY = 
f'{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' +SUBPATH = f"{ASSETS_SUBPATH}/{ASSETS_LABEL}" + +# TBD: add properties as well +REQUEST = { + 'behaviours': BEHAVIOURS, + 'attributes': ATTRS, +} +REQUEST_DATA = json.dumps(REQUEST) + +RESPONSE = { + 'identity': IDENTITY, + 'behaviours': BEHAVIOURS, + 'attributes': ATTRS, + 'confirmation_status': 'CONFIRMED', +} + +RESPONSE_NO_ATTACHMENTS = { + 'identity': IDENTITY, + 'behaviours': BEHAVIOURS, + 'attributes': ATTRS_NO_ATTACHMENTS, + 'confirmation_status': 'CONFIRMED', +} +RESPONSE_NO_CONFIRMATION = { + 'identity': IDENTITY, + 'behaviours': BEHAVIOURS, + 'attributes': ATTRS, +} +RESPONSE_PENDING = { + 'identity': IDENTITY, + 'behaviours': BEHAVIOURS, + 'attributes': ATTRS, + 'confirmation_status': 'PENDING', +} +RESPONSE_FAILED = { + 'identity': IDENTITY, + 'behaviours': BEHAVIOURS, + 'attributes': ATTRS, + 'confirmation_status': 'FAILED', +} + + +class TestAssets(TestCase): + ''' + Test Archivist Assets Create method + ''' + maxDiff = None + + def setUp(self): + self.arch = Archivist("url", auth="authauthauth") + self.confirm_MAX_TIME = confirm.MAX_TIME + confirm.MAX_TIME = 2 + + def tearDown(self): + confirm.MAX_TIME = self.confirm_MAX_TIME + + @mock.patch('requests.post') + def test_assets_create(self, mock_post): + ''' + Test asset creation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + + asset = self.arch.assets.create(BEHAVIOURS, ATTRS, confirm=False) + self.assertEqual( + tuple(mock_post.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}" + ), + ), + { + "data": REQUEST_DATA, + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="CREATE method called incorrectly", + ) + self.assertEqual( + asset, + RESPONSE, + msg="CREATE method called incorrectly", + ) + self.assertEqual( + asset.primary_image, + PRIMARY_IMAGE, + msg="Incorrect primary image", + ) + self.assertEqual( + 
asset.name, + ASSET_NAME, + msg="Incorrect name property", + ) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_assets_create_with_confirmation(self, mock_post, mock_get): + ''' + Test asset creation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.return_value = MockResponse(200, **RESPONSE) + asset = self.arch.assets.create(BEHAVIOURS, ATTRS, confirm=True) + self.assertEqual( + asset, + RESPONSE, + msg="CREATE method called incorrectly", + ) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_assets_create_with_confirmation_no_confirmed_status( + self, + mock_post, + mock_get, + ): + ''' + Test asset confirmation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.return_value = MockResponse(200, **RESPONSE_NO_CONFIRMATION) + + with self.assertRaises(ArchivistUnconfirmedError): + asset = self.arch.assets.create(BEHAVIOURS, ATTRS, confirm=True) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_assets_create_with_confirmation_pending_status( + self, + mock_post, + mock_get, + ): + ''' + Test asset confirmation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.side_effect =[ + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE), + ] + asset = self.arch.assets.create(BEHAVIOURS, ATTRS, confirm=True) + self.assertEqual( + asset, + RESPONSE, + msg="CREATE method called incorrectly", + ) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_assets_create_with_confirmation_failed_status( + self, + mock_post, + mock_get, + ): + ''' + Test asset confirmation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.side_effect =[ + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_FAILED), + ] + with self.assertRaises(ArchivistUnconfirmedError): + asset = self.arch.assets.create(BEHAVIOURS, ATTRS, confirm=True) + + @mock.patch('requests.get') + 
@mock.patch('requests.post') + def test_assets_create_with_confirmation_always_pending_status( + self, + mock_post, + mock_get, + ): + ''' + Test asset confirmation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.side_effect =[ + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + ] + with self.assertRaises(ArchivistUnconfirmedError): + asset = self.arch.assets.create(BEHAVIOURS, ATTRS, confirm=True) + + @mock.patch('requests.get') + def test_assets_read_with_out_primary_image(self, mock_get): + ''' + Test asset reading + ''' + mock_get.return_value = MockResponse(200, **RESPONSE_NO_ATTACHMENTS) + + asset = self.arch.assets.read(IDENTITY) + self.assertEqual( + asset, + RESPONSE_NO_ATTACHMENTS, + msg="READ method called incorrectly", + ) + self.assertIsNone( + asset.primary_image, + msg="There should be no primary image", + ) + self.assertIsNone( + asset.name, + msg="There should be no name property", + ) + + @mock.patch('requests.get') + def test_assets_count(self, mock_get): + ''' + Test asset counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + assets=[ + RESPONSE, + ], + ) + + count = self.arch.assets.count() + self.assertEqual( + count, + 1, + msg="Incorrect count", + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + "?page_size=1" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_assets_count_with_props_query(self, mock_get): + ''' + Test asset counting + ''' + mock_get.return_value = 
MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + assets=[ + RESPONSE, + ], + ) + + count = self.arch.assets.count( + props={'confirmation_status': 'CONFIRMED', }, + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + "?page_size=1" + "&confirmation_status=CONFIRMED" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_assets_count_with_attrs_query(self, mock_get): + ''' + Test asset counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + assets=[ + RESPONSE, + ], + ) + + count = self.arch.assets.count( + attrs={"arc_firmware_version": "1.0"}, + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + "?page_size=1" + "&attributes.arc_firmware_version=1.0" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_assets_wait_for_confirmed(self, mock_get): + ''' + Test asset counting + ''' + ## last call to get looks for FAILED assets + status = ('PENDING', 'PENDING', 'FAILED') + mock_get.side_effect =[ + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 0}, + assets=[], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 0}, + assets=[], + ), + ] + + self.arch.assets.wait_for_confirmed() + for i, a in enumerate(mock_get.call_args_list): + self.assertEqual( + tuple(a), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + "?page_size=1" + f"&confirmation_status={status[i]}" + ), + ), + { + 'headers': 
{ + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_assets_wait_for_confirmed_timeout(self, mock_get): + ''' + Test asset wait_for_confirmed that times out while assets stay pending + ''' + ## last call to get looks for FAILED assets + mock_get.side_effect =[ + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + ] + + self.arch.assets.timeout = 3 + with self.assertRaises(ArchivistUnconfirmedError): + self.arch.assets.wait_for_confirmed() + + @mock.patch('requests.get') + def test_assets_wait_for_confirmed_failed(self, mock_get): + ''' + Test asset wait_for_confirmed when an asset ends up FAILED + ''' + ## last call to get looks for FAILED assets + mock_get.side_effect =[ + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 0}, + assets=[], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + assets=[ + RESPONSE_FAILED, + ], + ), + ] + + with self.assertRaises(ArchivistUnconfirmedError): + self.arch.assets.wait_for_confirmed() + + @mock.patch('requests.get') + def test_assets_list(self, mock_get): + ''' + Test asset listing + ''' + mock_get.return_value = MockResponse( + 200, + assets=[ + RESPONSE, + ], + ) + + listing = self.arch.assets.list() + assets = [a for a in listing] + self.assertEqual( + 
len(assets), + 1, + msg="incorrect number of assets", + ) + for asset in assets: + self.assertEqual( + asset, + RESPONSE, + msg="Incorrect asset listed", + ) + + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + (f"url/{ROOT}/{SUBPATH}?page_size={DEFAULT_PAGE_SIZE}", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_assets_list_with_query(self, mock_get): + ''' + Test asset listing + ''' + mock_get.return_value = MockResponse( + 200, + assets=[ + RESPONSE, + ], + ) + + listing = self.arch.assets.list( + props={'confirmation_status': 'CONFIRMED', }, + attrs={"arc_firmware_version": "1.0"}, + ) + assets = [a for a in listing] + self.assertEqual( + len(assets), + 1, + msg="incorrect number of assets", + ) + for asset in assets: + self.assertEqual( + asset, + RESPONSE, + msg="Incorrect asset listed", + ) + + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + f"?page_size={DEFAULT_PAGE_SIZE}" + "&attributes.arc_firmware_version=1.0" + "&confirmation_status=CONFIRMED" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_assets_read_by_signature(self, mock_get): + ''' + Test asset listing + ''' + mock_get.return_value = MockResponse( + 200, + assets=[ + RESPONSE, + ], + ) + + asset = self.arch.assets.read_by_signature() + self.assertEqual( + asset, + RESPONSE, + msg="Incorrect asset listed", + ) + + self.assertEqual( + tuple(mock_get.call_args), + ( + (f"url/{ROOT}/{SUBPATH}?page_size=2", ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': 
None, + }, + ), + msg="GET method called incorrectly", + ) diff --git a/unittests/testattachments.py b/unittests/testattachments.py new file mode 100644 index 00000000..da20f292 --- /dev/null +++ b/unittests/testattachments.py @@ -0,0 +1,159 @@ +''' +Test archivist +''' + +from io import BytesIO +import json +from unittest import TestCase, mock + +from archivist.archivist import Archivist +from archivist.constants import ROOT, ATTACHMENTS_SUBPATH, ATTACHMENTS_LABEL + +from .mock_response import MockResponse + + +PROPS = { + "hash": { + "alg": "SHA256", + "value": "xxxxxxxxxxxxxxxxxxxxxxx" + }, + "mime_type": "image/jpeg", + "timestamp_accepted": "2019-11-07T15:31:49Z", + "size": 31424, +} +IDENTITY = f'{ATTACHMENTS_LABEL}/xxxxxxxx' +SUBPATH = f'{ATTACHMENTS_SUBPATH}/{ATTACHMENTS_LABEL}' + +RESPONSE = { + **PROPS, + 'identity': IDENTITY, +} +REQUEST_DATA = json.dumps(PROPS) + + +class TestAttachments(TestCase): + ''' + Test Archivist Attachments Create method + ''' + maxDiff = None + + def setUp(self): + self.arch = Archivist("url", auth="authauthauth") + self.mockstream = BytesIO(b"somelongstring") + + @mock.patch('requests.post') + def test_attachments_upload(self, mock_post): + ''' + Test attachment upload + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + + attachment = self.arch.attachments.upload(self.mockstream) + args, kwargs = mock_post.call_args + self.assertEqual( + args, + (f'url/{ROOT}/{SUBPATH}',), + msg="UPLOAD method called incorrectly", + ) + self.assertTrue( + 'headers' in kwargs, + msg="UPLOAD no headers found", + ) + headers = kwargs['headers'] + self.assertTrue( + 'authorization' in headers, + msg="UPLOAD no authorization found", + ) + self.assertEqual( + headers['authorization'], + 'Bearer authauthauth', + msg="UPLOAD incorrect authorization", + ) + self.assertTrue( + headers['content-type'].startswith('multipart/form-data;'), + msg="UPLOAD incorrect content-type", + ) + self.assertTrue( + kwargs['verify'], + msg="UPLOAD method 
called incorrectly", + ) + self.assertIsNone( + kwargs['cert'], + msg="UPLOAD method called incorrectly", + ) + self.assertTrue( + 'data' in kwargs, + msg="UPLOAD no data found", + ) + fields = kwargs['data'].fields + self.assertTrue( + 'file' in fields, + msg="UPLOAD no file found", + ) + self.assertEqual( + fields['file'][0], + 'filename', + msg="UPLOAD incorrect filename", + ) + self.assertEqual( + fields['file'][2], + 'image/jpg', + msg="UPLOAD incorrect filetype", + ) + self.assertEqual( + attachment, + RESPONSE, + msg="UPLOAD method called incorrectly", + ) + + @mock.patch('requests.get') + def test_attachments_download(self, mock_get): + ''' + Test attachment download + ''' + def iter_content(): + i = 0 + + def filedata(chunk_size=4096): # pylint: disable=unused-argument + nonlocal i + while i < 4: + i += 1 + + if i == 2: + yield None + + yield b"chunkofbytes" + + return filedata + + mock_get.return_value = MockResponse( + 200, + iter_content=iter_content(), + **RESPONSE, + ) + with BytesIO() as fd: + attachment = self.arch.attachments.download(IDENTITY, fd) + args, kwargs = mock_get.call_args + self.assertEqual( + args, + (f'url/{ROOT}/{ATTACHMENTS_SUBPATH}/{IDENTITY}',), + msg="DOWNLOAD method called incorrectly", + ) + self.assertEqual( + kwargs, + { + 'cert': None, + 'headers': { + 'authorization': 'Bearer authauthauth', + 'content-type': 'application/json', + }, + 'stream': True, + 'verify': True, + }, + msg="DOWNLOAD method called incorrectly", + ) + self.assertEqual( + attachment, + RESPONSE, + msg="DOWNLOAD method called incorrectly", + ) diff --git a/unittests/testerrors.py b/unittests/testerrors.py new file mode 100644 index 00000000..4d4cee63 --- /dev/null +++ b/unittests/testerrors.py @@ -0,0 +1,225 @@ +''' +Test archivist +''' + +# pylint: disable=attribute-defined-outside-init +# pylint: disable=missing-docstring +# pylint: disable=too-few-public-methods + +import json + +from unittest import TestCase + +from archivist.errors import ( + 
parse_response, + Archivist4xxError, + Archivist5xxError, + ArchivistBadRequestError, + ArchivistError, + ArchivistForbiddenError, + ArchivistNotFoundError, + ArchivistNotImplementedError, + ArchivistUnauthenticatedError, + ArchivistUnavailableError, +) + +from .mock_response import MockResponse + + +class TestErrors(TestCase): + ''' + Test exceptions for archivist + ''' + def test_errors_200(self): + ''' + Test errors + ''' + response = MockResponse(200) + error = parse_response(response) + self.assertEqual( + error, + None, + msg="error should be None", + ) + + def test_errors_300(self): + ''' + Test errors + ''' + response = MockResponse(300) + error = parse_response(response) + self.assertEqual( + error, + None, + msg="error should be None", + ) + + def test_errors_400(self): + ''' + Test errors + ''' + response = MockResponse(400, error="some error") + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(ArchivistBadRequestError) as ex: + raise error + self.assertEqual( + str(ex.exception), + '{"error": "some error"} (400)', + msg="incorrect error", + ) + + def test_errors_401(self): + ''' + Test errors + ''' + response = MockResponse(401, error="some error") + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(ArchivistUnauthenticatedError) as ex: + raise error + self.assertEqual( + str(ex.exception), + '{"error": "some error"} (401)', + msg="incorrect error", + ) + + def test_errors_403(self): + ''' + Test errors + ''' + response = MockResponse(403, error="some error") + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(ArchivistForbiddenError) as ex: + raise error + self.assertEqual( + str(ex.exception), + '{"error": "some error"} (403)', + ) + + def test_errors_404(self): + ''' + Test errors + ''' + class Object: + pass + + 
request = Object() + request.body = json.dumps({'identity': 'entity/xxxxx'}) + response = MockResponse( + 404, + request=request, + error="some error", + ) + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(ArchivistNotFoundError) as ex: + raise error + self.assertEqual( + str(ex.exception), + 'entity/xxxxx not found (404)', + msg="incorrect error", + ) + + def test_errors_4xx(self): + ''' + Test errors + ''' + response = MockResponse(405, error="some error") + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(Archivist4xxError) as ex: + raise error + self.assertEqual( + str(ex.exception), + '{"error": "some error"} (405)', + msg="incorrect error", + ) + + def test_errors_500(self): + ''' + Test errors + ''' + response = MockResponse(500, error="some error") + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(Archivist5xxError) as ex: + raise error + self.assertEqual( + str(ex.exception), + '{"error": "some error"} (500)', + msg="incorrect error", + ) + + def test_errors_501(self): + ''' + Test errors + ''' + response = MockResponse(501, error="some error") + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(ArchivistNotImplementedError) as ex: + raise error + self.assertEqual( + str(ex.exception), + '{"error": "some error"} (501)', + msg="incorrect error", + ) + + def test_errors_503(self): + ''' + Test errors + ''' + response = MockResponse(503, error="some error") + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(ArchivistUnavailableError) as ex: + raise error + self.assertEqual( + str(ex.exception), + '{"error": "some error"} (503)', + msg="incorrect error", + ) + + def 
test_errors_600(self): + ''' + Test errors + ''' + response = MockResponse(600, error="some error") + error = parse_response(response) + self.assertIsNotNone( + error, + msg="error should not be None", + ) + with self.assertRaises(ArchivistError) as ex: + raise error + self.assertEqual( + str(ex.exception), + '{"error": "some error"} (600)', + msg="incorrect error", + ) diff --git a/unittests/testevents.py b/unittests/testevents.py new file mode 100644 index 00000000..afd78199 --- /dev/null +++ b/unittests/testevents.py @@ -0,0 +1,885 @@ +''' +Test archivist +''' + +import json +from unittest import TestCase, mock + +from archivist.archivist import Archivist +from archivist import confirm +from archivist.constants import ( + ROOT, + ASSETS_LABEL, + ASSETS_WILDCARD, + ASSETS_SUBPATH, + ATTACHMENTS_LABEL, + EVENTS_LABEL, + HEADERS_REQUEST_TOTAL_COUNT, + HEADERS_TOTAL_COUNT, +) +from archivist.errors import ArchivistUnconfirmedError +from archivist.events import Event, DEFAULT_PAGE_SIZE + +from .mock_response import MockResponse + +# pylint: disable=missing-docstring +# pylint: disable=unnecessary-comprehension +# pylint: disable=unused-variable + +ASSET_ID = f"{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + +PRIMARY_IMAGE = { + "arc_attachment_identity": f"{ATTACHMENTS_LABEL}/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + "arc_display_name": "an attachment 2", + "arc_hash_value": "042aea10a0f14f2d391373599be69d53a75dde9951fc3d3cd10b6100aa7a9f24", + "arc_hash_alg": "sha256", +} +SECONDARY_IMAGE = { + "arc_attachment_identity": f"{ATTACHMENTS_LABEL}/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + "arc_display_name": "an attachment 1", + "arc_hash_value": "jnwpjocoqsssnundwlqalsqiiqsqp;lpiwpldkndwwlskqaalijopjkokkkojijl", + "arc_hash_alg": "sha256", +} +PRINCIPAL_DECLARED = { + "issuer": "idp.synsation.io/1234", + "subject": "phil.b", + "email": "phil.b@synsation.io", + "display_name": "Declared", +} +PRINCIPAL_ACCEPTED = { + "issuer": "idp.synsation.io/1234", + "subject": "phil.b", + 
"email": "phil.b@synsation.io", + "display_name": "Accepted", +} +PROPS = { + "operation": "Attach", + "behaviour": "Attachments", + "timestamp_declared": "2019-11-27T14:44:19Z", + "principal_accepted": PRINCIPAL_ACCEPTED, +} +PROPS_WITH_NO_TIMESTAMP = { + "operation": "Attach", + "behaviour": "Attachments", + "principal_accepted": PRINCIPAL_ACCEPTED, +} +PROPS_WITH_TIMESTAMP_ACCEPTED = { + "operation": "Attach", + "behaviour": "Attachments", + "timestamp_accepted": "2021-04-08T14:44:19Z", + "principal_accepted": PRINCIPAL_ACCEPTED, +} +PROPS_WITH_PRINCIPAL_DECLARED = { + "operation": "Attach", + "behaviour": "Attachments", + "timestamp_declared": "2019-11-27T14:44:19Z", + "principal_declared": PRINCIPAL_DECLARED, +} +PROPS_WITH_NO_PRINCIPAL = { + "operation": "Attach", + "behaviour": "Attachments", + "timestamp_declared": "2019-11-27T14:44:19Z", +} + +EVENT_ATTRS = { + "arc_append_attachments": [ + SECONDARY_IMAGE, + PRIMARY_IMAGE, + ], +} +ASSET_ATTRS = { + "external_container": 'assets/xxxx', +} + +IDENTITY = f'{ASSET_ID}/{EVENTS_LABEL}/xxxxxxxxxxxxxxxxxxxx' + +REQUEST = { + **PROPS, + 'event_attributes': EVENT_ATTRS, +} +REQUEST_DATA = json.dumps(REQUEST) + +REQUEST_WITH_ASSET_ATTRS = { + **REQUEST, + 'asset_attributes': ASSET_ATTRS, +} +REQUEST_DATA_WITH_ASSET_ATTRS = json.dumps(REQUEST_WITH_ASSET_ATTRS) + +REQUEST_WITH_NO_PRINCIPAL = { + **PROPS_WITH_NO_PRINCIPAL, + 'event_attributes': EVENT_ATTRS, +} +REQUEST_WITH_NO_PRINCIPAL_DATA = json.dumps(REQUEST_WITH_NO_PRINCIPAL) + +RESPONSE = { + **PROPS, + 'identity': IDENTITY, + 'event_attributes': EVENT_ATTRS, + 'confirmation_status': 'CONFIRMED', +} +RESPONSE_WITH_ASSET_ATTRS = { + **RESPONSE, + 'asset_attributes': ASSET_ATTRS, +} +RESPONSE_NO_CONFIRMATION = { + **PROPS, + 'identity': IDENTITY, + 'event_attributes': EVENT_ATTRS, +} +RESPONSE_PENDING = { + **PROPS, + 'identity': IDENTITY, + 'event_attributes': EVENT_ATTRS, + 'confirmation_status': 'PENDING', +} +RESPONSE_FAILED = { + **PROPS, + 'identity': 
IDENTITY, + 'event_attributes': EVENT_ATTRS, + 'confirmation_status': 'FAILED', +} +RESPONSE_WITH_NO_TIMESTAMP = { + **PROPS_WITH_NO_TIMESTAMP, + 'identity': IDENTITY, + 'event_attributes': EVENT_ATTRS, + 'confirmation_status': 'CONFIRMED', +} +RESPONSE_WITH_TIMESTAMP_ACCEPTED = { + **PROPS_WITH_TIMESTAMP_ACCEPTED, + 'identity': IDENTITY, + 'event_attributes': EVENT_ATTRS, + 'confirmation_status': 'CONFIRMED', +} + +RESPONSE_WITH_PRINCIPAL_DECLARED = { + **PROPS_WITH_PRINCIPAL_DECLARED, + 'identity': IDENTITY, + 'event_attributes': EVENT_ATTRS, + 'confirmation_status': 'CONFIRMED', +} + +RESPONSE_WITH_NO_PRINCIPAL = { + **PROPS_WITH_NO_PRINCIPAL, + 'identity': IDENTITY, + 'event_attributes': EVENT_ATTRS, + 'confirmation_status': 'CONFIRMED', +} + + +class TestEvent(TestCase): + ''' + Test Archivist Events Create method + ''' + maxDiff = None + + def test_event_who_accepted(self): + event = Event(**RESPONSE) + self.assertEqual( + event.who, + "Accepted", + msg="Incorrect who", + ) + + def test_event_who_none(self): + event = Event(**RESPONSE_WITH_NO_PRINCIPAL) + self.assertEqual( + event.who, + None, + msg="who should be None", + ) + + def test_event_who_declared(self): + event = Event(**RESPONSE_WITH_PRINCIPAL_DECLARED) + self.assertEqual( + event.who, + "Declared", + msg="Incorrect who", + ) + + def test_event_when_declared(self): + event = Event(**RESPONSE) + self.assertEqual( + event.when, + "2019-11-27T14:44:19Z", + msg="Incorrect when", + ) + + def test_event_when_accepted(self): + event = Event(**RESPONSE_WITH_TIMESTAMP_ACCEPTED) + self.assertEqual( + event.when, + "2021-04-08T14:44:19Z", + msg="Incorrect when", + ) + + def test_event_when_none(self): + event = Event(**RESPONSE_WITH_NO_TIMESTAMP) + self.assertEqual( + event.when, + None, + msg="Incorrect when", + ) + + +class TestEvents(TestCase): + ''' + Test Archivist Events Create method + ''' + maxDiff = None + + def setUp(self): + self.arch = Archivist("url", auth="authauthauth") + self.confirm_MAX_TIME 
= confirm.MAX_TIME + confirm.MAX_TIME = 2 + + def tearDown(self): + confirm.MAX_TIME = self.confirm_MAX_TIME + + @mock.patch('requests.post') + def test_events_create(self, mock_post): + ''' + Test event creation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + + event = self.arch.events.create(ASSET_ID, PROPS, EVENT_ATTRS, confirm=False) + self.assertEqual( + tuple(mock_post.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}" + ), + ), + { + "data": REQUEST_DATA, + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="CREATE method called incorrectly", + ) + self.assertEqual( + event, + RESPONSE, + msg="CREATE method called incorrectly", + ) + + @mock.patch('requests.post') + def test_events_create_with_asset_attrs(self, mock_post): + ''' + Test event creation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE_WITH_ASSET_ATTRS) + + event = self.arch.events.create( + ASSET_ID, + PROPS, + EVENT_ATTRS, + asset_attrs=ASSET_ATTRS, + confirm=False, + ) + self.assertEqual( + tuple(mock_post.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}" + ), + ), + { + "data": REQUEST_DATA_WITH_ASSET_ATTRS, + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="CREATE method called incorrectly", + ) + self.assertEqual( + event, + RESPONSE_WITH_ASSET_ATTRS, + msg="CREATE method called incorrectly", + ) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_events_create_with_confirmation(self, mock_post, mock_get): + ''' + Test event creation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.return_value = MockResponse(200, **RESPONSE) + + event = self.arch.events.create(ASSET_ID, PROPS, 
EVENT_ATTRS, confirm=True) + self.assertEqual( + event, + RESPONSE, + msg="CREATE method called incorrectly", + ) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_events_create_with_confirmation_no_confirmed_status( + self, + mock_post, + mock_get, + ): + ''' + Test asset confirmation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.return_value = MockResponse(200, **RESPONSE_NO_CONFIRMATION) + + with self.assertRaises(ArchivistUnconfirmedError): + event = self.arch.events.create(ASSET_ID, PROPS, EVENT_ATTRS, confirm=True) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_events_create_with_confirmation_pending_status( + self, + mock_post, + mock_get, + ): + ''' + Test asset confirmation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.side_effect =[ + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE), + ] + event = self.arch.events.create(ASSET_ID, PROPS, EVENT_ATTRS, confirm=True) + self.assertEqual( + event, + RESPONSE, + msg="CREATE method called incorrectly", + ) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_events_create_with_confirmation_failed_status( + self, + mock_post, + mock_get, + ): + ''' + Test asset confirmation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.side_effect =[ + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_FAILED), + ] + with self.assertRaises(ArchivistUnconfirmedError): + event = self.arch.events.create(ASSET_ID, PROPS, EVENT_ATTRS, confirm=True) + + @mock.patch('requests.get') + @mock.patch('requests.post') + def test_events_create_with_confirmation_always_pending_status( + self, + mock_post, + mock_get, + ): + ''' + Test asset confirmation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + mock_get.side_effect =[ + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, 
**RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + MockResponse(200, **RESPONSE_PENDING), + ] + self.arch.events.timeout = 5 + with self.assertRaises(ArchivistUnconfirmedError): + event = self.arch.events.create(ASSET_ID, PROPS, EVENT_ATTRS, confirm=True) + + @mock.patch('requests.get') + def test_events_read(self, mock_get): + ''' + Test event reading + ''' + mock_get.return_value = MockResponse(200, **RESPONSE) + + event = self.arch.events.read(IDENTITY) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + self.assertEqual( + event, + RESPONSE, + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_read_with_no_principal(self, mock_get): + ''' + Test event reading + ''' + mock_get.return_value = MockResponse(200, **RESPONSE) + + event = self.arch.events.read(IDENTITY) + self.assertEqual( + event, + RESPONSE, + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_count(self, mock_get): + ''' + Test event counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + events=[ + RESPONSE, + ], + ) + + count = self.arch.events.count(asset_id=ASSET_ID) + self.assertEqual( + count, + 1, + msg="Incorrect count", + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}" + "?page_size=1" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 
'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_count_with_props_query(self, mock_get): + ''' + Test event counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + events=[ + RESPONSE, + ], + ) + + count = self.arch.events.count( + asset_id=ASSET_ID, + props={'confirmation_status': 'CONFIRMED', }, + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}" + "?page_size=1" + "&confirmation_status=CONFIRMED" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_count_with_attrs_query(self, mock_get): + ''' + Test event counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + events=[ + RESPONSE, + ], + ) + + count = self.arch.events.count( + asset_id=ASSET_ID, + attrs={"arc_firmware_version": "1.0"}, + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}" + "?page_size=1" + "&event_attributes.arc_firmware_version=1.0" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_count_with_wildcard_asset(self, mock_get): + ''' + Test event counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + events=[ + RESPONSE, + ], + ) + + count = self.arch.events.count( + attrs={"arc_firmware_version": 
"1.0"}, + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_WILDCARD}" + f"/{EVENTS_LABEL}" + "?page_size=1" + "&event_attributes.arc_firmware_version=1.0" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_wait_for_confirmed(self, mock_get): + ''' + Test event counting + ''' + ## last call to get looks for FAILED assets + status = ('PENDING', 'PENDING', 'FAILED') + mock_get.side_effect =[ + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 2}, + assets=[ + RESPONSE_PENDING, + ], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 0}, + assets=[], + ), + MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 0}, + assets=[], + ), + ] + + self.arch.events.wait_for_confirmed() + for i, a in enumerate(mock_get.call_args_list): + self.assertEqual( + tuple(a), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_WILDCARD}" + f"/{EVENTS_LABEL}" + "?page_size=1" + f"&confirmation_status={status[i]}" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_list(self, mock_get): + ''' + Test event listing + ''' + mock_get.return_value = MockResponse( + 200, + events=[ + RESPONSE, + ], + ) + + listing = self.arch.events.list(asset_id=ASSET_ID) + events = [a for a in listing] + self.assertEqual( + len(events), + 1, + msg="incorrect number of events", + ) + for event in events: + self.assertEqual( + event, + RESPONSE, + msg="Incorrect event listed", + ) + + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + ( + ( + 
f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}" + f"?page_size={DEFAULT_PAGE_SIZE}" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + + ) + + @mock.patch('requests.get') + def test_events_list_with_query(self, mock_get): + ''' + Test event listing + ''' + mock_get.return_value = MockResponse( + 200, + events=[ + RESPONSE, + ], + ) + + listing = self.arch.events.list( + asset_id=ASSET_ID, + props={'confirmation_status': 'CONFIRMED', }, + attrs={"arc_firmware_version": "1.0"}, + ) + events = [a for a in listing] + self.assertEqual( + len(events), + 1, + msg="incorrect number of events", + ) + for event in events: + self.assertEqual( + event, + RESPONSE, + msg="Incorrect event listed", + ) + + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}" + f"?page_size={DEFAULT_PAGE_SIZE}" + "&confirmation_status=CONFIRMED" + "&event_attributes.arc_firmware_version=1.0" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_list_with_wildcard_asset(self, mock_get): + ''' + Test event listing + ''' + mock_get.return_value = MockResponse( + 200, + events=[ + RESPONSE, + ], + ) + + listing = self.arch.events.list( + props={'confirmation_status': 'CONFIRMED', }, + attrs={"arc_firmware_version": "1.0"}, + ) + events = [a for a in listing] + self.assertEqual( + len(events), + 1, + msg="incorrect number of events", + ) + for event in events: + self.assertEqual( + event, + RESPONSE, + msg="Incorrect event listed", + ) + + for a in mock_get.call_args_list: + self.assertEqual( + 
tuple(a), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_WILDCARD}" + f"/{EVENTS_LABEL}" + f"?page_size={DEFAULT_PAGE_SIZE}" + "&confirmation_status=CONFIRMED" + "&event_attributes.arc_firmware_version=1.0" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_events_read_by_signature(self, mock_get): + ''' + Test event listing + ''' + mock_get.return_value = MockResponse( + 200, + events=[ + RESPONSE, + ], + ) + + event = self.arch.events.read_by_signature(asset_id=ASSET_ID) + self.assertEqual( + event, + RESPONSE, + msg="Incorrect event listed", + ) + + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{ASSETS_SUBPATH}" + f"/{ASSETS_LABEL}/xxxxxxxxxxxxxxxxxxxx" + f"/{EVENTS_LABEL}" + f"?page_size=2" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) diff --git a/unittests/testlocations.py b/unittests/testlocations.py new file mode 100644 index 00000000..eea1fe3d --- /dev/null +++ b/unittests/testlocations.py @@ -0,0 +1,383 @@ +''' +Test archivist +''' + +import json +from unittest import TestCase, mock + +from archivist.archivist import Archivist +from archivist.constants import ( + ROOT, + HEADERS_REQUEST_TOTAL_COUNT, + HEADERS_TOTAL_COUNT, + LOCATIONS_SUBPATH, + LOCATIONS_LABEL, +) +from archivist.errors import ArchivistBadRequestError +from archivist.locations import DEFAULT_PAGE_SIZE + +from .mock_response import MockResponse + + +# pylint: disable=missing-docstring +# pylint: disable=unnecessary-comprehension +# pylint: disable=unused-variable + +PROPS = { + "display_name": "Macclesfield, Cheshire", + "description": "Manufacturing site, North West England, Macclesfield, Cheshire", + 
"latitude": "53.2546799", + "longitude": "-2.1213956,14.54", +} +ATTRS = { + "director": "John Smith", + "address": "Bridgewater, Somerset", + "facility_type": "Manufacture", + "support_email": "support@macclesfield.com", + "support_phone": "123 456 789" +} + +IDENTITY = f'{LOCATIONS_LABEL}/xxxxxxxx' +SUBPATH = f'{LOCATIONS_SUBPATH}/{LOCATIONS_LABEL}' + +RESPONSE = { + **PROPS, + 'identity': IDENTITY, + 'attributes': ATTRS, +} +REQUEST = { + **PROPS, + 'attributes': ATTRS, +} +REQUEST_DATA = json.dumps(REQUEST) + + +class TestLocations(TestCase): + ''' + Test Archivist Locations Create method + ''' + maxDiff = None + + def setUp(self): + self.arch = Archivist("url", auth="authauthauth") + + @mock.patch('requests.post') + def test_locations_create(self, mock_post): + ''' + Test location creation + ''' + mock_post.return_value = MockResponse(200, **RESPONSE) + + location = self.arch.locations.create(PROPS, attrs=ATTRS) + self.assertEqual( + tuple(mock_post.call_args), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + ), + ), + { + 'data': REQUEST_DATA, + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="CREATE method called incorrectly", + ) + self.assertEqual( + location, + RESPONSE, + msg="CREATE method called incorrectly", + ) + + @mock.patch('requests.get') + def test_locations_read(self, mock_get): + ''' + Test asset reading + ''' + mock_get.return_value = MockResponse(200, **RESPONSE) + + asset = self.arch.locations.read(IDENTITY) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{LOCATIONS_SUBPATH}/{IDENTITY}" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_locations_read_with_error(self, mock_get): + ''' + Test read method with error + ''' + 
mock_get.return_value = MockResponse(400) + with self.assertRaises(ArchivistBadRequestError): + resp = self.arch.locations.read(IDENTITY) + + @mock.patch('requests.get') + def test_locations_count(self, mock_get): + ''' + Test location counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + locations=[ + RESPONSE, + ], + ) + + count = self.arch.locations.count() + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + "?page_size=1" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + self.assertEqual( + count, + 1, + msg="Incorrect count", + ) + + @mock.patch('requests.get') + def test_locations_count_with_props_query(self, mock_get): + ''' + Test location counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + locations=[ + RESPONSE, + ], + ) + + count = self.arch.locations.count( + props={"display_name": "Macclesfield, Cheshire"}, + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + "?page_size=1" + "&display_name=Macclesfield, Cheshire" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_locations_count_with_attrs_query(self, mock_get): + ''' + Test location counting + ''' + mock_get.return_value = MockResponse( + 200, + headers={HEADERS_TOTAL_COUNT: 1}, + locations=[ + RESPONSE, + ], + ) + + count = self.arch.locations.count( + attrs={"director": "John Smith"}, + ) + self.assertEqual( + tuple(mock_get.call_args), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + "?page_size=1" + 
"&attributes.director=John Smith" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + HEADERS_REQUEST_TOTAL_COUNT: 'true', + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_locations_list(self, mock_get): + ''' + Test location listing + ''' + mock_get.return_value = MockResponse( + 200, + locations=[ + RESPONSE, + ], + ) + + listing = self.arch.locations.list() + locations = [a for a in listing] + self.assertEqual( + len(locations), + 1, + msg="incorrect number of locations", + ) + for location in locations: + self.assertEqual( + location, + RESPONSE, + msg="Incorrect location listed", + ) + + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + (f"url/{ROOT}/{SUBPATH}?page_size={DEFAULT_PAGE_SIZE}",), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_locations_list_with_query(self, mock_get): + ''' + Test location listing + ''' + mock_get.return_value = MockResponse( + 200, + locations=[ + RESPONSE, + ], + ) + + listing = self.arch.locations.list( + props={"display_name": "Macclesfield, Cheshire"}, + attrs={"director": "John Smith"}, + ) + locations = [a for a in listing] + self.assertEqual( + len(locations), + 1, + msg="incorrect number of locations", + ) + for location in locations: + self.assertEqual( + location, + RESPONSE, + msg="Incorrect location listed", + ) + + for a in mock_get.call_args_list: + self.assertEqual( + tuple(a), + ( + ( + ( + f"url/{ROOT}/{SUBPATH}" + f"?page_size={DEFAULT_PAGE_SIZE}" + "&attributes.director=John Smith" + "&display_name=Macclesfield, Cheshire" + ), + ), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': 
None, + }, + ), + msg="GET method called incorrectly", + ) + + @mock.patch('requests.get') + def test_locations_read_by_signature(self, mock_get): + ''' + Test location read_by_signature + ''' + mock_get.return_value = MockResponse( + 200, + locations=[ + RESPONSE, + ], + ) + + location = self.arch.locations.read_by_signature() + self.assertEqual( + location, + RESPONSE, + msg="Incorrect location listed", + ) + + self.assertEqual( + tuple(mock_get.call_args), + ( + (f"url/{ROOT}/{SUBPATH}?page_size=2",), + { + 'headers': { + 'content-type': 'application/json', + 'authorization': "Bearer authauthauth", + }, + 'verify': True, + 'cert': None, + }, + ), + msg="GET method called incorrectly", + )