diff --git a/.gitignore b/.gitignore index 6ee6dc7a..0cd1154a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,3 @@ -playground.py - .idea/inspectionProfiles/ ### macOS template # General diff --git a/.python-version b/.python-version index fc897a18..0833a98f 100644 --- a/.python-version +++ b/.python-version @@ -1,3 +1 @@ -3.7.3 -3.6.8 -2.7.16 +3.7.4 diff --git a/Makefile b/Makefile index e76fceb5..4152bab7 100644 --- a/Makefile +++ b/Makefile @@ -1,16 +1,20 @@ -PACKAGE := "axonius_api_client" +PACKAGE := axonius_api_client VERSION := $(shell grep __version__ $(PACKAGE)/version.py | cut -d\" -f2) -# FUTURE: write Makefile doc -# FUTURE: add check that only master branch can publish / git tag - .PHONY: build docs +help: + @cat Makefile.help + init: $(MAKE) pip_install_tools $(MAKE) clean $(MAKE) pyenv_init $(MAKE) pipenv_init + $(MAKE) pipenv_install_lint + $(MAKE) pipenv_install_dev + $(MAKE) pipenv_install_docs + $(MAKE) pipenv_install_build pip_install_tools: pip install --quiet --upgrade --requirement requirements-pkg.txt @@ -27,53 +31,58 @@ pipenv_install_build: pipenv_install_docs: pipenv run pip install --quiet --upgrade --requirement docs/requirements.txt -pipenv_clean: - pipenv --rm || true - pipenv_init: pipenv install --dev --skip-lock +pipenv_clean: + pipenv --rm || true + pyenv_init: - pyenv install 3.7.3 -s || true - pyenv install 3.6.8 -s || true - pyenv install 2.7.16 -s || true - pyenv local 3.7.3 3.6.8 2.7.16 || true + pyenv install 3.7.4 -s || true + pyenv local 3.7.4 || true lint: - $(MAKE) pipenv_install_lint - pipenv run which black && black $(PACKAGE) setup.py - pipenv run flake8 --max-line-length 89 $(PACKAGE) setup.py - pipenv run bandit -r . 
--skip B101 -x playground.py,setup.py + pipenv run isort -rc -y $(PACKAGE) setup.py axonshell*.py + pipenv run which black && pipenv run black $(PACKAGE) setup.py axonshell*.py + pipenv run pydocstyle $(PACKAGE) setup.py axonshell*.py + pipenv run flake8 --max-line-length 89 $(PACKAGE) setup.py axonshell*.py + pipenv run bandit --skip B101 -r $(PACKAGE) test: - $(MAKE) pipenv_install_dev - pipenv run pytest -rA --junitxml=junit-report.xml --cov-config=.coveragerc --cov-report=term --cov-report xml --cov-report=html:cov_html --cov=$(PACKAGE) --showlocals --log-cli-level=INFO --verbose --exitfirst $(PACKAGE)/tests + pipenv run pytest -ra --verbose --junitxml=junit-report.xml --cov-config=.coveragerc --cov-report xml --cov-report=html:cov_html --cov=$(PACKAGE) --showlocals --exitfirst $(PACKAGE)/tests -test_debug: - $(MAKE) pipenv_install_dev - pipenv run pytest -rA --capture=no --showlocals --log-cli-level=DEBUG --verbose --exitfirst $(PACKAGE)/tests +test_dev: + pipenv run pytest -vv --log-cli-level=DEBUG --showlocals --exitfirst $(PACKAGE)/tests + +test_cov_open: + open cov_html/index.html test_clean: - rm -rf .egg .eggs junit-report.xml cov_html .tox .pytest_cache .coverage + rm -rf .egg .eggs junit-report.xml cov_html .tox .pytest_cache .coverage coverage.xml docs: - $(MAKE) pipenv_install_docs + (cd docs && pipenv run make html SPHINXOPTS="-Wna" && cd ..) + +docs_dev: (cd docs && pipenv run make html SPHINXOPTS="-na" && cd ..) + +docs_apigen: + rm -rf docs/api_ref + pipenv run sphinx-apidoc -e -P -M -f -t docs/_templates -o docs/api_ref $(PACKAGE) $(PACKAGE)/tests $(PACKAGE)/cli + +docs_open: open docs/_build/html/index.html docs_coverage: - $(MAKE) pipenv_install_docs (cd docs && pipenv run make coverage && cd ..) cat docs/_build/coverage/python.txt docs_linkcheck: - $(MAKE) pipenv_install_docs (cd docs && pipenv run make linkcheck && cd ..) 
cat docs/_build/linkcheck/output.txt docs_clean: - $(MAKE) pipenv_install_docs - (cd docs && pipenv run make clean && cd ..) + rm -rf docs/_build git_check: @git diff-index --quiet HEAD && echo "*** REPO IS CLEAN" || (echo "!!! REPO IS DIRTY"; false) @@ -84,15 +93,15 @@ git_tag: @git push --tags @echo "*** ADDED TAG: $(VERSION)" -publish: +pkg_publish: + # FUTURE: add check that only master branch can publish / git tag $(MAKE) lint - $(MAKE) build + $(MAKE) pkg_build $(MAKE) git_check pipenv run twine upload dist/* -build: - $(MAKE) build_clean - $(MAKE) pipenv_install_build +pkg_build: + $(MAKE) pkg_clean @echo "*** Building Source and Wheel (universal) distribution" pipenv run python setup.py sdist bdist_wheel --universal @@ -100,18 +109,19 @@ build: @echo "*** Checking package with twine" pipenv run twine check dist/* -build_clean: +pkg_clean: rm -rf build dist *.egg-info - -clean_files: +files_clean: find . -type d -name "__pycache__" | xargs rm -rf find . -type f -name ".DS_Store" | xargs rm -f find . 
-type f -name "*.pyc" | xargs rm -f clean: - $(MAKE) clean_files - $(MAKE) build_clean + $(MAKE) files_clean + $(MAKE) pkg_clean $(MAKE) test_clean $(MAKE) docs_clean $(MAKE) pipenv_clean + +# FUTURE: add cov_publish diff --git a/Makefile.help b/Makefile.help new file mode 100644 index 00000000..6c52b7d0 --- /dev/null +++ b/Makefile.help @@ -0,0 +1,43 @@ +# venv +pip_install_tools install requirements-pkg.txt into site-packages +pipenv_install_dev install requirements-dev.txt into venv +pipenv_install_lint install requirements-lint.txt into venv +pipenv_install_build install requirements-build.txt into venv +pipenv_install_docs install docs/requirements.txt into venv +pipenv_init build a venv using pipenv +pipenv_clean remove the venv built by pipenv +pyenv_init setup pyenv with python 3.7.3, 3.6.8, 2.7.16 + +# docs +docs run docs/make html and rebuild ALL docs +docs_dev run docs/make html and only rebuild changed docs +docs_coverage run docs/make coverage +docs_linkcheck run docs/make linkcheck +docs_open open the html docs in a browser +docs_clean make pipenv_install_docs, run docs/make clean + +# git fun +git_check check that the repo is clean and a tag exists for current version +git_tag make a tag for current version and push it + +# packaging +pkg_build make clean, make pipenv_install_build, build the package +pkg_publish make lint, make pkg_build, make git_check, publish the package to pypi +pkg_clean clean up build folders/files + +# testing +lint run black, isort, pydocstyle, flake8, and bandit +test run pytest with coverage reports +test_dev run pytest with logging at debug +test_cov_open open the test coverage html docs in a browser +test_clean clean up test folders/files + +# clean up +files_clean clean up a bunch of files +clean make files_clean, make pkg_clean, make test_clean, make docs_clean, make pipenv_clean + +# other +help this... 
+ +# call me first +init make pip_install_tools, make clean, make py_env, make pipenv_init, make pipenv_install_* diff --git a/README.md b/README.md index b60bcdfe..36cb9330 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,8 @@ Installing axonius_api_client via [pip](https://pypi.org/project/pip/) or [pipen ## Documentation +**With the 2.0 release the docs are in the midst of an overhaul.** + Found [here](https://axonius-api-client.readthedocs.io/en/latest/?) ## Examples diff --git a/axonius_api_client.sublime-project b/axonius_api_client.sublime-project deleted file mode 100644 index b62f2425..00000000 --- a/axonius_api_client.sublime-project +++ /dev/null @@ -1,56 +0,0 @@ -{ - "build_systems": - [ - { - "file_regex": "^[ ]*File \"(...*?)\", line ([0-9]*)", - "name": "Anaconda Python Builder", - "selector": "source.python", - "shell_cmd": "\"/Users/jimbo/.pyenv/shims/python\" -u \"$file\"" - } - ], - "folders": - [ - { - "path": "/gh/axonapi" - } - ], - "settings": - { - "SublimeLinter.linters.flake8.args": - [ - "--max-line-length=89" - ], - "anaconda_linter_phantoms": false, - "anaconda_linter_show_errors_on_save": true, - "pep257": true, - "pep257_ignore": - [ - "D203", - "D209", - "D212", - "D213", - "D405", - "D406", - "D407", - "D408", - "D409", - "D410", - "D411" - ], - "pep8_ignore": - [ - ], - "python_interpreter": "~/.pyenv/shims/python", - "rulers": - [ - 89 - ], - "sublack.black_command": "~/.pyenv/shims/black", - "sublack.black_line_length": null, - "sublack.black_on_save": true, - "sublack.black_skip_string_normalization": false, - "tab_size": 4, - "translate_tabs_to_spaces": true, - "trim_trailing_white_space_on_save": true - } -} diff --git a/axonius_api_client/__init__.py b/axonius_api_client/__init__.py index c8dea534..6b2f4356 100644 --- a/axonius_api_client/__init__.py +++ b/axonius_api_client/__init__.py @@ -1,16 +1,36 @@ # -*- coding: utf-8 -*- """Axonius API Client package.""" -from __future__ import absolute_import -from __future__ import 
division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import absolute_import, division, print_function, unicode_literals -from . import api -from . import constants -from . import http -from . import auth -from . import exceptions -from . import version -from . import tools +from . import api, auth, cli, constants, exceptions, http, logs, tools, version +from .api import Adapters, Devices, Enforcements, Users +from .auth import ApiKey +from .connect import Connect +from .http import Http -__all__ = ("api", "constants", "http", "auth", "exceptions", "version", "tools") +__version__ = version.__version__ +LOG = logs.LOG + +__all__ = ( + # Connection handler + "Connect", + # http client + "Http", + # authentication + "ApiKey", + # api + "Users", + "Devices", + "Adapters", + "Enforcements", + # modules + "api", + "auth", + "http", + "exceptions", + "version", + "tools", + "constants", + "cli", + "logs", +) diff --git a/axonius_api_client/api/__init__.py b/axonius_api_client/api/__init__.py index 6f430d23..d62f04dc 100644 --- a/axonius_api_client/api/__init__.py +++ b/axonius_api_client/api/__init__.py @@ -1,556 +1,20 @@ # -*- coding: utf-8 -*- """Axonius API Client package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from . import exceptions -from . import models -from . import routers - - -class Users(models.ApiBase, models.UserDeviceBase): - """User related API methods.""" - - @property - def _router(self): - """Router for this API client. - - Returns: - :obj:`routers.Router` - - """ - return routers.ApiV1.users - - @property - def _default_fields(self): - """Fields to set as default for methods with fields as kwargs. 
- - Returns: - :obj:`dict` - - """ - return { - "generic": [ - "adapters", - "labels", - "specific_data.data.username", - "specific_data.data.last_seen", - "specific_data.data.mail", - ] - } - - def get_by_name(self, value, **kwargs): - """Get objects by name using paging. - - Args: - value (:obj:`int`): - Value to find using field "specific_data.data.username". - **kwargs: Passed thru to :meth:`models.UserDeviceBase.get_by_field_value` - - Returns: - :obj:`list` of :obj:`dict`: Each row matching name or :obj:`dict` if only1. - - """ - kwargs.setdefault("field", "specific_data.data.username") - kwargs.setdefault("field_adapter", "generic") - kwargs["value"] = value - return self.get_by_field_value(**kwargs) - - def get_by_email(self, value, **kwargs): - """Get objects by email using paging. - - Args: - value (:obj:`int`): - Value to find using field "specific_data.data.mail". - **kwargs: Passed thru to :meth:`models.UserDeviceBase.get_by_field_value` - - Returns: - :obj:`list` of :obj:`dict`: Each row matching email or :obj:`dict` if only1. - - """ - kwargs.setdefault("field", "specific_data.data.mail") - kwargs.setdefault("field_adapter", "generic") - kwargs["value"] = value - return self.get_by_field_value(**kwargs) - - -class Devices(models.ApiBase, models.UserDeviceBase): - """Device related API methods.""" - - @property - def _router(self): - """Router for this API client. - - Returns: - :obj:`routers.Router` - - """ - return routers.ApiV1.devices - - @property - def _default_fields(self): - """Fields to set as default for methods with fields as kwargs. - - Returns: - :obj:`dict` - - """ - return { - "generic": [ - "adapters", - "labels", - "specific_data.data.hostname", - "specific_data.data.network_interfaces.ips", - "specific_data.data.last_seen", - ] - } - - def get_by_name(self, value, **kwargs): - """Get objects by name using paging. - - Args: - value (:obj:`int`): - Value to find using field "specific_data.data.username". 
- **kwargs: Passed thru to :meth:`models.UserDeviceBase.get_by_field_value` - - Returns: - :obj:`list` of :obj:`dict`: Each row matching name or :obj:`dict` if only1. - - """ - kwargs.setdefault("field", "specific_data.data.hostname") - kwargs.setdefault("field_adapter", "generic") - kwargs["value"] = value - return self.get_by_field_value(**kwargs) - - # TODO: get_by_ip - # TODO: get_by_in_subnet - # TODO: get_by_not_in_subnet - def get_by_mac(self, value, **kwargs): - """Get objects by MAC using paging. - - Args: - value (:obj:`int`): - Value to find using field "specific_data.data.network_interfaces.mac". - **kwargs: Passed thru to :meth:`models.UserDeviceBase.get_by_field_value` - - Returns: - :obj:`list` of :obj:`dict`: Each row matching email or :obj:`dict` if only1. - - """ - kwargs.setdefault("field", "specific_data.data.network_interfaces.mac") - kwargs.setdefault("field_adapter", "generic") - kwargs["value"] = value - return self.get_by_field_value(**kwargs) - - -# FUTURE: needs tests -class Actions(models.ApiBase): - """Action related API methods. - - Notes: - The REST API will need to be updated to allow more power in this library. - Until then, this class should be considered **BETA**. - - """ - - @property - def _router(self): - """Router for this API client. - - Returns: - :obj:`routers.Router` - - """ - return routers.ApiV1.actions - - def get(self): - """Get all actions. - - Returns: - :obj:`list` of :obj:`str` - - """ - return self._request(method="get", path=self._router.root) - - def run(self, name, ids, command): - """Run an action. - - Args: - name (:obj:`str`): - Name of action to run. - ids (:obj:`list` of :obj`str`): - Internal axonius IDs of device to run action against. - command (:obj:`str`): - Command to run. 
- - - Returns: - :obj:`object` - - """ - data = {} - data["action_name"] = name - data["internal_axon_ids"] = ids - data["command"] = command - return self._request(method="post", path=self._router.shell, json=data) - - # FUTURE: Figure out return. - def deploy(self, name, ids, binary_uuid, binary_filename, params=None): - """Deploy an action. - - Args: - name (:obj:`str`): - Name of action to deploy. - ids (:obj:`list` of :obj`str`): - Internal axonius IDs of device to deploy action against. - binary_uuid (:obj:`str`): - UUID of binary to use in deployment. - binary_filename (:obj:`str`): - Filename of binary to use in deployment. - params (:obj:`str`, optional): - Defaults to: None. - - Returns: - :obj:`object` - - """ - data = {} - data["action_name"] = name - data["internal_axon_ids"] = ids - data["binary"] = {} - data["binary"]["filename"] = binary_filename - data["binary"]["uuid"] = binary_uuid - if params: - data["params"] = params - return self._request(method="post", path=self._router.deploy, json=data) - - def upload_file(self, binary, filename): - """Upload a file to the system for use in deployment. - - Args: - binary (:obj:`io.BytesIO`): - Binary bits of file to upload. - filename (:obj:`str`): - Name of file to upload. - - Returns: - :obj:`str`: UUID of uploaded file. - - """ - data = {} - data["field_name"] = "binary" - files = {} - files["userfile"] = (filename, binary) - return self._request( - method="post", path=self._router.upload_file, data=data, files=files - ) - - -# FUTURE: needs tests -class Adapters(models.ApiBase): - """Adapter related API methods. - - Notes: - The REST API will need to be updated to allow more power in this library. - Until then, this class should be considered **BETA**. - - """ - - @property - def _router(self): - """Router for this API client. - - Returns: - :obj:`routers.Router` - - """ - return routers.ApiV1.adapters - - def get(self): - """Get all adapters. 
- - Returns: - :obj:`object` - - """ - return self._request(method="get", path=self._router.root) - - # FUTURE: public method - def _check_client(self, name, config, node_id): - """Check connectivity for a client of an adapter. - - Args: - name (:obj:`str`): - Name of adapter to check client connectivity of. - config (:obj:`dict`): - Client configuration. - node_id (:obj:`str`): - Node ID. - - Returns: - :obj:`object` - - """ - data = {} - data.update(config) - data["instanceName"] = node_id - data["oldInstanceName"] = node_id - path = self._router.clients.format(adapter_name=name) - return self._request(method="post", path=path, json=data) - - # FUTURE: public method - def _add_client(self, name, config, node_id): - """Add a client to an adapter. - - Args: - name (:obj:`str`): - Name of adapter to add client to. - config (:obj:`dict`): - Client configuration. - node_id (:obj:`str`): - Node ID. - - Returns: - :obj:`object` - - """ - data = {} - data.update(config) - data["instanceName"] = node_id - path = self._router.clients.format(adapter_name=name) - return self._request(method="put", path=path, json=data) - - # FUTURE: public method - def _delete_client(self, name, id, node_id): - """Delete a client from an adapter. - - Args: - name (:obj:`str`): - Name of adapter to delete client from. - id (:obj:`str`): - ID of client to remove. - node_id (:obj:`str`): - Node ID. - - Returns: - :obj:`object` - - """ - data = {} - data["instanceName"] = node_id - path = self._router.clients.format(adapter_name=name) - path += "/{id}".format(id=id) - return self._request(method="delete", path=path, json=data) - - def upload_file( - self, - adapter_name, - node_id, - binary, - filename, - content_type=None, - field_name="binary", - ): - """Upload a file to the system for use in deployment. - - Args: - binary (:obj:`io.BytesIO`): - Binary bits of file to upload. - filename (:obj:`str`): - Name of file to upload. - - Returns: - :obj:`str`: UUID of uploaded file. 
- - """ - if content_type: - userfile = (filename, binary, content_type) - else: - userfile = (filename, binary) - - data = {} - data["field_name"] = field_name - files = {} - files["userfile"] = userfile - path = self._router.upload_file.format( - adapter_name=adapter_name, node_id=node_id - ) - return self._request(method="post", path=path, data=data, files=files) - - -# FUTURE: needs tests -class Enforcements(models.ApiBase): - """Enforcement related API methods. - - Notes: - The REST API will need to be updated to allow more power in this library. - Until then, this class should be considered **BETA**. - - """ - - @property - def _router(self): - """Router for this API client. - - Returns: - :obj:`routers.Router` - - """ - return routers.ApiV1.alerts - - def _delete(self, ids): - """Delete objects by internal axonius IDs. - - Args: - ids (:obj:`list` of :obj:`str`): - List of internal axonius IDs of objects to delete. - - Returns: - None - - """ - return self._request(method="delete", path=self._router.root, json=ids) - - # FUTURE: public method - def _create(self, name, main, success=None, failure=None, post=None, triggers=None): - """Create an enforcement. - - Args: - name (:obj:`str`): - Name of new enforcement to create. - main (:obj:`dict`): - Main action to run for this enforcement. - success (:obj:`list` of :obj:`dict`, optional): - Actions to run on success. - - Defaults to: None. - failure (:obj:`list` of :obj:`dict`, optional): - Actions to run on failure. - - Defaults to: None. - post (:obj:`list` of :obj:`dict`, optional): - Actions to run on post. - - Defaults to: None. - triggers (:obj:`list` of :obj:`dict`, optional): - Triggers for this enforcement. - - Defaults to: None. - - Notes: - This will get a public create method once the REST API server has been - updated to expose /enforcements/actions, /api/enforcements/actions/saved, - and others. - - Returns: - :obj:`str`: ID of newly created object. 
- - """ - data = {} - data["name"] = name - data["actions"] = {} - data["actions"]["main"] = main - data["actions"]["success"] = success or [] - data["actions"]["failure"] = success or [] - data["actions"]["post"] = success or [] - data["triggers"] = triggers or [] - return self._request(method="put", path=self._router.root, json=data) - - def get(self, query=None, row_start=0, page_size=0): - """Get a page for a given query. - - Args: - query (:obj:`str`, optional): - Query to filter rows to return. This is NOT a query built by - the Query Wizard in the GUI. This is something else. See - :meth:`get_by_name` for an example query. Empty - query will return all rows. - - Defaults to: None. - row_start (:obj:`int`, optional): - If not 0, skip N rows in the return. - - Defaults to: 0. - page_size (:obj:`int`, optional): - If not 0, include N rows in the return. - - Defaults to: 0. - - Returns: - :obj:`dict` - - """ - params = {} - - if page_size: - params["limit"] = page_size - - if row_start: - params["skip"] = row_start - - if query: - params["filter"] = query - - response = self._request(method="get", path=self._router.root, params=params) - return response["assets"] - - def delete_by_name(self, name, regex=False, only1=True): - """Delete an enforcement by name. - - Args: - name (:obj:`str`): - Name of object to delete. - regex (:obj:`bool`, optional): - Search for name using regex. - - Defaults to: False. - only1 (:obj:`bool`, optional): - Only allow one match to name. - - Defaults to: True. - - Returns: - :obj:`str`: empty string - - """ - found = self.get_by_name(name=name, regex=regex, only1=True) - ids = [x["uuid"] for x in found] if isinstance(found, list) else [found["uuid"]] - return self._delete(ids=ids) - - def get_by_name(self, name, regex=True, only1=False): - """Get enforcements by name. - - Args: - name (:obj:`str`): - Name of object to get. - regex (:obj:`bool`, optional): - Search for name using regex. - - Defaults to: True. 
- only1 (:obj:`bool`, optional): - Only allow one match to name. - - Defaults to: True. - - Raises: - :exc:`exceptions.ObjectNotFound` - - Returns: - :obj:`list` of :obj:`dict`: Each row matching name or :obj:`dict` if only1. - - """ - if regex: - query = 'name == regex("{name}", "i")'.format(name=name) - else: - query = 'name == "{name}"'.format(name=name) - - found = self.get(query=query) - - if not found or (len(found) > 1 and only1): - raise exceptions.ObjectNotFound( - value=query, value_type="query", object_type="Alert", exc=None - ) - - return found[0] if only1 else found +from __future__ import absolute_import, division, print_function, unicode_literals + +from . import adapters, enforcements, mixins, routers, users_devices +from .adapters import Adapters +from .enforcements import Enforcements +from .users_devices import Devices, Users + +__all__ = ( + "Users", + "Devices", + "Adapters", + "Enforcements", + "routers", + "users_devices", + "adapters", + "enforcements", + "mixins", +) diff --git a/axonius_api_client/api/adapters.py b/axonius_api_client/api/adapters.py new file mode 100644 index 00000000..00be384a --- /dev/null +++ b/axonius_api_client/api/adapters.py @@ -0,0 +1,1262 @@ +# -*- coding: utf-8 -*- +"""Axonius API module for working with adapters.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import time +import warnings + +from .. import constants, exceptions, tools +from . import mixins, routers + + +class Adapters(mixins.Model, mixins.Mixins): + """Adapter related API methods.""" + + def _init(self, auth, **kwargs): + """Post init constructor.""" + self.cnx = Cnx(parent=self) + """:obj:`Cnx`: Child object for working with connections.""" + + super(Adapters, self)._init(auth=auth, **kwargs) + + def _get(self): + """Private direct API method for getting all adapters. 
+ + Returns: + :obj:`dict` + + """ + return self._request(method="get", path=self._router.root) + + @property + def _router(self): + """Router for this API client. + + Returns: + :obj:`axonius_api_client.api.routers.Router` + + """ + return routers.ApiV1.adapters + + def _upload_file( + self, + adapter_name, + node_id, + name, + field, + content, + content_type=None, + headers=None, + ): + """Private direct API method for uploading a file for an adapter. + + Args: + adapter_name (:obj:`str`): + Name of adapter to upload file to. + node_id (:obj:`str`): + ID of node running adapter_name. + name (:obj:`str`): + Name of file to upload. + field (:obj:`str`): + Field to associate with this file. + content (:obj:`str` or :obj:`bytes`): + Contents of file to upload. + content_type (:obj:`str`, optional): + Mime type of content. + + Defaults to: None. + headers (:obj:`dict`, optional): + Mime headers for content. + + Defaults to: None. + + Returns: + dict + + """ + data = {"field_name": field} + files = {"userfile": (name, content, content_type, headers)} + + path = self._router.upload_file.format( + adapter_name=adapter_name, node_id=node_id + ) + + ret = self._request(method="post", path=path, data=data, files=files) + ret["filename"] = name + return ret + + def get(self): + """Pass.""" + raw = self._get() + parser = ParserAdapters(raw=raw, parent=self) + adapters = parser.parse() + return adapters + + def get_known(self, **kwargs): + """Pass.""" + adapters = kwargs.get("adapters") or self.get() + tmpl = [ + "name: {name!r}", + "node name: {node_name!r}", + "cnx count: {cnx_count}", + "status: {status}", + ] + tmpl = tools.join_comma(obj=tmpl).format + return [tmpl(**a) for a in adapters] + + def get_single(self, adapter, node="master"): + """Pass.""" + if isinstance(adapter, dict): + return adapter + + all_adapters = self.get() + + adapters = self.filter_by_nodes(value=node, adapters=all_adapters) + + adapters = self.filter_by_names(value=adapter, adapters=adapters) + + 
if len(adapters) != 1: + raise exceptions.ValueNotFound( + value="name {} and node name {}".format(adapter, node), + value_msg="Adapters by name and node name", + known=self.get_known, + known_msg="Adapters", + match_type="equals", + adapters=all_adapters, + ) + + return adapters[0] + + def filter_by_names( + self, adapters, value=None, ignore_case=True, match_count=None, match_error=True + ): + """Pass.""" + value = [ + tools.strip_right(obj=name, fix="_adapter") + for name in tools.listify(obj=value, dictkeys=True) + ] + + matches = [] + + for adapter in adapters: + match = tools.values_match( + checks=value, values=adapter["name"], ignore_case=ignore_case + ) + + if match and adapter not in matches: + matches.append(adapter) + + if (match_count and len(matches) != match_count) and match_error: + raise exceptions.ValueNotFound( + value=value, + value_msg="Adapters by names", + known=self.get_known, + known_msg="Adapters", + adapters=adapters, + ) + + return matches + + def filter_by_nodes( + self, adapters, value=None, ignore_case=True, match_count=None, match_error=True + ): + """Pass.""" + matches = [] + + for adapter in adapters: + match = tools.values_match( + checks=value, values=adapter["node_name"], ignore_case=ignore_case + ) + + if match and adapter not in matches: + matches.append(adapter) + + if (match_count and len(matches) != match_count) and match_error: + raise exceptions.ValueNotFound( + value=value, + value_msg="Adapters by node names", + known=self.get_known, + known_msg="Adapters", + adapters=adapters, + ) + + return matches + + def filter_by_cnx_count( + self, adapters, value=None, match_count=None, match_error=True + ): + """Pass.""" + matches = [] + + for adapter in adapters: + if value is not None and adapter["cnx_count"] != value: + continue + + if adapter not in matches: + matches.append(adapter) + + if (match_count and len(matches) != match_count) and match_error: + raise exceptions.ValueNotFound( + value=value, + value_msg="Adapters by 
connection count", + known=self.get_known, + known_msg="Adapters", + adapters=adapters, + match_type="is", + ) + + return matches + + def filter_by_status( + self, adapters, value=None, match_count=None, match_error=True + ): + """Pass.""" + matches = [] + + for adapter in adapters: + if isinstance(value, tools.LIST): + if value and adapter["status"] not in value: + continue + elif adapter["status"] != value: + continue + + if adapter not in matches: + matches.append(adapter) + + if (match_count and len(matches) != match_count) and match_error: + raise exceptions.ValueNotFound( + value=value, + value_msg="Adapters by status", + known=self.get_known, + known_msg="Adapters", + adapters=adapters, + ) + + return matches + + def upload_file_str( + self, adapter, field, name, content, node="master", content_type=None + ): + """Pass.""" + adapter = self.get_single(adapter=adapter, node=node) + + return self._upload_file( + adapter_name=adapter["name_raw"], + node_id=adapter["node_id"], + name=name, + field=field, + content=content, + content_type=content_type, + ) + + def upload_file_path(self, adapter, field, path, node="master", content_type=None): + """Pass.""" + adapter = self.get_single(adapter=adapter, node=node) + + path, content = tools.path_read(obj=path, binary=True, is_json=False) + + name = path.name + + return self._upload_file( + adapter_name=adapter["name_raw"], + node_id=adapter["node_id"], + name=name, + field=field, + content=content, + content_type=content_type, + ) + + +class Cnx(mixins.Child): + """Pass.""" + + def _add(self, adapter_name, node_id, config): + """Add a connection to an adapter. + + Args: + adapter (:obj:`str`): + Name of adapter to add connection to. + config (:obj:`dict`): + Client configuration. + node_id (:obj:`str`): + Node ID. 
+ + Returns: + :obj:`object` + + """ + data = {} + data.update(config) + data["instanceName"] = node_id + + path = self._parent._router.cnxs.format(adapter_name=adapter_name) + + return self._parent._request( + method="put", + path=path, + json=data, + error_json_bad_status=False, + error_status=False, + ) + + def _check(self, adapter_name, node_id, config): + """Test an adapter connection. + + Args: + name (:obj:`str`): + Name of adapter to test connection of. + config (:obj:`dict`): + Connection configuration. + node_id (:obj:`str`): + Node ID. + + Returns: + :obj:`object` + + """ + data = {} + data.update(config) + data["instanceName"] = node_id + data["oldInstanceName"] = node_id + + path = self._parent._router.cnxs.format(adapter_name=adapter_name) + + return self._parent._request( + method="post", + path=path, + json=data, + error_json_bad_status=False, + error_status=False, + ) + + def _delete(self, adapter_name, node_id, cnx_uuid, delete_entities=False): + """Delete a connection from an adapter. + + Args: + name (:obj:`str`): + Name of adapter to delete connection from. + id (:obj:`str`): + ID of connection to remove. + node_id (:obj:`str`): + Node ID. + + Returns: + :obj:`object` + + """ + data = {} + data["instanceName"] = node_id + + params = {"deleteEntities": delete_entities} + + path = self._parent._router.cnxs_uuid.format( + adapter_name=adapter_name, cnx_uuid=cnx_uuid + ) + + return self._parent._request( + method="delete", + path=path, + json=data, + params=params, + error_json_bad_status=False, + error_status=False, + ) + + def _update(self, adapter_name, node_id, config, cnx_uuid): + """Add a connection to an adapter. + + Args: + adapter (:obj:`str`): + Name of adapter to add connection to. + config (:obj:`dict`): + Client configuration. + node_id (:obj:`str`): + Node ID. 
+ + Returns: + :obj:`object` + + """ + data = {} + data.update(config) + data["instanceName"] = node_id + data["oldInstanceName"] = node_id + + path = self._parent._router.cnxs_uuid.format( + adapter_name=adapter_name, cnx_uuid=cnx_uuid + ) + return self._parent._request( + method="put", + path=path, + json=data, + error_json_bad_status=False, + error_status=False, + ) + + def add( + self, + adapter, + config, + parse_config=True, + node="master", + retry=15, + sleep=15, + error=True, + ): + """Add a connection to an adapter. + + Args: + name (:obj:`str`): + Name of adapter to add connection to. + config (:obj:`dict`): + Client configuration. + node_id (:obj:`str`): + Node ID. + + Returns: + :obj:`object` + + """ + adapter = self._parent.get_single(adapter=adapter, node=node) + + if parse_config: + parser = ParserCnxConfig(raw=config, parent=self) + config = parser.parse(adapter=adapter, settings=adapter["cnx_settings"]) + + response = self._add( + adapter_name=adapter["name_raw"], node_id=adapter["node_id"], config=config + ) + + had_error = response["status"] == "error" or response["error"] + if had_error and error: + raise exceptions.CnxConnectFailure( + response=response, adapter=adapter["name"], node=adapter["node_name"] + ) + + refetched = {} + refetched["response_had_error"] = had_error + refetched["response"] = response + refetched["cnx"] = self.refetch( + adapter_name=adapter["name"], + node_name=adapter["node_name"], + response=response, + retry=retry, + sleep=sleep, + filter_method=self.filter_by_uuids, + filter_value=response["id"], + ) + + return refetched + + def add_csv_str( + self, + name, + content, + field, + node="master", + is_users=False, + is_installed_sw=False, + parse_config=True, + retry=15, + sleep=15, + error=True, + ): + """Pass.""" + adapter = self._parent.get_single(adapter="csv", node=node) + + validate_csv( + name=name, + content=content, + is_users=is_users, + is_installed_sw=is_installed_sw, + ) + + config = {} + 
config["is_users_csv"] = is_users + config["is_installed_sw"] = is_installed_sw + config["user_id"] = field + config["csv"] = {} + config["csv"]["filename"] = name + config["csv"]["filecontent"] = content + config["csv"]["filecontent_type"] = "text/csv" + + return self.add( + adapter=adapter, + config=config, + parse_config=parse_config, + retry=retry, + sleep=sleep, + error=error, + ) + + def add_csv_file( + self, + path, + field, + node="master", + is_users=False, + is_installed_sw=False, + parse_config=True, + retry=15, + sleep=15, + error=True, + ): + """Pass.""" + adapter = self._parent.get_single(adapter="csv", node=node) + + path, content = tools.path_read(obj=path, binary=True, is_json=False) + + name = path.name + + validate_csv( + name=name, + content=content, + is_users=is_users, + is_installed_sw=is_installed_sw, + ) + + config = {} + config["is_users_csv"] = is_users + config["is_installed_sw"] = is_installed_sw + config["user_id"] = field + config["csv"] = {} + config["csv"]["filename"] = name + config["csv"]["filecontent"] = content + config["csv"]["filecontent_type"] = "text/csv" + + return self.add( + adapter=adapter, + config=config, + parse_config=parse_config, + retry=retry, + sleep=sleep, + error=error, + ) + + def add_csv_url( + self, + url, + field, + node="master", + is_users=False, + is_installed_sw=False, + parse_config=True, + retry=15, + sleep=15, + error=True, + ): + """Pass.""" + adapter = self._parent.get_single(adapter="csv", node=node) + + config = {} + config["is_users_csv"] = is_users + config["is_installed_sw"] = is_installed_sw + config["user_id"] = field + config["csv_http"] = url + + return self.add( + adapter=adapter, + config=config, + parse_config=parse_config, + retry=retry, + sleep=sleep, + error=error, + ) + + def add_csv_share( + self, + share, + field, + node="master", + is_users=False, + is_installed_sw=False, + username=None, + password=None, + parse_config=True, + retry=15, + sleep=15, + error=True, + ): + 
"""Pass.""" + adapter = self._parent.get_single(adapter="csv", node=node) + + config = {} + config["is_users_csv"] = is_users + config["is_installed_sw"] = is_installed_sw + config["user_id"] = field + config["csv_share"] = share + if username: + config["csv_share_username"] = username + if password: + config["csv_share_password"] = password + + return self.add( + adapter=adapter, + config=config, + parse_config=parse_config, + retry=retry, + sleep=sleep, + error=error, + ) + + def check(self, cnx, retry=15, sleep=15, error=True): + """Pass.""" + response = self._check( + adapter_name=cnx["adapter_name_raw"], + config=cnx["config_raw"], + node_id=cnx["node_id"], + ) + + had_error = bool(response) + + if had_error and error: + raise exceptions.CnxConnectFailure( + response=response, adapter=cnx["adapter_name"], node=cnx["node_name"] + ) + + refetched = {} + refetched["response_had_error"] = had_error + refetched["response"] = response + refetched["cnx"] = self.refetch( + adapter_name=cnx["adapter_name"], + node_name=cnx["node_name"], + response=response, + retry=retry, + sleep=sleep, + filter_method=self.filter_by_ids, + filter_value=cnx["id"], + ) + + return refetched + + def delete( + self, + cnx, + delete_entities=False, + force=False, + warning=True, + error=True, + sleep=15, + ): + """Pass.""" + cnxinfo = [ + "Adapter name: {adapter_name}", + "Node name: {node_name}", + "Connection ID: {id}", + "Connection UUID: {uuid}", + "Connection status: {status}", + "Delete all entities: {de}", + ] + cnxinfo = tools.join_cr(obj=cnxinfo).format(de=delete_entities, **cnx) + + if not force: + raise exceptions.CnxDeleteForce(cnxinfo=cnxinfo) + + if warning: + warnings.warn(exceptions.CnxDeleteWarning(cnxinfo=cnxinfo, sleep=sleep)) + + dargs = { + "adapter_name": cnx["adapter_name_raw"], + "node_id": cnx["node_id"], + "cnx_uuid": cnx["uuid"], + "delete_entities": delete_entities, + } + + lsmsg = [ + "Connection info: {cnxinfo}", + "About to delete connection in {s} seconds 
using args: {a}", + ] + lsmsg = tools.join_cr(obj=lsmsg).format(cnxinfo=cnxinfo, s=sleep, a=dargs) + self._log.warning(lsmsg) + + time.sleep(sleep) + + response = self._delete(**dargs) + + had_error = isinstance(response, dict) and ( + response["status"] == "error" or response["error"] + ) + + lfmsg = [ + "Connection info: {cnxinfo}", + "Deleted connection with error {he} and return {r}", + ] + lfmsg = tools.join_cr(obj=lfmsg).format( + cnxinfo=cnxinfo, he=had_error, r=response + ) + self._log.info(lfmsg) + + if had_error: + if warning and not error: + warnings.warn( + exceptions.CnxDeleteFailedWarning( + cnxinfo=cnxinfo, response=response + ) + ) + elif error: + raise exceptions.CnxDeleteFailed(cnxinfo=cnxinfo, response=response) + + ret = {} + ret["response_had_error"] = had_error + ret["response"] = response + ret["cnx"] = cnx + + return ret + + def filter_by_ids( + self, cnxs, value=None, ignore_case=True, match_count=None, match_error=True + ): + """Get all connections for all adapters.""" + matches = [] + + for cnx in cnxs: + match = tools.values_match( + checks=value, values=cnx["id"], ignore_case=ignore_case + ) + + if match and cnx not in matches: + matches.append(cnx) + + if (match_count and len(matches) != match_count) and match_error: + raise exceptions.ValueNotFound( + value=value, + value_msg="Adapter connections by id", + known=self.get_known, + known_msg="Adapter connections", + cnxs=cnxs, + ) + + return matches + + def filter_by_uuids( + self, cnxs, value=None, ignore_case=True, match_count=None, match_error=True + ): + """Get all connections for all adapters.""" + matches = [] + + for cnx in cnxs: + match = tools.values_match( + checks=value, values=cnx["uuid"], ignore_case=ignore_case + ) + + if match and cnx not in matches: + matches.append(cnx) + + if (match_count and len(matches) != match_count) and match_error: + raise exceptions.ValueNotFound( + value=value, + value_msg="Adapter connections by uuid", + known=self.get_known, + 
known_msg="Adapter connections", + cnxs=cnxs, + ) + + return matches + + def filter_by_status(self, cnxs, value=None, match_count=None, match_error=True): + """Get all connections for all adapters.""" + matches = [] + + for cnx in cnxs: + if isinstance(value, tools.LIST): + if value and cnx["status"] not in value: + continue + elif value is not None and cnx["status"] != value: + continue + + if cnx not in matches: + matches.append(cnx) + + if (match_count and len(matches) != match_count) and match_error: + raise exceptions.ValueNotFound( + value=value, + value_msg="Adapter connections by status", + known=self.get_known, + known_msg="Adapter connections", + cnxs=cnxs, + ) + + return matches + + def get(self, adapter=None, node=None): + """Get all connections for an adapter.""" + if isinstance(adapter, tools.LIST): + all_adapters = self._parent.get() + all_adapters = self._parent.filter_by_names( + adapters=all_adapters, value=adapter + ) + all_adapters = self._parent.filter_by_nodes( + adapters=all_adapters, value=node + ) + return [c for a in all_adapters for c in a["cnx"]] + + if not adapter: + all_adapters = self._parent.get() + all_adapters = self._parent.filter_by_nodes( + adapters=all_adapters, value=node + ) + return [c for a in all_adapters for c in a["cnx"]] + + adapter = self._parent.get_single(adapter=adapter, node=node) + return adapter["cnx"] + + def get_known(self, **kwargs): + """Pass.""" + cnxs = kwargs.get("cnxs") or self.get() + tmpl = [ + "Adapter: {adapter_name!r}", + "Node: {node_name!r}", + "cnx id: {id!r}", + "cnx uuid: {uuid!r}", + "cnx status: {status}", + ] + tmpl = tools.join_comma(obj=tmpl) + return [tmpl.format(**c) for c in cnxs] + + def refetch( + self, + adapter_name, + node_name, + response, + filter_method, + filter_value, + retry=15, + sleep=15, + ): + """Pass.""" + count = 0 + retry = retry or 1 + + # new connections don't always show up right away, so we have to do some magic + while count < retry: + # re-fetch all connections 
for this adapter + # try to find the newly created connection + all_cnxs = self.get(adapter=adapter_name, node=node_name) + + msg = "Retry count {c}/{r} and sleep {s} - find {fv!r} using {fm!r}" + msg = msg.format( + c=count, r=retry, s=sleep, fv=filter_value, fm=filter_method + ) + self._log.debug(msg) + + cnxs = filter_method( + cnxs=all_cnxs, value=filter_value, match_count=1, match_error=False + ) + + msg = "Found {c} matches out of {ct} cnxs using {fv!r}" + msg = msg.format(c=len(cnxs), ct=len(all_cnxs), fv=filter_value) + self._log.debug(msg) + + if len(cnxs) == 1: + return cnxs[0] + + count += 1 + + dmsg = [ + "Connection not in system yet", + "try {c} of {r}", + "sleeping another {s} seconds", + "Looking for connection: {response}", + ] + dmsg = tools.join_comma(obj=dmsg).format( + response=response, c=count, r=retry, s=sleep + ) + self._log.debug(dmsg) + + time.sleep(sleep) + + raise exceptions.CnxRefetchFailure( + response=response, + adapter=adapter_name, + node=node_name, + filter_method=filter_method, + filter_value=filter_value, + known=self.get_known, + cnxs=all_cnxs, + ) + + def update( + self, cnx, new_config=None, parse_config=True, retry=15, sleep=15, error=True + ): + """Pass.""" + if parse_config and new_config: + adapter = self._parent.get_single(adapter=cnx["adapter_name"]) + parser = ParserCnxConfig(raw=new_config, parent=self) + new_config = parser.parse(adapter=adapter, settings=adapter["cnx_settings"]) + + msg = [ + "Updating cnx id={id}", + "uuid={uuid}", + "adapter={adapter_name}", + "node={node_name}", + ] + msg = tools.join_comma(obj=msg).format(**cnx) + self._log.debug(msg) + + response = self._update( + adapter_name=cnx["adapter_name_raw"], + node_id=cnx["node_id"], + config=new_config or cnx["config_raw"], + cnx_uuid=cnx["uuid"], + ) + + msg = "Updating cnx response {r}".format(r=response) + self._log.debug(msg) + + had_error = response["status"] == "error" or response["error"] + + if had_error and error: + raise 
exceptions.CnxConnectFailure( + response=response, adapter=cnx["adapter_name"], node=cnx["node_name"] + ) + + refetched = {} + refetched["response_had_error"] = had_error + refetched["response"] = response + refetched["cnx"] = self.refetch( + adapter_name=cnx["adapter_name"], + node_name=cnx["node_name"], + response=response, + retry=retry, + sleep=sleep, + filter_method=self.filter_by_uuids, + filter_value=response["id"], + ) + + return refetched + + +class ParserCnxConfig(mixins.Parser): + """Pass.""" + + def parse(self, adapter, settings): + """Pass.""" + new_config = {} + + for name, schema in settings.items(): + required = schema["required"] + + value = self._raw.get(name, None) + + has_value = name in self._raw + has_default = "default" in schema + + req = "required" if required else "optional" + msg = "Processing {req} setting {n!r} with value of {v!r}, schema: {ss}" + msg = msg.format(req=req, n=name, v=value, ss=schema) + self._log.debug(msg) + + if not has_value and not has_default: + if not required: + continue + + raise exceptions.CnxSettingMissing( + name=name, value=value, schema=schema, adapter=adapter + ) + + if not has_value and has_default: + value = schema["default"] + + new_config[name] = self.check_value( + name=name, value=value, schema=schema, adapter=adapter + ) + + return new_config + + def check_value(self, name, value, schema, adapter): + """Pass.""" + type_str = schema["type"] + enum = schema.get("enum", []) + + if value == constants.SETTING_UNCHANGED: + return value + + if enum and value not in enum: + raise exceptions.CnxSettingInvalidChoice( + name=name, value=value, schema=schema, enum=enum, adapter=adapter + ) + + if type_str == "file": + return self.check_file( + name=name, value=value, schema=schema, adapter=adapter + ) + elif type_str == "bool": + return tools.coerce_bool(obj=value) + elif type_str in ["number", "integer"]: + return tools.coerce_int(obj=value) + elif type_str == "array": + if isinstance(value, tools.STR): + value 
= [x.strip() for x in value.split(",")] + if isinstance(value, tools.LIST) and all( + [isinstance(x, tools.STR) for x in value] + ): + return value + elif type_str == "string": + if isinstance(value, tools.STR): + return value + else: + raise exceptions.CnxSettingUnknownType( + name=name, + value=value, + schema=schema, + type_str=type_str, + adapter=adapter, + ) + + raise exceptions.CnxSettingInvalidType( + name=name, value=value, schema=schema, adapter=adapter, mustbe=type_str + ) + + def check_file(self, name, value, schema, adapter): + """Pass.""" + is_str = isinstance(value, tools.STR) + is_dict = isinstance(value, dict) + is_path = isinstance(value, tools.pathlib.Path) + + if not any([is_dict, is_str, is_path]): + raise exceptions.CnxSettingInvalidType( + name=name, + value=value, + schema=schema, + mustbe="dict or str", + adapter=adapter, + ) + + if is_str or is_path: + value = {"filepath": format(value)} + + uuid = value.get("uuid", None) + filename = value.get("filename", None) + filepath = value.get("filepath", None) + filecontent = value.get("filecontent", None) + filecontent_type = value.get("filecontent_type", None) + + if uuid and filename: + return {"uuid": uuid, "filename": filename} + + # FUTURE: try here + if filepath: + uploaded = self._parent._parent.upload_file_path( + field=name, + adapter=adapter, + path=filepath, + content_type=filecontent_type, + ) + + return {"uuid": uploaded["uuid"], "filename": uploaded["filename"]} + + if filecontent and filename: + uploaded = self._parent._parent.upload_file_str( + field=name, + adapter=adapter, + name=filename, + content=filecontent, + content_type=filecontent_type, + ) + return {"uuid": uploaded["uuid"], "filename": uploaded["filename"]} + + raise exceptions.CnxSettingFileMissing( + name=name, value=value, schema=schema, adapter=adapter + ) + + +class ParserAdapters(mixins.Parser): + """Pass.""" + + def _adapter(self, name, raw): + """Pass.""" + parsed = { + "name": tools.strip_right(obj=name, 
fix="_adapter"), + "name_raw": name, + "name_plugin": raw["unique_plugin_name"], + "node_name": raw["node_name"], + "node_id": raw["node_id"], + "status_raw": raw["status"], + "features": raw["supported_features"], + } + + if parsed["status_raw"] == "success": + parsed["status"] = True + elif parsed["status_raw"] == "warning": + parsed["status"] = False + else: + parsed["status"] = None + + cnx = self._cnx(raw=raw, parent=parsed) + cnx_ok = [x for x in cnx if x["status"] is True] + cnx_bad = [x for x in cnx if x["status"] is False] + + parsed["cnx"] = cnx + parsed["cnx_ok"] = cnx_ok + parsed["cnx_bad"] = cnx_bad + parsed["cnx_settings"] = self._cnx_settings(raw=raw) + parsed["cnx_count"] = len(cnx) + parsed["cnx_count_ok"] = len(cnx_ok) + parsed["cnx_count_bad"] = len(cnx_bad) + parsed["settings"] = self._adapter_settings(raw=raw, base=False) + parsed["adv_settings"] = self._adapter_settings(raw=raw, base=True) + + return parsed + + def _adapter_settings(self, raw, base=True): + """Pass.""" + settings = {} + + for raw_name, raw_settings in raw["config"].items(): + is_base = raw_name == "AdapterBase" + if ((is_base and base) or (not is_base and not base)) and not settings: + schema = raw_settings["schema"] + items = schema["items"] + required = schema["required"] + config = raw_settings["config"] + + for item in items: + setting_name = item["name"] + parsed_settings = {k: v for k, v in item.items()} + parsed_settings["required"] = setting_name in required + parsed_settings["value"] = config.get(setting_name, None) + settings[setting_name] = parsed_settings + + return settings + + def _cnx_settings(self, raw): + """Pass.""" + settings = {} + + schema = raw["schema"] + items = schema["items"] + required = schema["required"] + + for item in items: + setting_name = item["name"] + settings[setting_name] = {k: v for k, v in item.items()} + settings[setting_name]["required"] = setting_name in required + + return settings + + def _cnx(self, raw, parent): + """Pass.""" + cnx 
= [] + + cnx_settings = self._cnx_settings(raw=raw) + + for raw_cnx in raw["clients"]: + raw_config = raw_cnx["client_config"] + parsed_settings = {} + + for setting_name, setting_config in cnx_settings.items(): + value = raw_config.get(setting_name, None) + + if value == constants.SETTING_UNCHANGED: + value = "__HIDDEN__" + + if setting_name not in raw_config: + value = "__NOTSET__" + + parsed_settings[setting_name] = setting_config.copy() + parsed_settings[setting_name]["value"] = value + + pcnx = {} + pcnx["node_name"] = parent["node_name"] + pcnx["node_id"] = parent["node_id"] + pcnx["adapter_name"] = parent["name"] + pcnx["adapter_name_raw"] = parent["name_raw"] + pcnx["adapter_status"] = parent["status"] + pcnx["config"] = parsed_settings + pcnx["config_raw"] = raw_config + pcnx["status_raw"] = raw_cnx["status"] + pcnx["status"] = raw_cnx["status"] == "success" + pcnx["id"] = raw_cnx["client_id"] + pcnx["uuid"] = raw_cnx["uuid"] + pcnx["date_fetched"] = raw_cnx["date_fetched"] + pcnx["error"] = raw_cnx["error"] + cnx.append(pcnx) + + return cnx + + def parse(self): + """Pass.""" + parsed = [] + + for name, raw_adapters in self._raw.items(): + for raw in raw_adapters: + adapter = self._adapter(name=name, raw=raw) + parsed.append(adapter) + + return parsed + + +def validate_csv(name, content, is_users=False, is_installed_sw=False): + """Pass.""" + if is_users: + ids = constants.CSV_FIELDS["user"] + ids_type = "user" + elif is_installed_sw: + ids = constants.CSV_FIELDS["sw"] + ids_type = "installed software" + else: + ids = constants.CSV_FIELDS["device"] + ids_type = "device" + + headers_content = content + if isinstance(headers_content, tools.BYTES): + headers_content = headers_content.decode() + + headers = headers_content.splitlines()[0].lower().split(",") + headers_has_any_id = any([x in headers for x in ids]) + + if not headers_has_any_id: + warnings.warn( + exceptions.CnxCsvWarning( + ids_type=ids_type, ids=ids, name=name, headers=headers + ) + ) + + +# 
FUTURE: public REST API does not support setting advanced settings +""" +# advanced settings +method=POST +path=/api/plugins/configs/carbonblack_defense_adapter/AdapterBase +body= +{ + "connect_client_timeout": 300, + "fetching_timeout": 5400, + "last_fetched_threshold_hours": 49, + "last_seen_prioritized": false, + "last_seen_threshold_hours": 43800, + "minimum_time_until_next_fetch": null, + "realtime_adapter": false, + "user_last_fetched_threshold_hours": null, + "user_last_seen_threshold_hours": null +} +""" + +""" +# adapter specific advanced settings +method=POST +path=/api/plugins/configs/carbonblack_defense_adapter/CarbonblackDefenseAdapter +body={"fetch_deregistred":false} +""" + +""" +# rule to add to api.py +@api_add_rule('plugins/configs//', methods=['POST', 'GET'], + wrap around service.py: plugins_configs_set() +""" + +# FUTURE: date_fetched for client seems to be only for +# when fetch has been triggered from "save" in adapters>client page?? +# need to submit BR +""" +date_fetched = client["date_fetched"] +minutes_ago = tools.dt.minutes_ago(date_fetched) + +if within is not None: + if minutes_ago >= within: + continue + +if not_within is not None: + if minutes_ago <= not_within: + continue +""" diff --git a/axonius_api_client/api/enforcements.py b/axonius_api_client/api/enforcements.py new file mode 100644 index 00000000..f7f9b956 --- /dev/null +++ b/axonius_api_client/api/enforcements.py @@ -0,0 +1,439 @@ +# -*- coding: utf-8 -*- +"""Axonius API Client package.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import warnings + +from .. import constants, exceptions, tools +from . import mixins, routers, users_devices + + +class RunAction(mixins.Child): + """Action related API methods. + + Notes: + The REST API will need to be updated to allow more power in this library. + Until then, this class should be considered **BETA**. + + """ + + @property + def _router(self): + """Router for this API client. 
+ + Returns: + :obj:`axonius_api_client.api.routers.Router` + + """ + return routers.ApiV1.actions + + # sort of pointless + def _get(self): + """Get all actions. + + Returns: + :obj:`list` of :obj:`str` + + """ + path = self._router.root + + return self._parent._request(method="get", path=path) + + # FUTURE: public method + def _deploy(self, action_name, ids, file_uuid, file_name, params=None): + """Deploy an action. + + Args: + name (:obj:`str`): + Name of action to deploy. + ids (:obj:`list` of :obj:`str`): + Internal axonius IDs of device to deploy action against. + uuid (:obj:`str`): + UUID of binary to use in deployment. + filename (:obj:`str`): + Filename of binary to use in deployment. + params (:obj:`str`, optional): + Defaults to: None. + + Returns: + :obj:`object` + + """ + data = {} + data["action_name"] = action_name + data["internal_axon_ids"] = ids + data["binary"] = {} + data["binary"]["filename"] = file_name + data["binary"]["uuid"] = file_uuid + data["params"] = params + + path = self._router.deploy + + return self._parent._request(method="post", path=path, json=data) + + # FUTURE: public method + def _shell(self, action_name, ids, command): + """Run an action. + + Args: + action_name (:obj:`str`): + Name of action to run. + ids (:obj:`list` of :obj:`str`): + Internal axonius IDs of device to run action against. + command (:obj:`str`): + Command to run. + + Returns: + :obj:`object` + + """ + data = {} + data["action_name"] = action_name + data["internal_axon_ids"] = ids + data["command"] = command + + path = self._router.shell + + return self._parent._request(method="post", path=path, json=data) + + # FUTURE: public method + def _upload_file(self, name, content, content_type=None, headers=None): + """Upload a file to the system for use in deployment. + + Args: + binary (:obj:`io.BytesIO`): + Binary bits of file to upload. + filename (:obj:`str`): + Name of file to upload. + + Returns: + :obj:`str`: UUID of uploaded file. 
+ + """ + data = {"field_name": "binary"} + files = {"userfile": (name, content, content_type, headers)} + + path = self._router.upload_file + + ret = self._parent._request(method="post", path=path, data=data, files=files) + ret["filename"] = name + return ret + + +class Enforcements(mixins.Model, mixins.Mixins): + """Enforcement related API methods. + + Notes: + The REST API will need to be updated to allow more power in this library. + Until then, this class should be considered **BETA**. + + """ + + def _init(self, auth, **kwargs): + """Pass.""" + # cross ref + self.users = users_devices.Users(auth=auth, **kwargs) + self.devices = users_devices.Devices(auth=auth, **kwargs) + + # children + self.runaction = RunAction(parent=self) + + super(Enforcements, self)._init(auth=auth, **kwargs) + + warnings.warn(exceptions.BetaWarning(obj=self)) + + @property + def _router(self): + """Router for this API client. + + Returns: + :obj:`axonius_api_client.api.routers.Router` + + """ + return routers.ApiV1.alerts + + def _delete(self, ids): + """Delete objects by internal axonius IDs. + + Args: + ids (:obj:`list` of :obj:`str`): + List of internal axonius IDs of objects to delete. + + Returns: + None + + """ + path = self._router.root + + return self._request(method="delete", path=path, json=ids) + + # FUTURE: public method + def _create(self, name, main, success=None, failure=None, post=None, triggers=None): + """Create an enforcement. + + Args: + name (:obj:`str`): + Name of new enforcement to create. + main (:obj:`dict`): + Main action to run for this enforcement. + success (:obj:`list` of :obj:`dict`, optional): + Actions to run on success. + + Defaults to: None. + failure (:obj:`list` of :obj:`dict`, optional): + Actions to run on failure. + + Defaults to: None. + post (:obj:`list` of :obj:`dict`, optional): + Actions to run on post. + + Defaults to: None. + triggers (:obj:`list` of :obj:`dict`, optional): + Triggers for this enforcement. + + Defaults to: None. 
+ + Notes: + This will get a public create method once the REST API server has been + updated to expose /enforcements/actions, /api/enforcements/actions/saved, + and others. + + Returns: + :obj:`str`: ID of newly created object. + + """ + data = {} + data["name"] = name + data["actions"] = {} + data["actions"]["main"] = main + data["actions"]["success"] = success or [] + data["actions"]["failure"] = failure or [] + data["actions"]["post"] = post or [] + data["triggers"] = triggers or [] + + print(tools.json_reload(data)) + + path = self._router.root + return self._request(method="put", path=path, json=data, is_json=False) + + # FUTURE: Shares code with SavedQuery.delete + # FUTURE: old sdk had fields arg + def _get(self, query=None, row_start=0, page_size=0): + """Get a page for a given query. + + Args: + query (:obj:`str`, optional): + Query to filter rows to return. This is NOT a query built by + the Query Wizard in the GUI. This is something else. See + :meth:`get_by_name` for an example query. Empty + query will return all rows. + + Defaults to: None. + row_start (:obj:`int`, optional): + If not 0, skip N rows in the return. + + Defaults to: 0. + page_size (:obj:`int`, optional): + If not 0, include N rows in the return. + + Defaults to: 0. + + Returns: + :obj:`dict` + + """ + if not page_size or page_size > constants.MAX_PAGE_SIZE: + msg = "Changed page size from {ps} to max page size {mps}" + msg = msg.format(ps=page_size, mps=constants.MAX_PAGE_SIZE) + self._log.debug(msg) + + page_size = constants.MAX_PAGE_SIZE + + params = {} + params["skip"] = row_start + params["limit"] = page_size + + if query: + params["filter"] = query + + path = self._router.root + + return self._request(method="get", path=path, params=params) + + # FUTURE: Shares code with SavedQuery.delete + def delete(self, rows): + """Delete an enforcement by name. + + Args: + name (:obj:`str`): + Name of object to delete. + regex (:obj:`bool`, optional): + Search for name using regex. 
+ + Defaults to: False. + only1 (:obj:`bool`, optional): + Only allow one match to name. + + Defaults to: True. + + Returns: + :obj:`str`: empty string + + """ + return self._delete( + ids=[x["uuid"] for x in tools.listify(obj=rows, dictkeys=False)] + ) + + # FUTURE: Shares code with SavedQuery.get and UsersDevicesMixins.get (sorta) + def get(self, query=None, max_rows=None, max_pages=None, page_size=None): + """Get enforcements.""" + if not page_size or page_size > constants.MAX_PAGE_SIZE: + msg = "Changed page_size={ps} to max_page_size={mps}" + msg = msg.format(ps=page_size, mps=constants.MAX_PAGE_SIZE) + self._log.debug(msg) + + page_size = constants.MAX_PAGE_SIZE + + page_info = 0 + page_num = 0 + rows_fetched = 0 + rows = [] + fetch_start = tools.dt_now() + + msg = [ + "Starting get: page_size={}".format(page_size), + "query={!r}".format(query or ""), + ] + self._log.debug(tools.join_comma(msg)) + + while True: + page_start = tools.dt_now() + page_num += 1 + rows_left = max_rows - len(rows) if max_rows else -1 + + if 0 < rows_left < page_size: + msg = "Changed page_size={ps} to rows_left={rl} (max_rows={mr})" + msg = msg.format(ps=page_size, rl=rows_left, mr=max_rows) + self._log.debug(msg) + + page_size = rows_left + + msg = [ + "Fetching page_num={}".format(page_num), + "page_size={}".format(page_size), + "rows_fetched={}".format(rows_fetched), + ] + self._log.debug(tools.join_comma(obj=msg)) + + page = self._get(query=query, page_size=page_size, row_start=rows_fetched) + + assets = page["assets"] + page_info = page["page"] + + rows += assets + rows_fetched += len(assets) + + msg = [ + "Fetched page_num={}".format(page_num), + "page_took={}".format(tools.dt_sec_ago(obj=page_start)), + "rows_fetched={}".format(rows_fetched), + "page_info={}".format(page_info), + ] + self._log.debug(tools.join_comma(obj=msg)) + + if not assets: + msg = "Stopped fetch loop, page with no assets returned" + self._log.debug(msg) + break + + if max_pages and page_num >= max_pages: 
+ msg = "Stopped fetch loop, hit max_pages={mp}" + msg = msg.format(mp=max_pages) + self._log.debug(msg) + break + + if max_rows and len(rows) >= max_rows: + msg = "Stopped fetch loop, hit max_rows={mr} with rows_fetched={rf}" + msg = msg.format(mr=max_rows, rf=rows_fetched) + self._log.debug(msg) + break + + msg = [ + "Finished get: rows_fetched={}".format(rows_fetched), + "total_rows={}".format(page_info["totalResources"]), + "fetch_took={}".format(tools.dt_sec_ago(obj=fetch_start)), + "query={!r}".format(query or ""), + ] + self._log.info(tools.join_comma(obj=msg)) + + return rows + + def get_by_id( + self, value, match_error=True, max_rows=None, max_pages=None, page_size=None + ): + """Get EC using paging.""" + rows = self.get(max_rows=max_rows, max_pages=max_pages, page_size=page_size) + + for row in rows: + if row["uuid"] == value: + return row + + if match_error: + ktmpl = "name: {name!r}, uuid: {uuid!r}".format + known = [ktmpl(**row) for row in rows] + known_msg = "Enforcements" + value_msg = "Enforcements by UUID" + raise exceptions.ValueNotFound( + value=value, value_msg=value_msg, known=known, known_msg=known_msg + ) + + return None + + # FUTURE: Shares code with SavedQuery.get_by_name + def get_by_name( + self, + value, + match_count=None, + match_error=True, + eq_single=True, + max_rows=None, + max_pages=None, + page_size=None, + ): + """Find actions by name.""" + not_flag = "" + + if value.startswith("NOT:"): + value = tools.strip_left(obj=value, fix="NOT:").strip() + not_flag = "not " + + if value.startswith("RE:"): + value = tools.strip_left(obj=value, fix="RE:").strip() + query = '{not_flag}name == regex("{value}", "i")' + else: + query = '{not_flag}name == "{value}"' + + if eq_single and not not_flag: + max_rows = 1 + match_count = 1 + match_error = True + + query = query.format(not_flag=not_flag, value=value) + + rows = self.get( + query=query, max_rows=max_rows, max_pages=max_pages, page_size=page_size + ) + + if (match_count and len(rows) != 
match_count) and match_error: + ktmpl = "name: {name!r}, uuid: {uuid!r}".format + known = [ktmpl(**row) for row in self.get()] + known_msg = "Enforcements" + value_msg = "Enforcements by name using query {q}".format(q=query) + raise exceptions.ValueNotFound( + value=value, value_msg=value_msg, known=known, known_msg=known_msg + ) + + if match_count == 1 and len(rows) == 1: + return rows[0] + + return rows diff --git a/axonius_api_client/api/exceptions.py b/axonius_api_client/api/exceptions.py deleted file mode 100644 index 5d886bd9..00000000 --- a/axonius_api_client/api/exceptions.py +++ /dev/null @@ -1,270 +0,0 @@ -# -*- coding: utf-8 -*- -"""API errors.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from .. import exceptions -from .. import tools - - -def trydecode(x): - """Pass.""" - try: - return x.decode() - except Exception: - return x - - -class ApiError(exceptions.PackageError): - """Parent exception for all API errors.""" - - -class ResponseError(ApiError): - """Parent exception for any response error.""" - - def __init__(self, response, error="", exc=None, details=True, bodies=True): - """Constructor. - - Args: - response (:obj:`requests.Response`): - Response error was thrown for. - error (:obj:`str`, optional): - Error message. - - Defaults to: "". - exc (:obj:`Exception`, optional): - Original exception thrown. - - Defaults to: None. - bodies (:obj:`bool`, optional): - Show request and response bodies. - - Defaults to: True. - - """ - self.response = response - """:obj:`requests.Response`: Response error was thrown for.""" - - error = error or "Response error!" 
- self.error = error - """:obj:`str`: Error message.""" - - self.exc = exc - """:obj:`Exception`: Original exception thrown.""" - - msgs = [] - - if details: - txt = [ - "code={r.status_code!r}", - "reason={r.reason!r}", - "method={r.request.method!r}", - "url={r.url!r}", - ] - - txt = "({})".format(", ".join(txt).format(r=response)) - error = "{} Response details {}".format(error, txt) - - error = "{} (original exception: {})".format(error, exc) if exc else error - - msgs.append(error) - - if bodies: - req_txt = trydecode(response.request.body) - req_txt = format(tools.json_pretty(req_txt)) - - resp_txt = trydecode(response.text) - resp_txt = format(tools.json_pretty(resp_txt)) - msgs += ["*** request ***", req_txt, "*** response ***", resp_txt] - - # msgs = [format(x) for x in msgs] - msg = msgs[0] if len(msgs) == 1 else "\n".join(msgs) - - super(ResponseError, self).__init__(msg) - - -class InvalidJson(ResponseError): - """Error when response has invalid JSON.""" - - def __init__(self, response, exc=None): - """Constructor. - - Args: - response (:obj:`requests.Response`): - Response error was thrown for. - exc (:obj:`Exception`, optional): - Original exception thrown. - - Defaults to: None. - - """ - error = "Invalid JSON in response" - super(InvalidJson, self).__init__(response=response, error=error, exc=exc) - - -class ObjectNotFound(ApiError): - """Error when unable to find an object.""" - - def __init__(self, value, value_type, object_type, exc=None): - """Constructor. - - Args: - value (:obj:`str`): - Value used to find object. - value (:obj:`str`): - Type of value used to find object. - object_type (:obj:`str`): - Type of object searched for. 
- - """ - self.value = value - """:obj:`str`: Value used to find object.""" - - self.value_type = value_type - """:obj:`str`: Value type used to find object.""" - - self.object_type = object_type - """:obj:`str`: Type of object searched for.""" - - msg = "Unable to find {obj_type} using {val_type}: {val!r}" - msg = msg.format(val=value, val_type=value_type, obj_type=object_type) - msg = "{} -- original exception: {}".format(msg, exc) if exc else msg - - super(ObjectNotFound, self).__init__(msg) - - -class TooFewObjectsFound(ApiError): - """Error when too many objects found.""" - - def __init__( - self, value, value_type, object_type, row_count_total, row_count_min, exc=None - ): - """Constructor. - - Args: - value (:obj:`str`): - Value used to find object. - value (:obj:`str`): - Type of value used to find object. - object_type (:obj:`str`): - Type of object searched for. - - """ - self.value = value - """:obj:`str`: Value used to find object.""" - - self.value_type = value_type - """:obj:`str`: Value type used to find object.""" - - self.object_type = object_type - """:obj:`str`: Type of object searched for.""" - - msg = "Expected at least {tmin}, found {tcnt} {obj_type} objects" - msg += " using {val_type}: {val!r}" - msg = msg.format( - val=value, - val_type=value_type, - obj_type=object_type, - tcnt=row_count_total, - tmin=row_count_min, - ) - msg = "{} -- original exception: {}".format(msg, exc) if exc else msg - - super(TooFewObjectsFound, self).__init__(msg) - - -class TooManyObjectsFound(ApiError): - """Error when too many objects found.""" - - def __init__( - self, value, value_type, object_type, row_count_total, row_count_max, exc=None - ): - """Constructor. - - Args: - value (:obj:`str`): - Value used to find object. - value (:obj:`str`): - Type of value used to find object. - object_type (:obj:`str`): - Type of object searched for. 
- - """ - self.value = value - """:obj:`str`: Value used to find object.""" - - self.value_type = value_type - """:obj:`str`: Value type used to find object.""" - - self.object_type = object_type - """:obj:`str`: Type of object searched for.""" - - msg = "Expected no more than {tmax}, found {tcnt} {obj_type} objects" - msg += " using {val_type}: {val!r}" - msg = msg.format( - val=value, - val_type=value_type, - obj_type=object_type, - tcnt=row_count_total, - tmax=row_count_max, - ) - msg = "{} -- original exception: {}".format(msg, exc) if exc else msg - - super(TooManyObjectsFound, self).__init__(msg) - - -class UnknownAdapterName(ApiError): - """Error when unable to find an adapter name.""" - - def __init__(self, name, known_names): - """Constructor. - - Args: - name (:obj:`str`): - Name of adapter that was not found. - known_names (:obj:`list` of :obj:`str`): - Names of adapters that exist. - - """ - self.name = name - """:obj:`str`: Name of adapter that was not found.""" - - self.known_names = known_names - """:obj:`list` of :obj:`str`: Names of adapters that exist.""" - - msg = "Unable to find adapter {name!r}, valid adapters: {names}" - msg = msg.format(name=name, names=known_names) - - super(UnknownAdapterName, self).__init__(msg) - - -class UnknownFieldName(ApiError): - """Error when unable to find a generic or adapter field name.""" - - def __init__(self, name, adapter, known_names): - """Constructor. - - Args: - name (:obj:`str`): - Name of field that was not found. - adapter (:obj:`str`): - Name of adapter that field was being looked for. - known_names (:obj:`list` of :obj:`str`): - Names of fields that exist. 
- - """ - self.name = name - """:obj:`str`: Name of field that was not found.""" - - self.known_names = known_names - """:obj:`list` of :obj:`str`: Names of fields that exist.""" - - self.adapter = adapter - """:obj:`str`: Name of adapter that field was being looked for.""" - - msg = "Unable to find {adapter} field {field!r}, valid fields: {names}" - msg = msg.format(adapter=adapter, field=name, names=known_names) - - super(UnknownFieldName, self).__init__(msg) diff --git a/axonius_api_client/api/mixins.py b/axonius_api_client/api/mixins.py new file mode 100644 index 00000000..85957e29 --- /dev/null +++ b/axonius_api_client/api/mixins.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +"""Axonius API Client package.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import abc + +import six + +from .. import constants, exceptions, logs + + +@six.add_metaclass(abc.ABCMeta) +class Model(object): + """API client for Axonius REST API.""" + + @abc.abstractproperty + def _router(self): + """Router for this API client. + + Returns: + :obj:`axonius_api_client.api.routers.Router` + + """ + raise NotImplementedError # pragma: no cover + + +@six.add_metaclass(abc.ABCMeta) +class ModelUserDevice(Model): + """API client for Axonius REST API.""" + + @abc.abstractproperty + def _default_fields(self): + """Fields to set as default for methods with fields as kwargs. + + Returns: + :obj:`dict` + + """ + raise NotImplementedError # pragma: no cover + + +class Mixins(object): + """API client for Axonius REST API.""" + + def __init__(self, auth, **kwargs): + """Constructor. + + Args: + auth (:obj:`AuthModel`): + Authentication object. 
+ + """ + log_level = kwargs.get("log_level", constants.LOG_LEVEL_API) + self._log = logs.get_obj_log(obj=self, level=log_level) + """:obj:`logging.Logger`: Logger for this object.""" + + self._auth = auth + """:obj:`AuthModel`: Authentication object.""" + + self._init(auth=auth, **kwargs) + + auth.check_login() + + def _init(self, auth, **kwargs): + """Pass.""" + pass + + def __str__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + return "{c.__module__}.{c.__name__}(auth={auth!r}, url={url!r})".format( + c=self.__class__, + auth=self._auth.__class__.__name__, + url=self._auth._http.url, + ) + + def __repr__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + return self.__str__() + + def _request( + self, + path, + method="get", + raw=False, + is_json=True, + error_status=True, + error_json_bad_status=True, + error_json_invalid=True, + **kwargs + ): + """Perform a REST API request. + + Args: + path (:obj:`str`): + Path to use in request. + method (:obj:`str`, optional): + HTTP method to use in request. + + Defaults to: "get". + raw (:obj:`bool`, optional): + Return the raw response. If False, return response text or json. + + Defaults to: False. + is_json (:obj:`bool`, optional): + Response should have JSON data. + + Defaults to: True. + error_status (:obj:`bool`, optional): + Call :meth:`_check_response_code`. + + Defaults to: True. 
+ **kwargs: + Passed to :meth:`axonius_api_client.http.client.HttpClient.__call__` + + Returns: + :obj:`object` if is_json, or :obj:`str` if not is_json, or + :obj:`requests.Response` if raw + + """ + sargs = {} + sargs.update(kwargs) + sargs.update({"path": path, "method": method}) + + response = self._auth.http(**sargs) + + if raw: + return response + + if is_json and response.text: + data = self._check_response_json( + response=response, + error_json_bad_status=error_json_bad_status, + error_json_invalid=error_json_invalid, + ) + else: + data = response.text + + self._check_response_code(response=response, error_status=error_status) + + return data + + def _check_response_code(self, response, error_status=True): + """Check response status code. + + Raises: + :exc:`exceptions.ResponseError` + + """ + if error_status: + try: + response.raise_for_status() + except Exception as exc: + raise exceptions.ResponseNotOk( + response=response, exc=exc, details=True, bodies=True + ) + + def _check_response_json( + self, response, error_json_bad_status=True, error_json_invalid=True + ): + """Check response is JSON. 
+ + Raises: + :exc:`exceptions.InvalidJson` + + """ + try: + data = response.json() + except Exception as exc: + if error_json_invalid: + raise exceptions.JsonInvalid(response=response, exc=exc) + return response.text + + if isinstance(data, dict): + has_error = data.get("error") + has_error_status = data.get("status") == "error" + + if (has_error or has_error_status) and error_json_bad_status: + raise exceptions.JsonError(response=response, data=data) + + return data + + +class Child(object): + """Pass.""" + + def __init__(self, parent): + """Pass.""" + self._parent = parent + self._log = parent._log.getChild(self.__class__.__name__) + + def __str__(self): + """Pass.""" + return "{} for {}".format(self.__class__.__name__, self._parent) + + def __repr__(self): + """Pass.""" + return self.__str__() + + +@six.add_metaclass(abc.ABCMeta) +class Parser(Child): + """Pass.""" + + def __init__(self, raw, parent, **kwargs): + """Pass.""" + self._parent = parent + self._raw = raw + self._log = parent._log.getChild(self.__class__.__name__) + + @abc.abstractmethod + def parse(self): + """Pass.""" + raise NotImplementedError # pragma: no cover diff --git a/axonius_api_client/api/models.py b/axonius_api_client/api/models.py deleted file mode 100644 index 5d54caa3..00000000 --- a/axonius_api_client/api/models.py +++ /dev/null @@ -1,790 +0,0 @@ -# -*- coding: utf-8 -*- -"""Axonius API Client package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import abc -import logging - -import six - -from . import exceptions -from . import utils -from .. import constants -from .. import tools - - -LOG = logging.getLogger(__name__) - - -@six.add_metaclass(abc.ABCMeta) -class ApiBase(object): - """API client for Axonius REST API.""" - - @abc.abstractproperty - def _router(self): - """Router for this API client. 
- - Returns: - :obj:`axonius_api_client.api.routers.Router` - - """ - raise NotImplementedError # pragma: no cover - - def __init__(self, auth): - """Constructor. - - Args: - auth (:obj:`axonius_api_client.auth.models.AuthBase`): - Authentication object. - - """ - self._log = LOG.getChild(self.__class__.__name__) - """:obj:`logging.Logger`: Logger for this object.""" - - self._auth = auth - """:obj:`axonius_api_client.auth.models.AuthBase`: Authentication object.""" - - auth.check_login() - - def __str__(self): - """Show object info. - - Returns: - :obj:`str` - - """ - return "{c.__module__}.{c.__name__}(auth={auth!r}, url={url!r})".format( - c=self.__class__, - auth=self._auth.__class__.__name__, - url=self._auth._http_client.url, - ) - - def __repr__(self): - """Show object info. - - Returns: - :obj:`str` - - """ - return self.__str__() - - def _request( - self, path, method="get", raw=False, is_json=True, check_status=True, **kwargs - ): - """Perform a REST API request. - - Args: - path (:obj:`str`): - Path to use in request. - method (:obj:`str`, optional): - HTTP method to use in request. - - Defaults to: "get". - raw (:obj:`bool`, optional): - Return the raw response. If False, return response text or json. - - Defaults to: False. - is_json (:obj:`bool`, optional): - Response should have JSON data. - - Defaults to: True. - check_status (:obj:`bool`, optional): - Call :meth:`_check_response_status`. - - Defaults to: True. 
- **kwargs: - Passed to :meth:`axonius_api_client.http.HttpClient.__call__` - - Returns: - :obj:`object` if is_json, or :obj:`str` if not is_json, or - :obj:`requests.Response` if raw - - """ - sargs = {} - sargs.update(kwargs) - sargs.update({"path": path, "method": method}) - - response = self._auth.http_client(**sargs) - - if check_status: - self._check_response_status(response=response) - - if raw: - return response - - if is_json: - return self._check_response_json(response=response) - - return response.text - - def _check_response_status(self, response): - """Check response status code. - - Raises: - :exc:`exceptions.ResponseError` - - """ - if response.status_code != 200: - raise exceptions.ResponseError( - response=response, exc=None, details=True, bodies=True - ) - - def _check_response_json(self, response): - """Check response is JSON. - - Raises: - :exc:`exceptions.InvalidJson` - - """ - try: - return response.json() - except Exception as exc: - raise exceptions.InvalidJson(response=response, exc=exc) - # FUTURE: check for "error" in JSON dict - # Need a way to reproduce response with "error" in JSON dict - - -@six.add_metaclass(abc.ABCMeta) -class UserDeviceBase(object): - """Mixins for User & Device models.""" - - @abc.abstractproperty - def _default_fields(self): - """Fields to set as default for methods with fields as kwargs. - - Returns: - :obj:`dict` - - """ - raise NotImplementedError # pragma: no cover - - def get_fields(self): - """Get the fields. - - Notes: - Will only return fields on Axonius v2.7 or greater. Caches result to self. - - Returns: - :obj:`dict` - - """ - if not getattr(self, "_fields", None): - self._fields = self._request(method="get", path=self._router.fields) - return self._fields - - # FUTURE: needs tests - def get_labels(self): - """Get the labels. 
- - Returns: - :obj:`list` of :obj:`str` - - """ - return self._request(method="get", path=self._router.labels) - - # FUTURE: needs tests - def add_labels_by_rows(self, rows, labels): - """Add labels to objects using rows returned from :meth:`get`. - - Args: - rows (:obj:`list` of :obj:`dict`): - Rows returned from :meth:`get` - labels (:obj:`list` of `str`): - Labels to add to rows. - - Returns: - :obj:`int`: Number of objects that had labels added - - """ - ids = [row["internal_axon_id"] for row in rows] - - processed = 0 - - # only do 100 labels at a time, more seems to break API - for group in tools.grouper(ids, 100): - group = [x for x in group if x is not None] - response = self._add_labels(labels=labels, ids=group) - processed += response - - return processed - - # FUTURE: needs tests - def add_labels_by_query(self, query, labels): - """Add labels to objects using a query to select objects. - - Args: - query (:obj:`str`): - Query built from Query Wizard in GUI to select objects to add labels to. - labels (:obj:`list` of `str`): - Labels to add to rows returned from query. - - Returns: - :obj:`int`: Number of objects that had labels added - - """ - rows = list(self.get(query=query, default_fields=False)) - return self.add_labels_by_rows(rows=rows, labels=labels) - - # FUTURE: needs tests - def delete_labels_by_rows(self, rows, labels): - """Delete labels from objects using rows returned from :meth:`get`. - - Args: - rows (:obj:`list` of :obj:`dict`): - Rows returned from :meth:`get` - labels (:obj:`list` of `str`): - Labels to delete from rows. - - Returns: - :obj:`int`: Number of objects that had labels deleted. 
- - """ - ids = [row["internal_axon_id"] for row in rows] - - processed = 0 - - # only do 100 labels at a time, more seems to break API - for group in tools.grouper(ids, 100): - group = [x for x in group if x is not None] - response = self._delete_labels(labels=labels, ids=group) - processed += response - - return processed - - # FUTURE: needs tests - def delete_labels_by_query(self, query, labels): - """Delete labels from objects using a query to select objects. - - Args: - query (:obj:`str`): - Query built from Query Wizard in GUI to select objects to delete labels - from. - labels (:obj:`list` of `str`): - Labels to delete from rows returned from query. - - Returns: - :obj:`int`: Number of objects that had labels deleted - - """ - rows = list(self.get(query=query, default_fields=False)) - return self.delete_labels_by_rows(rows=rows, labels=labels) - - def create_saved_query( - self, - name, - query, - page_size=constants.GUI_PAGE_SIZES[0], - sort_field="", - sort_descending=True, - sort_adapter="generic", - **fields - ): - """Create a saved query. - - Args: - name (:obj:`str`): - Name of saved query to create. - query (:obj:`str`): - Query built from Query Wizard in GUI to use in saved query. - page_size (:obj:`int`, optional): - Number of rows to show in each page in GUI. - - Defaults to: first item in - :data:`axonius_api_client.constants.GUI_PAGE_SIZES`. - sort_field (:obj:`str`, optional): - Name of field to sort results on. - - Defaults to: "". - sort_descending (:obj:`bool`, optional): - Sort sort_field descending. - - Defaults to: True. - sort_adapter (:obj:`str`, optional): - Name of adapter sort_field is from. - - Defaults to: "generic". - - Returns: - :obj:`str`: The ID of the new saved query. 
- - """ - # FUTURE: needs tests - if page_size not in constants.GUI_PAGE_SIZES: - msg = "page_size {size} invalid, must be one of {sizes}" - msg = msg.format(size=page_size, sizes=constants.GUI_PAGE_SIZES) - raise exceptions.ApiError(msg) - - utils.check_max_page_size(page_size=page_size) - for k, v in self._default_fields.items(): - fields.setdefault(k, v) - - known_fields = self.get_fields() - validated_fields = utils.validate_fields(known_fields=known_fields, **fields) - - if sort_field: - sort_field = utils.find_field( - name=sort_field, fields=known_fields, adapter=sort_adapter - ) - - data = {} - data["name"] = name - data["query_type"] = "saved" - data["view"] = {} - data["view"]["fields"] = validated_fields - - # FUTURE: find out what this is - data["view"]["historical"] = None - - # FUTURE: find out if this only impacts GUI - data["view"]["columnSizes"] = [] - - # FUTURE: find out if this only impacts GUI - data["view"]["page"] = 0 - - # FUTURE: find out if this only impacts GUI - data["view"]["pageSize"] = page_size - - data["view"]["query"] = {} - - # FUTURE: validate 'expressions' is not needed - # data["view"]["query"]["expressions"] = [] - - data["view"]["query"]["filter"] = query - data["view"]["sort"] = {} - data["view"]["sort"]["desc"] = sort_descending - data["view"]["sort"]["field"] = sort_field - - return self._request(method="post", path=self._router.views, json=data) - - def delete_saved_query_by_name(self, name, regex=False, only1=True): - """Delete a saved query by name. - - Args: - name (:obj:`str`): - Name of saved query to delete. - regex (:obj:`bool`, optional): - Search for name using regex. - - Defaults to: False. - only1 (:obj:`bool`, optional): - Only allow one match to name. - - Defaults to: True. 
- - Returns: - :obj:`str`: empty string - - """ - found = self.get_saved_query_by_name(name=name, regex=regex, only1=True) - ids = [x["uuid"] for x in found] if isinstance(found, list) else [found["uuid"]] - return self._delete_saved_query(ids=ids) - - def get_saved_query( - self, query=None, page_size=constants.DEFAULT_PAGE_SIZE, max_rows=0 - ): - """Get saved queries using paging. - - Args: - query (:obj:`str`, optional): - Query to filter rows to return. This is NOT a query built by - the Query Wizard in the GUI. This is something else. See - :meth:`get_saved_query_by_name` for an example query. - - Defaults to: None. - page_size (:obj:`int`, optional): - Get N rows per page. - - Defaults to: :data:`axonius_api_client.constants.DEFAULT_PAGE_SIZE`. - max_rows (:obj:`int`, optional): - If not 0, only return up to N rows. - - Defaults to: 0. - - Yields: - :obj:`dict`: Each row found in 'assets' from return. - - """ - page = self._get_saved_query(query=query, page_size=page_size, row_start=0) - - for row in page["assets"]: - yield row - - seen = len(page["assets"]) - - while True: - page = self._get_saved_query( - query=query, page_size=page_size, row_start=seen - ) - - for row in page["assets"]: - yield row - - if (max_rows and seen >= max_rows) or not page["assets"]: - break - - seen += len(page["assets"]) - - def get_saved_query_by_name(self, name, regex=True, only1=False): - """Get saved queries by name using paging. - - Args: - name (:obj:`str`): - Name of saved query to get. - regex (:obj:`bool`, optional): - Search for name using regex. - - Defaults to: True. - only1 (:obj:`bool`, optional): - Only allow one match to name. - - Defaults to: True. - - Raises: - :exc:`exceptions.ObjectNotFound` - - Returns: - :obj:`list` of :obj:`dict`: Each row matching name or :obj:`dict` if only1. 
- - """ - if regex: - query = 'name == regex("{name}", "i")'.format(name=name) - else: - query = 'name == "{name}"'.format(name=name) - - found = list(self.get_saved_query(query=query)) - - if not found or (len(found) > 1 and only1): - object_type = "Saved Query for {o}".format(o=self._router._object_type) - raise exceptions.ObjectNotFound( - value=query, value_type="query", object_type=object_type - ) - - return found[0] if only1 else found - - def get_count(self, query=None): - """Get the number of matches for a given query. - - Args: - query (:obj:`str`, optional): - Query built from Query Wizard in GUI. - - Returns: - :obj:`int` - - """ - params = {} - if query: - params["filter"] = query - return self._request(method="get", path=self._router.count, params=params) - - def get_by_saved_query(self, name, page_size=constants.DEFAULT_PAGE_SIZE): - """Pass. - - Future: Flush out. - """ - sq = self.get_saved_query_by_name(name=name, regex=False, only1=True) - return list( - self.get( - query=sq["view"]["query"]["filter"], - page_size=page_size, - manual_fields=sq["view"]["fields"], - ) - ) - - def get( - self, - query=None, - page_size=constants.DEFAULT_PAGE_SIZE, - page_count=None, - row_count_min=None, - row_count_max=None, - default_fields=True, - manual_fields=None, - **fields - ): - """Get objects for a given query using paging. - - Args: - query (:obj:`str`, optional): - Query built from Query Wizard in GUI to select rows to return. - - Defaults to: None. - page_size (:obj:`int`, optional): - Get N rows per page. - - Defaults to: :data:`axonius_api_client.constants.DEFAULT_PAGE_SIZE`. - default_fields (:obj:`bool`, optional): - Update fields with :attr:`_default_fields` if no fields supplied. - - Defaults to: True. - fields: Fields to include in result. - - >>> generic=['f1', 'f2'] # for generic fields. - >>> adapter=['f1', 'f2'] # for adapter specific fields. - - Yields: - :obj:`dict`: each row found in 'assets' from return. 
- - """ - row_count_total = self.get_count(query=query) - - if row_count_min == 1 and row_count_max == 1 and row_count_total != 1: - raise exceptions.ObjectNotFound( - value=query, - value_type="query", - object_type=self._router._object_type, - exc=None, - ) - - if row_count_min is not None and row_count_total < row_count_min: - raise exceptions.TooFewObjectsFound( - value=query, - value_type="query", - object_type=self._router._object_type, - row_count_total=row_count_total, - row_count_min=row_count_min, - ) - - if row_count_max is not None and row_count_total > row_count_max: - raise exceptions.TooManyObjectsFound( - value=query, - value_type="query", - object_type=self._router._object_type, - row_count_total=row_count_total, - row_count_max=row_count_max, - ) - - if not fields and default_fields: - for k, v in self._default_fields.items(): - fields.setdefault(k, v) - - if manual_fields: - validated_fields = manual_fields - else: - known_fields = self.get_fields() - validated_fields = utils.validate_fields( - known_fields=known_fields, **fields - ) - - row_count_seen = 0 - page_count_seen = 0 - - page = self._get( - query=query, - fields=validated_fields, - row_start=0, - page_size=row_count_max if row_count_max else page_size, - ) - - page_count_seen += 1 - - for row in page["assets"]: - row_count_seen += 1 - yield row - - while not row_count_seen >= row_count_total: - if page_count is not None and page_count_seen >= page_count: - return - - page = self._get( - query=query, - fields=validated_fields, - row_start=row_count_seen, - page_size=page_size, - ) - - page_count_seen += 1 - - for row in page["assets"]: - row_count_seen += 1 - yield row - - def get_by_id(self, id): - """Get an object by internal_axon_id. - - Args: - id (:obj:`str`): - internal_axon_id of object to get. - - Raises: - :exc:`exceptions.ObjectNotFound`: - When :meth:`ApiBase._request` raises exception. 
- - Returns: - :obj:`dict` - - """ - path = self._router.by_id.format(id=id) - try: - data = self._request(method="get", path=path) - except exceptions.ResponseError as exc: - raise exceptions.ObjectNotFound( - value=id, - value_type="Axonius ID", - object_type=self._router._object_type, - exc=exc, - ) - return data - - def get_by_field_value(self, value, field, field_adapter, regex=False, **kwargs): - """Pass. - - FUTURE: Flush out. - """ - if regex: - query = '{field} == regex("{value}", "i")' - else: - query = '{field} == "{value}"' - - known_fields = self.get_fields() - field = utils.find_field(name=field, fields=known_fields, adapter=field_adapter) - - kwargs.setdefault("row_count_min", 1) - kwargs.setdefault("row_count_max", 1) - kwargs.setdefault("query", query.format(field=field, value=value)) - - found = list(self.get(**kwargs)) - - only1 = kwargs["row_count_min"] == 1 and kwargs["row_count_max"] == 1 - - return found[0] if only1 else found - - def _get(self, query=None, fields=None, row_start=0, page_size=0): - """Get a page for a given query. - - Args: - query (:obj:`str`, optional): - Query built from Query Wizard in GUI to select rows to return. - - Defaults to: None. - fields (:obj:`list` of :obj:`str` or :obj:`str`): - List of fields to include in return. - If str, CSV seperated list of fields. - If list, strs of fields. - - Defaults to: None. - row_start (:obj:`int`, optional): - If not 0, skip N rows in the return. - - Defaults to: 0. - page_size (:obj:`int`, optional): - If not 0, include N rows in the return. - - Defaults to: 0. 
- - Returns: - :obj:`dict` - - """ - utils.check_max_page_size(page_size=page_size) - params = {} - - if row_start: - params["skip"] = row_start - - if page_size: - params["limit"] = page_size - - if query: - params["filter"] = query - - if fields: - if isinstance(fields, (list, tuple)): - fields = ",".join(fields) - params["fields"] = fields - return self._request(method="get", path=self._router.root, params=params) - - def _get_saved_query(self, query=None, row_start=0, page_size=0): - """Get device saved queries. - - Args: - query (:obj:`str`, optional): - Query to filter rows to return. This is NOT a query built by - the Query Wizard in the GUI. This is something else. See - :meth:`get_saved_query_by_name` for an example query. Empty - query will return all rows. - - Defaults to: None. - row_start (:obj:`int`, optional): - If not 0, skip N rows in the return. - - Defaults to: 0. - page_size (:obj:`int`, optional): - If not 0, include N rows in the return. - - Defaults to: 0. - - Returns: - :obj:`dict` - - """ - utils.check_max_page_size(page_size=page_size) - params = {} - - if page_size: - params["limit"] = page_size - - if row_start: - params["skip"] = row_start - - if query: - params["filter"] = query - - return self._request(method="get", path=self._router.views, params=params) - - def _delete_saved_query(self, ids): - """Delete saved queries by ids. - - Args: - ids (:obj:`list` of :obj:`str`): - List of UUID's of saved queries to delete. - - Returns: - :obj:`str`: empty string - - """ - data = {"ids": ids} - return self._request(method="delete", path=self._router.views, json=data) - - # FUTURE: needs tests - def _add_labels(self, labels, ids): - """Add labels to object IDs. - - Args: - labels (:obj:`list` of `str`): - Labels to add to ids. - ids (:obj:`list` of `str`): - Axonius internal object IDs to add to labels. 
- - Returns: - :obj:`int`: Number of objects that had labels added - - """ - data = {} - data["entities"] = {} - data["entities"]["ids"] = ids - data["labels"] = labels - return self._request(method="post", path=self._router.labels, json=data) - - # FUTURE: needs tests - def _delete_labels(self, labels, ids): - """Delete labels from object IDs. - - Args: - labels (:obj:`list` of `str`): - Labels to delete from ids. - ids (:obj:`list` of `str`): - Axonius internal object IDs to delete from labels. - - Returns: - :obj:`int`: Number of objects that had labels deleted. - - """ - data = {} - data["entities"] = {} - data["entities"]["ids"] = ids - data["labels"] = labels - return self._request(method="delete", path=self._router.labels, json=data) diff --git a/axonius_api_client/api/routers.py b/axonius_api_client/api/routers.py index bc512767..d86cc5a3 100644 --- a/axonius_api_client/api/routers.py +++ b/axonius_api_client/api/routers.py @@ -1,9 +1,8 @@ # -*- coding: utf-8 -*- """Constants for this package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import absolute_import, division, print_function, unicode_literals + +from .. import tools class Router(object): @@ -20,11 +19,11 @@ def __init__(self, object_type, base, version, **routes): self._version = version self._base = base self._object_type = object_type - self.root = "{base}/{object_type}".format(base=base, object_type=object_type) + self.root = tools.join_url(base, object_type) self._routes = ["root"] for k, v in routes.items(): self._routes.append(k) - setattr(self, k, "{root}/{route}".format(root=self.root, route=v)) + setattr(self, k, tools.join_url(self.root, v)) def __str__(self): """Show object info. 
@@ -88,7 +87,8 @@ class ApiV1(object): object_type="adapters", base=base, version=version, - clients="{adapter_name}/clients", + cnxs="{adapter_name}/clients", + cnxs_uuid="{adapter_name}/clients/{cnx_uuid}", upload_file="{adapter_name}/{node_id}/upload_file", ) diff --git a/axonius_api_client/api/users_devices.py b/axonius_api_client/api/users_devices.py new file mode 100644 index 00000000..f1b4b784 --- /dev/null +++ b/axonius_api_client/api/users_devices.py @@ -0,0 +1,1322 @@ +# -*- coding: utf-8 -*- +"""Axonius API Client package.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import ipaddress + +from .. import constants, exceptions, tools +from . import adapters, mixins, routers + + +class UserDeviceMixin(mixins.ModelUserDevice, mixins.Mixins): + """Mixins for User & Device models.""" + + def _init(self, auth, **kwargs): + """Pass.""" + # cross reference + self.adapters = adapters.Adapters(auth=auth, **kwargs) + + # children + self.labels = Labels(parent=self) + self.saved_query = SavedQuery(parent=self) + self.fields = Fields(parent=self) + self.reports = Reports(parent=self) + + super(UserDeviceMixin, self)._init(auth=auth, **kwargs) + + def _count(self, query=None, use_post=False): + """Pass.""" + params = {} + if query: + params["filter"] = query + + if len(query) >= constants.QUERY_USE_POST_LENGTH: + use_post = True + + if use_post: + return self._request(method="post", path=self._router.count, json=params) + else: + return self._request(method="get", path=self._router.count, params=params) + + # FUTURE: BR for use_post, defaults to limit == 2000 + def _get(self, query=None, fields=None, row_start=0, page_size=0, use_post=False): + """Get a page for a given query. + + Args: + query (:obj:`str`, optional): + Query built from Query Wizard in GUI to select rows to return. + + Defaults to: None. + fields (:obj:`list` of :obj:`str` or :obj:`str`): + List of fields to include in return. 
+ If str, CSV seperated list of fields. + If list, strs of fields. + + Defaults to: None. + row_start (:obj:`int`, optional): + If not 0, skip N rows in the return. + + Defaults to: 0. + page_size (:obj:`int`, optional): + If not 0, include N rows in the return. + + Defaults to: 0. + + Returns: + :obj:`dict` + + """ + if not page_size or page_size > constants.MAX_PAGE_SIZE: + msg = "Changed page size from {ps} to max page size {mps}" + msg = msg.format(ps=page_size, mps=constants.MAX_PAGE_SIZE) + self._log.debug(msg) + + page_size = constants.MAX_PAGE_SIZE + + params = {} + params["skip"] = row_start + params["limit"] = page_size + + if query: + if len(query) >= constants.QUERY_USE_POST_LENGTH: + use_post = True + + params["filter"] = query + + if fields: + if isinstance(fields, tools.LIST): + fields = ",".join(fields) + + params["fields"] = fields + + self._LAST_GET = params + + if use_post: + return self._request(method="post", path=self._router.root, json=params) + else: + return self._request(method="get", path=self._router.root, params=params) + + def _get_by_id(self, id): + """Pass.""" + path = self._router.by_id.format(id=id) + return self._request(method="get", path=path) + + def count(self, query=None, use_post=False): + """Get the number of matches for a given query. + + Args: + query (:obj:`str`, optional): + Query built from Query Wizard in GUI. + + Returns: + :obj:`int` + + """ + return self._count(query=query, use_post=use_post) + + def count_by_saved_query(self, name, use_post=False): + """Get the number of matches for a given query. + + Args: + query (:obj:`str`, optional): + Query built from Query Wizard in GUI. 
+ + Returns: + :obj:`int` + + """ + sq = self.saved_query.get_by_name(value=name, match_count=1, match_error=True) + return self._count(query=sq["view"]["query"]["filter"], use_post=use_post) + + def get( + self, + query=None, + fields=None, + fields_default=True, + fields_error=True, + max_rows=None, + max_pages=None, + page_size=None, + use_post=False, + all_fields=None, + ): + """Get objects for a given query using paging.""" + fields = self.fields.validate( + fields=fields, + error=fields_error, + default=fields_default, + all_fields=all_fields, + ) + + if not page_size or page_size > constants.MAX_PAGE_SIZE: + msg = "Changed page_size={ps} to max_page_size={mps}" + msg = msg.format(ps=page_size, mps=constants.MAX_PAGE_SIZE) + self._log.debug(msg) + + page_size = constants.MAX_PAGE_SIZE + + page_info = {} + page_num = 0 + rows_fetched = 0 + rows = [] + fetch_start = tools.dt_now() + + msg = [ + "Starting get: page_size={}".format(page_size), + "query={!r}".format(query or ""), + "fields={!r}".format(fields), + ] + self._log.debug(tools.join_comma(msg)) + + while True: + page_start = tools.dt_now() + page_num += 1 + rows_left = max_rows - len(rows) if max_rows else -1 + + if 0 < rows_left < page_size: + msg = "Changed page_size={ps} to rows_left={rl} (max_rows={mr})" + msg = msg.format(ps=page_size, rl=rows_left, mr=max_rows) + self._log.debug(msg) + + page_size = rows_left + + msg = [ + "Fetching page_num={}".format(page_num), + "page_size={}".format(page_size), + "rows_fetched={}".format(rows_fetched), + "use_post={}".format(use_post), + ] + self._log.debug(tools.join_comma(obj=msg)) + + page = self._get( + query=query, + fields=fields, + row_start=rows_fetched, + page_size=page_size, + use_post=use_post, + ) + + assets = page["assets"] + page_info = page["page"] + + rows += assets + rows_fetched += len(assets) + + msg = [ + "Fetched page_num={}".format(page_num), + "page_took={}".format(tools.dt_sec_ago(obj=page_start)), + 
"rows_fetched={}".format(rows_fetched), + "page_info={}".format(page_info), + ] + self._log.debug(tools.join_comma(obj=msg)) + + if not assets: + msg = "Stopped fetch loop, page with no assets returned" + self._log.debug(msg) + break + + if max_pages and page_num >= max_pages: + msg = "Stopped fetch loop, hit max_pages={mp}" + msg = msg.format(mp=max_pages) + self._log.debug(msg) + break + + if max_rows and len(rows) >= max_rows: + msg = "Stopped fetch loop, hit max_rows={mr} with rows_fetched={rf}" + msg = msg.format(mr=max_rows, rf=rows_fetched) + self._log.debug(msg) + break + + msg = [ + "Finished get: rows_fetched={}".format(rows_fetched), + "total_rows={}".format(page_info.get("totalResources", 0)), + "fetch_took={}".format(tools.dt_sec_ago(obj=fetch_start)), + "query={!r}".format(query or ""), + "fields={!r}".format(fields), + ] + self._log.debug(tools.join_comma(obj=msg)) + + return rows + + def get_by_id(self, id): + """Get an object by internal_axon_id. + + Args: + id (:obj:`str`): + internal_axon_id of object to get. + + Returns: + :obj:`dict` + + """ + try: + return self._get_by_id(id=id) + except exceptions.JsonError as exc: + msg = "Axonius ID for {t}".format(t=self._router._object_type) + raise exceptions.ValueNotFound(value=id, value_msg=msg, exc=exc) + + def get_by_saved_query( + self, name, max_rows=None, max_pages=None, page_size=None, use_post=False + ): + """Pass.""" + sq = self.saved_query.get_by_name(value=name, match_count=1, match_error=True) + + return self.get( + query=sq["view"]["query"]["filter"], + fields={"manual": sq["view"]["fields"]}, + max_rows=max_rows, + max_pages=max_pages, + page_size=page_size, + use_post=use_post, + ) + + # FUTURE: include outdated and/or query_pre? 
+ def get_by_value( + self, + value, + field, + query_post="", + match_count=None, + match_error=True, + eq_single=True, + fields=None, + fields_default=True, + fields_error=True, + max_rows=None, + max_pages=None, + page_size=None, + use_post=False, + all_fields=None, + ): + """Build query to perform equals or regex search.""" + all_fields = all_fields or self.fields.get() + + field = self.fields.find_single(field=field, all_fields=all_fields) + + not_flag = "" + + if isinstance(value, tools.LIST): + if any([x.startswith("NOT:") for x in value]): + value = [tools.strip_left(obj=x, fix="NOT:").strip() for x in value] + not_flag = "not " + elif value.startswith("NOT:"): + value = tools.strip_left(obj=value, fix="NOT:").strip() + not_flag = "not " + + if isinstance(value, tools.LIST): + value = tools.strip_left(obj=value, fix="RE:") + value = ", ".join(["'{}'".format(v.strip()) for v in value]) + query = "{not_flag}{field} in [{value}]" + elif value.startswith("RE:"): + value = tools.strip_left(obj=value, fix="RE:").strip() + query = '{not_flag}{field} == regex("{value}", "i")' + else: + query = '{not_flag}{field} == "{value}"' + value = value.strip() + + if eq_single and (not query_post and not not_flag): + max_rows = 1 + match_count = 1 + match_error = True + + query = query.format(not_flag=not_flag, field=field, value=value) + query_post + + rows = self.get( + query=query, + fields=fields, + fields_default=fields_default, + fields_error=fields_error, + max_rows=max_rows, + max_pages=max_pages, + page_size=page_size, + use_post=use_post, + all_fields=all_fields, + ) + + if (match_count and len(rows) != match_count) and match_error: + value_msg = "{o} by field {f!r} value {v!r}" + value_msg = value_msg.format(o=self._router._object_type, f=field, v=value) + raise exceptions.ValueNotFound(value=query, value_msg=value_msg) + + if match_count == 1 and len(rows) == 1: + return rows[0] + + return rows + + +class Users(UserDeviceMixin): + """User related API methods.""" + 
+ @property + def _router(self): + """Router for this API client. + + Returns: + :obj:`axonius_api_client.api.routers.Router` + + """ + return routers.ApiV1.users + + @property + def _default_fields(self): + """Fields to set as default for methods with fields as kwargs. + + Returns: + :obj:`dict` + + """ + return [ + "labels", + "adapters", + "specific_data.data.id", + "specific_data.data.fetch_time", + "specific_data.data.username", + "specific_data.data.mail", + ] + + def get_by_username(self, value, **kwargs): + """Get objects by name using paging. + + Args: + value (:obj:`int`): + Value to find using field "username". + **kwargs: Passed thru to :meth:`UserDeviceModel.get_by_value` + + Returns: + :obj:`list` of :obj:`dict`: Each row matching name or :obj:`dict` if only1. + + """ + kwargs.pop("field", None) + return self.get_by_value( + value=value, field="specific_data.data.username", **kwargs + ) + + def get_by_mail(self, value, **kwargs): + """Get objects by email using paging. + + Args: + value (:obj:`int`): + Value to find using field "mail". + **kwargs: Passed thru to :meth:`UserDeviceModel.get_by_value` + + Returns: + :obj:`list` of :obj:`dict`: Each row matching email or :obj:`dict` if only1. + + """ + kwargs.pop("field", None) + return self.get_by_value(value=value, field="specific_data.data.mail", **kwargs) + + +class Devices(UserDeviceMixin): + """Device related API methods.""" + + @property + def _router(self): + """Router for this API client. + + Returns: + :obj:`axonius_api_client.api.routers.Router` + + """ + return routers.ApiV1.devices + + @property + def _default_fields(self): + """Fields to set as default for methods with fields as kwargs. + + Returns: + :obj:`dict` + + """ + return [ + "labels", + "adapters", + "specific_data.data.id", + "specific_data.data.fetch_time", + "specific_data.data.hostname", + "specific_data.data.network_interfaces.ips", + ] + + def get_by_hostname(self, value, **kwargs): + """Get objects by name using paging. 
+ + Args: + value (:obj:`int`): + Value to find using field "username". + **kwargs: Passed thru to :meth:`UserDeviceModel.get_by_value` + + Returns: + :obj:`list` of :obj:`dict`: Each row matching name or :obj:`dict` if only1. + + """ + kwargs.pop("field", None) + return self.get_by_value( + value=value, field="specific_data.data.hostname", **kwargs + ) + + def get_by_mac(self, value, **kwargs): + """Get objects by MAC using paging. + + Args: + value (:obj:`int`): + Value to find using field "network_interfaces.mac". + **kwargs: Passed thru to :meth:`UserDeviceModel.get_by_value` + + Returns: + :obj:`list` of :obj:`dict`: Each row matching email or :obj:`dict` if only1. + + """ + kwargs.pop("field", None) + return self.get_by_value( + value=value, field="specific_data.data.network_interfaces.mac", **kwargs + ) + + def get_by_ip(self, value, **kwargs): + """Get objects by MAC using paging. + + Args: + value (:obj:`int`): + Value to find using field "network_interfaces.mac". + **kwargs: Passed thru to :meth:`UserDeviceModel.get_by_value` + + Returns: + :obj:`list` of :obj:`dict`: Each row matching email or :obj:`dict` if only1. + + """ + kwargs.pop("field", None) + return self.get_by_value( + value=value, field="specific_data.data.network_interfaces.ips", **kwargs + ) + + def get_by_subnet(self, value, query_post="", **kwargs): + """Get objects by MAC using paging. + + Args: + value (:obj:`int`): + Value to find using field "network_interfaces.mac". + **kwargs: Passed thru to :meth:`UserDeviceModel.get_by_value` + + Returns: + :obj:`list` of :obj:`dict`: Each row matching email or :obj:`dict` if only1. 
+ + """ + not_flag = "" + + if value.startswith("NOT:"): + value = tools.strip_left(obj=value, fix="NOT:").strip() + not_flag = "not " + + network = ipaddress.ip_network(value) + + begin = int(network.network_address) + end = int(network.broadcast_address) + + match_field = "specific_data.data.network_interfaces.ips_raw" + + match = 'match({{"$gte": {begin}, "$lte": {end}}})' + match = match.format(begin=begin, end=end) + + query = "{not_flag}{match_field} == {match}{query_post}" + query = query.format( + not_flag=not_flag, + match_field=match_field, + match=match, + query_post=query_post, + ) + + kwargs.pop("query", None) + return self.get(query=query, **kwargs) + + +class SavedQuery(mixins.Child): + """Pass.""" + + def _add( + self, + name, + query, + fields, + sort=None, + sort_descending=True, + column_filters=None, + gui_page_size=None, + ): + """Create a saved query. + + Args: + name (:obj:`str`): + Name of saved query to create. + query (:obj:`str`): + Query built from Query Wizard in GUI to use in saved query. + page_size (:obj:`int`, optional): + Number of rows to show in each page in GUI. + + Defaults to: first item in + :data:`axonius_api_client.constants.GUI_PAGE_SIZES`. + sort (:obj:`str`, optional): + Name of field to sort results on. + + Defaults to: "". + sort_descending (:obj:`bool`, optional): + Sort sort descending. + + Defaults to: True. + sort_adapter (:obj:`str`, optional): + Name of adapter sort is from. + + Defaults to: "generic". + + Returns: + :obj:`str`: The ID of the new saved query. 
+ + """ + if gui_page_size not in constants.GUI_PAGE_SIZES: + gui_page_size = constants.GUI_PAGE_SIZES[0] + + data = {} + data["name"] = name + data["query_type"] = "saved" + + data["view"] = {} + data["view"]["fields"] = fields + data["view"]["colFilters"] = column_filters or {} + data["view"]["pageSize"] = gui_page_size + + data["view"]["query"] = {} + data["view"]["query"]["filter"] = query + + data["view"]["sort"] = {} + data["view"]["sort"]["desc"] = sort_descending + data["view"]["sort"]["field"] = sort or "" + + path = self._parent._router.views + + return self._parent._request(method="post", path=path, json=data) + + def _delete(self, ids): + """Delete saved queries by ids. + + Args: + ids (:obj:`list` of :obj:`str`): + List of UUID's of saved queries to delete. + + Returns: + :obj:`str`: empty string + + """ + data = {"ids": tools.listify(ids)} + + path = self._parent._router.views + + return self._parent._request(method="delete", path=path, json=data) + + def _get(self, query=None, row_start=0, page_size=None): + """Get device saved queries. + + Args: + query (:obj:`str`, optional): + Query to filter rows to return. This is NOT a query built by + the Query Wizard in the GUI. This is something else. See + :meth:`find_saved_query_by_name` for an example query. Empty + query will return all rows. + + Defaults to: None. + row_start (:obj:`int`, optional): + If not 0, skip N rows in the return. + + Defaults to: 0. + page_size (:obj:`int`, optional): + If not 0, include N rows in the return. + + Defaults to: 0. 
+ + Returns: + :obj:`dict` + + """ + if not page_size or page_size > constants.MAX_PAGE_SIZE: + msg = "Changed page size from {ps} to max page size {mps}" + msg = msg.format(ps=page_size, mps=constants.MAX_PAGE_SIZE) + self._log.debug(msg) + + page_size = constants.MAX_PAGE_SIZE + + params = {} + params["limit"] = page_size + params["skip"] = row_start + + if query: + params["filter"] = query + + path = self._parent._router.views + + return self._parent._request(method="get", path=path, params=params) + + # FUTURE: FR: Have backend process expressions on add if none supplied + def add( + self, + name, + query, + fields=None, + fields_default=True, + fields_error=True, + sort=None, + sort_descending=True, + column_filters=None, + gui_page_size=None, + ): + """Create a saved query. + + Args: + name (:obj:`str`): + Name of saved query to create. + query (:obj:`str`): + Query built from Query Wizard in GUI to use in saved query. + page_size (:obj:`int`, optional): + Number of rows to show in each page in GUI. + + Defaults to: first item in + :data:`axonius_api_client.constants.GUI_PAGE_SIZES`. + sort (:obj:`str`, optional): + Name of field to sort results on. + + Defaults to: "". + sort_descending (:obj:`bool`, optional): + Sort sort descending. + + Defaults to: True. + sort_adapter (:obj:`str`, optional): + Name of adapter sort is from. + + Defaults to: "generic". + + Returns: + :obj:`str`: The ID of the new saved query. 
+ + """ + all_fields = self._parent.fields.get() + + fields = self._parent.fields.validate( + fields=fields, + default=fields_default, + error=fields_error, + all_fields=all_fields, + ) + + find_single = self._parent.fields.find_single + + if sort: + sort = find_single(field=sort, all_fields=all_fields) + + if column_filters: + column_filters = { + find_single(field=k, all_fields=all_fields): v + for k, v in column_filters.items() + } + + added = self._add( + name=name, + query=query, + fields=fields, + column_filters=column_filters, + sort=sort, + sort_descending=sort_descending, + gui_page_size=gui_page_size, + ) + + return self.get_by_id(value=added) + + def delete(self, rows): + """Delete a saved query by name. + + Args: + name (:obj:`str`): + Name of saved query to delete. + + Returns: + :obj:`str`: empty string + + """ + return self._delete( + ids=[x["uuid"] for x in tools.listify(obj=rows, dictkeys=False)] + ) + + def get(self, query=None, max_rows=None, max_pages=None, page_size=None): + """Get saved queries using paging. + + Args: + query (:obj:`str`, optional): + Query to filter rows to return. This is NOT a query built by + the Query Wizard in the GUI. This is something else. See + :meth:`get` for an example query. + + Defaults to: None. + page_size (:obj:`int`, optional): + Get N rows per page. + + Defaults to: :data:`axonius_api_client.constants.DEFAULT_PAGE_SIZE`. + max_rows (:obj:`int`, optional): + If not 0, only return up to N rows. + + Defaults to: 0. + + Yields: + :obj:`dict`: Each row found in 'assets' from return. 
+ + """ + if not page_size or page_size > constants.MAX_PAGE_SIZE: + msg = "Changed page_size={ps} to max_page_size={mps}" + msg = msg.format(ps=page_size, mps=constants.MAX_PAGE_SIZE) + self._log.debug(msg) + + page_size = constants.MAX_PAGE_SIZE + + page_info = 0 + page_num = 0 + rows_fetched = 0 + rows = [] + fetch_start = tools.dt_now() + + msg = [ + "Starting get: page_size={}".format(page_size), + "query={!r}".format(query or ""), + ] + self._log.debug(tools.join_comma(msg)) + + while True: + page_start = tools.dt_now() + page_num += 1 + rows_left = max_rows - len(rows) if max_rows else -1 + + if 0 < rows_left < page_size: + msg = "Changed page_size={ps} to rows_left={rl} (max_rows={mr})" + msg = msg.format(ps=page_size, rl=rows_left, mr=max_rows) + self._log.debug(msg) + + page_size = rows_left + + msg = [ + "Fetching page_num={}".format(page_num), + "page_size={}".format(page_size), + "rows_fetched={}".format(rows_fetched), + ] + self._log.debug(tools.join_comma(obj=msg)) + + page = self._get(query=query, page_size=page_size, row_start=rows_fetched) + + assets = page["assets"] + page_info = page["page"] + + rows += assets + rows_fetched += len(assets) + + msg = [ + "Fetched page_num={}".format(page_num), + "page_took={}".format(tools.dt_sec_ago(obj=page_start)), + "rows_fetched={}".format(rows_fetched), + "page_info={}".format(page_info), + ] + self._log.debug(tools.join_comma(obj=msg)) + + if not assets: + msg = "Stopped fetch loop, page with no assets returned" + self._log.debug(msg) + break + + if max_pages and page_num >= max_pages: + msg = "Stopped fetch loop, hit max_pages={mp}" + msg = msg.format(mp=max_pages) + self._log.debug(msg) + break + + if max_rows and len(rows) >= max_rows: + msg = "Stopped fetch loop, hit max_rows={mr} with rows_fetched={rf}" + msg = msg.format(mr=max_rows, rf=rows_fetched) + self._log.debug(msg) + break + + msg = [ + "Finished get: rows_fetched={}".format(rows_fetched), + 
"total_rows={}".format(page_info["totalResources"]), + "fetch_took={}".format(tools.dt_sec_ago(obj=fetch_start)), + "query={!r}".format(query or ""), + ] + self._log.debug(tools.join_comma(obj=msg)) + + return rows + + def get_by_id( + self, value, match_error=True, max_rows=None, max_pages=None, page_size=None + ): + """Get saved queries using paging.""" + rows = self.get(max_rows=max_rows, max_pages=max_pages, page_size=page_size) + + for row in rows: + if row["uuid"] == value: + return row + + if match_error: + ktmpl = "name: {name!r}, uuid: {uuid!r}".format + known = [ktmpl(**row) for row in rows] + known_msg = "Saved Queries" + value_msg = "Saved Query by UUID" + raise exceptions.ValueNotFound( + value=value, value_msg=value_msg, known=known, known_msg=known_msg + ) + + return None + + def get_by_name( + self, + value, + match_count=None, + match_error=True, + eq_single=True, + max_rows=None, + max_pages=None, + page_size=None, + ): + """Get saved queries using paging. + + Args: + name (:obj:`str`): + Name of saved query to get. + use_regex (:obj:`bool`, optional): + Search for name using regex. + + Defaults to: True. + only1 (:obj:`bool`, optional): + Only allow one match to name. + + Defaults to: True. + + Returns: + :obj:`list` of :obj:`dict`: Each row matching name or :obj:`dict` if only1. 
+ + """ + not_flag = "" + + if value.startswith("NOT:"): + value = tools.strip_left(obj=value, fix="NOT:").strip() + not_flag = "not " + + if value.startswith("RE:"): + value = tools.strip_left(obj=value, fix="RE:") + query = '{not_flag}name == regex("{value}", "i")' + else: + query = '{not_flag}name == "{value}"' + + if eq_single and not not_flag: + max_rows = 1 + match_count = 1 + match_error = True + + query = query.format(not_flag=not_flag, value=value) + + rows = self.get( + query=query, max_rows=max_rows, max_pages=max_pages, page_size=page_size + ) + + if (match_count and len(rows) != match_count) and match_error: + ktmpl = "name: {name!r}, uuid: {uuid!r}".format + known = [ktmpl(**row) for row in self.get()] + known_msg = "Saved Queries" + value_msg = "Saved Query by name using query {q}".format(q=query) + raise exceptions.ValueNotFound( + value=value, value_msg=value_msg, known=known, known_msg=known_msg + ) + + if match_count == 1 and len(rows) == 1: + return rows[0] + + return rows + + +class Labels(mixins.Child): + """Pass.""" + + def _add(self, labels, ids): + """Add labels to object IDs. + + Args: + labels (:obj:`list` of `str`): + Labels to add to ids. + ids (:obj:`list` of `str`): + Axonius internal object IDs to add to labels. + + Returns: + :obj:`int`: Number of objects that had labels added + + """ + data = {} + data["entities"] = {} + data["entities"]["ids"] = ids + data["labels"] = labels + + path = self._parent._router.labels + + return self._parent._request(method="post", path=path, json=data) + + def _get(self): + """Get the labels. + + Returns: + :obj:`list` of :obj:`str` + + """ + path = self._parent._router.labels + + return self._parent._request(method="get", path=path) + + def _remove(self, labels, ids): + """Delete labels from object IDs. + + Args: + labels (:obj:`list` of `str`): + Labels to delete from ids. + ids (:obj:`list` of `str`): + Axonius internal object IDs to delete from labels. 
+ + Returns: + :obj:`int`: Number of objects that had labels deleted. + + """ + data = {} + data["entities"] = {} + data["entities"]["ids"] = ids + data["labels"] = labels + + path = self._parent._router.labels + + return self._parent._request(method="delete", path=path, json=data) + + def add(self, rows, labels): + """Add labels to objects using rows returned from :meth:`get`. + + Args: + rows (:obj:`list` of :obj:`dict`): + Rows returned from :meth:`get` + labels (:obj:`list` of `str`): + Labels to add to rows. + + Returns: + :obj:`int`: Number of objects that had labels added + + """ + ids = [row["internal_axon_id"] for row in rows] + + processed = 0 + + # only do 100 labels at a time, more seems to break API + for group in tools.grouper(ids, 100): + group = [x for x in group if x is not None] + response = self._add(labels=labels, ids=group) + processed += response + + return processed + + def get(self): + """Get the labels. + + Returns: + :obj:`list` of :obj:`str` + + """ + return self._get() + + def remove(self, rows, labels): + """Delete labels from objects using rows returned from :meth:`get`. + + Args: + rows (:obj:`list` of :obj:`dict`): + Rows returned from :meth:`get` + labels (:obj:`list` of `str`): + Labels to delete from rows. + + Returns: + :obj:`int`: Number of objects that had labels deleted. + + """ + ids = [row["internal_axon_id"] for row in rows] + + processed = 0 + + # only do 100 labels at a time, more seems to break API + for group in tools.grouper(ids, 100): + group = [x for x in group if x is not None] + response = self._remove(labels=labels, ids=group) + processed += response + + return processed + + +class Fields(mixins.Child): + """Pass.""" + + _GENERIC_ALTS = ["generic", "general", "specific"] + _ALL_ALTS = ["all", "*", "specific_data"] + + def _get(self): + """Get the fields. 
+ + Returns: + :obj:`dict` + + """ + return self._parent._request(method="get", path=self._parent._router.fields) + + def find_adapter(self, adapter, error=True, all_fields=None): + """Find an adapter by name.""" + all_fields = all_fields or self.get() + + check = tools.strip_right(obj=adapter.lower().strip(), fix="_adapter") + + if check in self._GENERIC_ALTS: + check = "generic" + + if check in all_fields: + vmsg = "Validated adapter name {cn!r} (supplied {n!r})" + vmsg = vmsg.format(n=adapter, cn=check) + self._log.debug(vmsg) + + return check, all_fields[check] + + if error: + raise exceptions.ValueNotFound( + value=adapter, + value_msg="adapter by name", + known=list(all_fields), + known_msg="adapter names", + ) + + fmsg = "Failed to validate adapter {cn!r} (supplied {n!r})" + fmsg = fmsg.format(n=adapter, cn=check) + self._log.warning(fmsg) + + return None, {} + + def find_single(self, field, all_fields=None): + """Find a single field.""" + found = self.find(field=field, error=True, all_fields=all_fields) + return found[0] + + def find(self, field, error=True, all_fields=None): + """Find a field for a given adapter.""" + if field.startswith("MANUAL:"): + return tools.strip_left(obj=field, fix="MANUAL:").strip() + + all_fields = all_fields or self.get() + + all_fq = [f["name"] for af in all_fields.values() for f in af.values()] + + check = field.strip() + + if check in all_fq: + fqmsg = "Validated field {sf!r} as already fully qualified" + fqmsg = fqmsg.format(sf=field) + self._log.debug(fqmsg) + + return [check] + + if ":" in check: + search_adapter, search_fields = check.split(":", 1) + else: + search_adapter, search_fields = ("generic", check) + + search_adapter = search_adapter.strip() + search_fields = [ + x.strip().lower() for x in search_fields.split(",") if x.strip() + ] + + real_adapter, real_fields = self.find_adapter( + adapter=search_adapter, error=error, all_fields=all_fields + ) + + found = [] + + if not real_adapter: + return found + + for 
search_field in search_fields: + found_field = None + + if search_field in self._ALL_ALTS: + found_field = real_fields["all"]["name"] + elif search_field in all_fq: + found_field = search_field + elif search_field in real_fields: + found_field = real_fields[search_field]["name"] + + if not found_field: + if error: + value_msg = "adapter {a!r} field" + value_msg = value_msg.format(a=real_adapter) + + known_msg = "field names for adapter {a!r}" + known_msg = known_msg.format(a=real_adapter) + + raise exceptions.ValueNotFound( + value=search_field, + known=list(real_fields), + value_msg=value_msg, + known_msg=known_msg, + ) + + wmsg = "Failed to validate field {sf!r} for adapter {a!r} as {ff!r}" + wmsg = wmsg.format(a=real_adapter, sf=search_field, ff=found_field) + self._log.warning(wmsg) + else: + if found_field not in found: + found.append(found_field) + + vfmsg = "Validated field {sf!r} for adapter {a!r} as {ff!r}" + vfmsg = vfmsg.format(a=real_adapter, sf=search_field, ff=found_field) + self._log.debug(vfmsg) + + vsmsg = "Validated field search {s!r} as {f!r}" + vsmsg = vsmsg.format(s=field, f=found) + self._log.debug(vsmsg) + + return found + + def get(self): + """Pass.""" + raw = self._get() + parser = ParserFields(raw=raw, parent=self) + return parser.parse() + + def validate(self, fields=None, default=True, error=True, all_fields=None): + """Validate provided fields.""" + fields = tools.listify(fields) + all_fields = all_fields or self.get() + + if default: + val_fields = self._parent._default_fields + else: + val_fields = [] + + for field in [x for x in fields if isinstance(x, tools.STR) and x]: + found = self.find(field=field, all_fields=all_fields, error=error) + val_fields += [x for x in found if x not in val_fields] + + return val_fields + + +class Reports(mixins.Child): + """Pass.""" + + # FUTURE: + """ + get all users + for each device + find any users whose username matches last logged in user + device[users] = found_users + """ + + def 
missing_adapters(self, rows, adapters=None, fields=None): + """Pass.""" + adapters = adapters or self._parent.adapters.get() + fields = fields or self._parent.fields.get() + new_rows = [] + + for raw_row in rows: + # row = {k: v for k, v in raw_row.items() if "." in k or k in ["labels"]} + row = {k: v for k, v in raw_row.items()} + row["adapters"] = tools.strip_right( + obj=tools.listify(obj=raw_row.get("adapters", [])), fix="_adapter" + ) + row["missing_nocnx"] = [] + row["missing"] = [] + + for adapter in adapters: + # this row does not have data from this adapter + if adapter["name"] not in row["adapters"]: + # this adapter has no connections + if not adapter["cnx"]: + row["missing_nocnx"].append(adapter["name"]) + # this adapter has been fetched by other assets, but not this one + elif adapter["name"] in fields: + row["missing"].append(adapter["name"]) + + new_rows.append(row) + + return new_rows + + +class ParserFields(mixins.Parser): + """Pass.""" + + def _exists(self, item, source, desc): + """Pass.""" + if item in source: + msg = "{d} {i!r} already exists, duplicate??" 
+ msg = msg.format(d=desc, i=item) + raise exceptions.ApiError(msg) + + def _generic(self): + """Pass.""" + fields = { + "all_data": { + "name": "specific_data.data", + "title": "All data subsets for generic adapter", + "type": "array", + "adapter_prefix": "specific_data", + }, + "all": { + "name": "specific_data", + "title": "All data for generic adapter", + "type": "array", + "adapter_prefix": "specific_data", + }, + } + + for field in self._raw["generic"]: + field["adapter_prefix"] = "specific_data" + field_name = tools.strip_left( + obj=field["name"], fix="specific_data.data" + ).strip(".") + self._exists(field_name, fields, "Generic field") + fields[field_name] = field + + return fields + + def _adapter(self, name, raw_fields): + short_name = tools.strip_right(obj=name, fix="_adapter") + + prefix = "adapters_data.{adapter_name}" + prefix = prefix.format(adapter_name=name) + + fields = { + "all": { + "name": prefix, + "title": "All data for {} adapter".format(prefix), + "type": "array", + "adapter_prefix": prefix, + }, + "raw": { + "name": "{}.raw".format(prefix), + "title": "All raw data for {} adapter".format(prefix), + "type": "array", + "adapter_prefix": prefix, + }, + } + + for field in raw_fields: + field["adapter_prefix"] = prefix + field_name = tools.strip_left(obj=field["name"], fix=prefix).strip(".") + self._exists(field_name, fields, "Adapter {} field".format(short_name)) + fields[field_name] = field + + return short_name, fields + + def parse(self): + """Pass.""" + ret = {} + ret["generic"] = self._generic() + + for name, raw_fields in self._raw["specific"].items(): + short_name, fields = self._adapter(name=name, raw_fields=raw_fields) + self._exists(short_name, ret, "Adapter {}".format(name)) + ret[short_name] = fields + + return ret diff --git a/axonius_api_client/api/utils.py b/axonius_api_client/api/utils.py deleted file mode 100644 index 9924b65f..00000000 --- a/axonius_api_client/api/utils.py +++ /dev/null @@ -1,185 +0,0 @@ -# -*- coding: 
utf-8 -*- -"""Axonius API Client package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import logging - -from . import exceptions -from .. import tools -from .. import constants - -LOG = logging.getLogger(__name__) - - -def find_adapter(name, known_names=None): - """Find an adapter by name. - - Args: - name (:obj:`str`): - Name of adapter to find. - known_names (:obj:`list` of :obj:`str`, optional): - List of known adapter names. - - Defaults to: None. - - Notes: - If known_names is None, this will just ensure name ends with '_adapter'. - - Raises: - :exc:`exceptions.UnknownAdapterName`: If name can not be found in known_names. - - Returns: - :obj:`str` - - """ - postfix = "_adapter" - name = name if name.endswith(postfix) else name + postfix - - if not known_names: - found = name - elif name in known_names: - found = known_names[known_names.index(name)] - else: - known_names = tools.rstrip(obj=known_names, postfix=postfix) - raise exceptions.UnknownAdapterName(name=name, known_names=known_names) - - msg = "Resolved adapter name {name!r} to {found!r}" - msg = msg.format(name=name, found=found) - LOG.debug(msg) - - return found - - -def find_field(name, adapter, fields=None): - """Find a field for a given adapter. - - Args: - name (:obj:`str`): - Name of field to find. - adapter (:obj:`str`): - Name of adapter to look for field in. - If 'generic' look for the field in generic fields. - fields (:obj:`dict`, optional): - Return from :meth:`axonius_api_client.api.models.UserDeviceBase.get_fields`. - - Defaults to: None. - - Notes: - If adapter 'generic', ensure name begins with - :attr:`axonius_api_client.constants.GENERIC_FIELD_PREFIX`, - otherwise ensure name begins with - :attr:`axonius_api_client.constants.ADAPTER_FIELD_PREFIX`. - - If fields is None, we can't validate that the field exists, so we just ensure - the name is fully qualified. 
- - If name in "all" or prefix, returns prefix. - - Raises: - :exc:`exceptions.UnknownFieldName`: - If fields is not None and name can not be found in fields. - - Returns: - :obj:`str` - - """ - if adapter == "generic": - prefix = constants.GENERIC_FIELD_PREFIX - container = fields["generic"] if fields else None - else: - known_adapters = list(fields["specific"].keys()) if fields else None - adapter = find_adapter(name=adapter, known_names=known_adapters) - prefix = constants.ADAPTER_FIELD_PREFIX.format(adapter_name=adapter) - container = fields["specific"][adapter] if fields else None - - if not name.startswith(prefix): - fq_name = ".".join([x for x in [prefix, name] if x]) - else: - fq_name = name - - found = None - - if not container: - found = name if name in ["adapters", "labels"] else fq_name - else: - known_names = [x["name"] for x in container] - - for check in [name, fq_name]: - if check in ["all", prefix]: - found = prefix - break - if check in known_names: - found = known_names[known_names.index(check)] - break - - if not found: - known_names = tools.lstrip(obj=known_names, prefix=prefix + ".") - known_names += ["all", prefix] - raise exceptions.UnknownFieldName( - name=name, known_names=known_names, adapter=adapter - ) - - msg = "Resolved {adapter!r} field name {name!r} to {found!r}" - msg = msg.format(adapter=adapter, name=name, found=found) - LOG.debug(msg) - - return found - - -def validate_fields(known_fields, **fields): - """Validate provided fields are valid. - - Args: - known_fields (:obj:`dict`): - Known fields from - :meth:`axonius_api_client.api.models.UserDeviceBase.get_fields`. - **fields: Fields to validate. - * generic=['f1', 'f2'] for generic fields. - * adapter=['f1', 'f2'] for adapter specific fields. - - Notes: - This will try to use known_fields to validate the device - fields, but if known_fields is empty it will just ensure the fields are - fully qualified. 
- - * generic=['field1'] => ['specific_data.data.field1'] - * adapter=['field1'] =>['adapters_data.adapter_name.field1'] - - Returns: - :obj:`list` of :obj:`str` - - """ - validated_fields = [] - - for name, afields in fields.items(): - if not isinstance(afields, (tuple, list)): - continue - for field in afields: - field = find_field(name=field, fields=known_fields, adapter=name) - if field not in validated_fields: - validated_fields.append(field) - - msg = "Resolved fields {fields} to {validated_fields}" - msg = msg.format(fields=fields, validated_fields=validated_fields) - LOG.debug(msg) - return validated_fields - - -def check_max_page_size(page_size): - """Check if page size is over :data:`axonius_api_client.constants.MAX_PAGE_SIZE`. - - Args: - page_size (:obj:`int`): - Page size to check. - - Raises: - :exc:`exceptions.ApiError` - - """ - if page_size > constants.MAX_PAGE_SIZE: - msg = "Page size {page_size} is over maximum page size {max_size}" - msg = msg.format(page_size=page_size, max_size=constants.MAX_PAGE_SIZE) - raise exceptions.ApiError(msg) diff --git a/axonius_api_client/auth/__init__.py b/axonius_api_client/auth.py similarity index 51% rename from axonius_api_client/auth/__init__.py rename to axonius_api_client/auth.py index e699cf75..d06e1f94 100644 --- a/axonius_api_client/auth/__init__.py +++ b/axonius_api_client/auth.py @@ -1,40 +1,80 @@ # -*- coding: utf-8 -*- -"""Axonius API Client package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +"""Axonius API authentication module.""" +from __future__ import absolute_import, division, print_function, unicode_literals -import logging +import abc -from . import models -from . import exceptions -from .. import api +import six -LOG = logging.getLogger(__name__) +from . 
import api, constants, exceptions, logs -class AuthMixins(object): - """Mixins for AuthBase.""" +@six.add_metaclass(abc.ABCMeta) +class Model(object): + """Abstract base class for all Authentication methods.""" + + @abc.abstractmethod + def login(self): + """Login to API.""" + raise NotImplementedError # pragma: no cover + + @abc.abstractmethod + def logout(self): + """Logout from API.""" + raise NotImplementedError # pragma: no cover + + @abc.abstractmethod + def check_login(self): + """Throw exc if not login. + + Raises: + :exc:`exceptions.NotLoggedIn` + + """ + raise NotImplementedError # pragma: no cover + + @abc.abstractproperty + def http(self): + """Get HttpClient object. + + Returns: + :obj:`axonius_api_client.http.Http` + + """ + raise NotImplementedError # pragma: no cover + + @abc.abstractproperty + def is_logged_in(self): + """Check if login has been called. + + Returns: + :obj:`bool` + + """ + raise NotImplementedError # pragma: no cover + + +class Mixins(object): + """Mixins for Model.""" _logged_in = False """:obj:`bool`: Attribute checked by :meth:`is_logged_in`.""" - def __init__(self, http_client, **creds): + def __init__(self, http, creds, **kwargs): """Constructor. Args: - http_client (:obj:`axonius_api_client.http.HttpClient`): + http (:obj:`axonius_api_client.http.Http`): HTTP client to use to send requests. creds: Credentials used by this Auth method. 
""" - self._log = LOG.getChild(self.__class__.__name__) - """:obj:`logging.Logger`: Logger for this object.""" + log_level = kwargs.get("log_level", constants.LOG_LEVEL_AUTH) + self._log = logs.get_obj_log(obj=self, level=log_level) - self._http_client = http_client - """:obj:`axonius_api_client.http.HttpClient`: HTTP Client.""" + self._http = http + """:obj:`axonius_api_client.http.Http`: HTTP Client.""" self._creds = creds """:obj:`dict`: Credential store.""" @@ -50,7 +90,7 @@ def __str__(self): """ bits = [ - "url={!r}".format(self.http_client.url), + "url={!r}".format(self.http.url), "is_logged_in={}".format(self.is_logged_in), ] bits = "({})".format(", ".join(bits)) @@ -66,14 +106,14 @@ def __repr__(self): return self.__str__() @property - def http_client(self): + def http(self): """Get HttpClient object. Returns: - :obj:`axonius_api_client.http.HttpClient` + :obj:`axonius_api_client.http.Http` """ - return self._http_client + return self._http def _check_http_lock(self): """Check HTTP client not already used by another Auth. 
@@ -82,19 +122,19 @@ def _check_http_lock(self): :exc:`exceptions.AuthError` """ - auth_lock = getattr(self.http_client, "_auth_lock", None) + auth_lock = getattr(self.http, "_auth_lock", None) if auth_lock: - msg = "{http_client} already being used by {auth}" - msg = msg.format(http_client=self.http_client, auth=auth_lock) + msg = "{http} already being used by {auth}" + msg = msg.format(http=self.http, auth=auth_lock) raise exceptions.AuthError(msg) def _set_http_lock(self): """Set HTTP Client auth lock.""" - self._http_client._auth_lock = self + self._http._auth_lock = self def _validate(self): """Validate credentials.""" - response = self.http_client(method="get", path=api.routers.ApiV1.devices.count) + response = self.http(method="get", path=api.routers.ApiV1.devices.count) try: response.raise_for_status() @@ -131,59 +171,14 @@ def is_logged_in(self): return self._logged_in -class AuthUser(AuthMixins, models.AuthBase): - """Authentication method using username & password.""" - - def __init__(self, http_client, username, password): - """Constructor. - - Args: - http_client (:obj:`axonius_api_client.http.HttpClient`): - HTTP client to use to send requests. - username (:obj:`str`): - Username to use in credentials. - password (:obj:`str`): - Password to use in credentials. 
- - """ - super(AuthUser, self).__init__( - http_client=http_client, username=username, password=password - ) - - @property - def _cred_fields(self): - return ["username", "password"] - - def _logout(self): - """Logout from API.""" - self._logged_in = False - self.http_client.session.auth = None - - def login(self): - """Login to API.""" - if self.is_logged_in: - raise exceptions.AlreadyLoggedIn(auth=self) - - self.http_client.session.auth = ( - self._creds["username"], - self._creds["password"], - ) - - self._validate() - - self._logged_in = True - msg = "Successfully logged in using {}".format(self._cred_fields) - self._log.debug(msg) - - -class AuthKey(AuthMixins, models.AuthBase): +class ApiKey(Mixins, Model): """Authentication method using API key & API secret.""" - def __init__(self, http_client, key, secret): + def __init__(self, http, key, secret, **kwargs): """Constructor. Args: - http_client (:obj:`axonius_api_client.http.HttpClient`): + http (:obj:`axonius_api_client.http.Http`): HTTP client to use to send requests. key (:obj:`str`): API key to use in credentials. @@ -191,7 +186,8 @@ def __init__(self, http_client, key, secret): API secret to use in credentials. 
""" - super(AuthKey, self).__init__(http_client=http_client, key=key, secret=secret) + creds = {"key": key, "secret": secret} + super(ApiKey, self).__init__(http=http, creds=creds, **kwargs) @property def _cred_fields(self): @@ -200,15 +196,15 @@ def _cred_fields(self): def _logout(self): """Logout from API.""" self._logged_in = False - self.http_client.session.headers = {} + self.http.session.headers = {} def login(self): """Login to API.""" if self.is_logged_in: raise exceptions.AlreadyLoggedIn(auth=self) - self.http_client.session.headers["api-key"] = self._creds["key"] - self.http_client.session.headers["api-secret"] = self._creds["secret"] + self.http.session.headers["api-key"] = self._creds["key"] + self.http.session.headers["api-secret"] = self._creds["secret"] self._validate() diff --git a/axonius_api_client/auth/exceptions.py b/axonius_api_client/auth/exceptions.py deleted file mode 100644 index 05792b99..00000000 --- a/axonius_api_client/auth/exceptions.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -"""Axonius API Client Authentication errors.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from .. import exceptions - - -class AuthError(exceptions.PackageError): - """Parent exception for all Authentication errors.""" - - -class InvalidCredentials(AuthError): - """Error on failed login.""" - - def __init__(self, auth, exc=None): - """Constructor. - - Args: - auth (:obj:`axonius_api_client.auth.models.AuthBase`): - Authentication method. - exc (:obj:`Exception`, optional): - Original Exception, if any. - - Defaults to: None. 
- - """ - self.auth = auth - """:obj:`axonius_api_client.auth.models.AuthBase`: Authentication method.""" - - self.exc = exc - """:obj:`Exception`: Original Exception, if any.""" - - msg = "Invalid credentials on {auth} -- exception: {exc}" - msg = msg.format(auth=auth, exc=exc) - super(InvalidCredentials, self).__init__(msg) - - -class NotLoggedIn(AuthError): - """Error when not logged in.""" - - def __init__(self, auth): - """Constructor. - - Args: - auth (:obj:`axonius_api_client.auth.models.AuthBase`): - Authentication method. - - """ - self.auth = auth - """:obj:`axonius_api_client.auth.models.AuthBase`: Authentication method.""" - - msg = "Must call login() on {auth}" - msg = msg.format(auth=auth) - super(NotLoggedIn, self).__init__(msg) - - -class AlreadyLoggedIn(AuthError): - """Error when already logged in.""" - - def __init__(self, auth): - """Constructor. - - Args: - auth (:obj:`axonius_api_client.auth.models.AuthBase`): - Authentication method. - - """ - self.auth = auth - """:obj:`axonius_api_client.auth.models.AuthBase`: Authentication method.""" - - msg = "Already logged in on {auth}" - msg = msg.format(auth=auth) - super(AlreadyLoggedIn, self).__init__(msg) diff --git a/axonius_api_client/auth/models.py b/axonius_api_client/auth/models.py deleted file mode 100644 index 874f9b0a..00000000 --- a/axonius_api_client/auth/models.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -"""Axonius API Client Authentication methods.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class AuthBase(object): - """Abstract base class for all Authentication methods.""" - - @abc.abstractmethod - def login(self): - """Login to API.""" - raise NotImplementedError # pragma: no cover - - @abc.abstractmethod - def logout(self): - """Logout from API.""" - raise NotImplementedError # pragma: no 
cover - - @abc.abstractmethod - def check_login(self): - """Throw exc if not login. - - Raises: - :exc:`exceptions.NotLoggedIn` - - """ - raise NotImplementedError # pragma: no cover - - @abc.abstractproperty - def http_client(self): - """Get HttpClient object. - - Returns: - :obj:`axonius_api_client.http.HttpClient` - - """ - raise NotImplementedError # pragma: no cover - - @abc.abstractproperty - def is_logged_in(self): - """Check if login has been called. - - Returns: - :obj:`bool` - - """ - raise NotImplementedError # pragma: no cover diff --git a/axonius_api_client/cli/__init__.py b/axonius_api_client/cli/__init__.py new file mode 100644 index 00000000..629744ab --- /dev/null +++ b/axonius_api_client/cli/__init__.py @@ -0,0 +1,214 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import constants, version +from . import cmd_shell, context, grp_adapters, grp_objects + + +# FUTURE: grp_enforcements +# FUTURE: wrap json datasets with objtype info +# FUTURE: --verbose/--no-verbose to silence echo_ok +# FUTURE: --warning/--no-warning to silence echo_warn +# FUTURE: way to only import cli stuffs so package doesnt see unless needed +# FUTURE: add cert_human logic +# FUTURE: prompt does not use CR when re-prompting on empty var with hide_input=False +# FUTURE: add doc links +@click.group() +@click.option( + "--log-level-package", + "-lpkg", + default=constants.LOG_LEVEL_PACKAGE, + help="Logging level to use for entire package.", + type=click.Choice(constants.LOG_LEVELS_STR), + show_envvar=True, + show_default=True, +) +@click.option( + "--log-level-http", + "-lhttp", + default=constants.LOG_LEVEL_HTTP, + help="Logging level to use for http client.", + type=click.Choice(constants.LOG_LEVELS_STR), + show_envvar=True, + show_default=True, +) +@click.option( + "--log-level-auth", + "-lauth", + default=constants.LOG_LEVEL_AUTH, + 
help="Logging level to use for auth client.", + type=click.Choice(constants.LOG_LEVELS_STR), + show_envvar=True, + show_default=True, +) +@click.option( + "--log-level-api", + "-lapi", + default=constants.LOG_LEVEL_API, + help="Logging level to use for api clients.", + type=click.Choice(constants.LOG_LEVELS_STR), + show_envvar=True, + show_default=True, +) +@click.option( + "--log-level-console", + "-lcon", + default=constants.LOG_LEVEL_CONSOLE, + help="Logging level to use for console output.", + type=click.Choice(constants.LOG_LEVELS_STR), + show_envvar=True, + show_default=True, +) +@click.option( + "--log-level-file", + "-lfile", + default=constants.LOG_LEVEL_FILE, + help="Logging level to use for file output.", + type=click.Choice(constants.LOG_LEVELS_STR), + show_envvar=True, + show_default=True, +) +@click.option( + "--log-console/--no-log-console", + "-con/-ncon", + default=False, + help="Enable logging to --log-console-output.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--log-file/--no-log-file", + "-file/-nfile", + default=False, + help="Enable logging to --log-file-name in --log-file-path.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--log-request-attrs/--no-log-request-attrs", + default=None, + help="Log http client verbose or brief request attributes (none by default).", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--log-response-attrs/--no-log-response-attrs", + default=None, + help="Log http client verbose or brief response attributes (none by default).", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--log-request-body/--no-log-request-body", + help="Log http client request body.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--log-response-body/--no-log-response-body", + help="Log http client response body.", + is_flag=True, + show_envvar=True, + show_default=True, +) 
+@click.option( + "--log-file-name", + "-file-name", + default=constants.LOG_FILE_NAME, + help="Send file logging to this file in --log-file-path.", + show_envvar=True, + show_default=True, +) +@click.option( + "--log-file-path", + "-file-path", + default=constants.LOG_FILE_PATH, + help="Send file logging to --log-file-name in this directory.", + show_envvar=True, + show_default=True, +) +@click.option( + "--log-file-max-mb", + "-file-mb", + default=constants.LOG_FILE_MAX_MB, + help="Rollover the log file when the size is this many MB.", + type=click.INT, + show_envvar=True, + show_default=True, +) +@click.option( + "--log-file-max-files", + "-file-mf", + default=constants.LOG_FILE_MAX_FILES, + help="Only keep this many rollover logs.", + type=click.INT, + show_envvar=True, + show_default=True, +) +@click.option( + "--proxy", + default="", + help="Proxy to use to connect to Axonius instance.", + metavar="PROXY", + show_envvar=True, + show_default=True, +) +@click.option( + "--certpath", + "-cp", + type=click.Path(exists=True, resolve_path=True), + help="Path to SSL certificate.", + metavar="PATH", + show_envvar=True, + show_default=True, +) +@click.option( + "--certverify/--no-certverify", + "-cv/-ncv", + default=False, + help="Perform SSL Certificate Verification.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--certwarn/--no-certwarn", + "-cw/-ncw", + default=True, + help="Show warning for self-signed SSL certificates.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--wraperror/--no-wraperror", + "-we/-nwe", + default=True, + help="Show an error string instead of the full exception.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.version_option(version.__version__) +@context.pass_context +@click.pass_context +def cli(click_ctx, ctx, **kwargs): + """Axonius API Client command line tool.""" + ctx._click_ctx = click_ctx + ctx._connect_args.update(kwargs) + context.load_dotenv() 
+ return ctx + + +cli.add_command(cmd_shell.cmd) +cli.add_command(grp_objects.devices) +cli.add_command(grp_objects.users) +cli.add_command(grp_adapters.adapters) diff --git a/axonius_api_client/cli/cmd_shell.py b/axonius_api_client/cli/cmd_shell.py new file mode 100644 index 00000000..c9df71a9 --- /dev/null +++ b/axonius_api_client/cli/cmd_shell.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from . import context + + +@click.command("shell", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.pass_context +def cmd(ctx, url, key, secret): + """Start an interactive shell.""" + client = ctx.start_client(url=url, key=key, secret=secret, save_history=True) + + client._http.save_history = True + + devices = client.devices + users = client.users + adapters = client.adapters + enforcements = client.enforcements + + shellvars = {} + shellvars.update(globals()) + shellvars.update(locals()) + + context.spawn_shell(shellvars) diff --git a/axonius_api_client/cli/context.py b/axonius_api_client/cli/context.py new file mode 100644 index 00000000..660c4129 --- /dev/null +++ b/axonius_api_client/cli/context.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import atexit +import csv +import functools +import os +import readline +import rlcompleter +import sys +import warnings + +import click +import dotenv +import requests + +from .. import connect, tools +from ..tools import json_reload as jdump + +HISTPATH = os.path.expanduser("~") +HISTFILE = ".python_history" +CONTEXT_SETTINGS = {"auto_envvar_prefix": "AX"} +REASON = "Export format {ef!r} is unsupported! 
Must be one of: {sf}" +QUOTING = csv.QUOTE_NONNUMERIC + +KV_TMPL = "{}: {}".format + +MAX_LEN = 30000 +MAX_STR = "...TRIMMED - {} items over max cell length {}".format + + +def load_dotenv(): + """Pass.""" + ax_env = os.environ.get("AX_ENV", "") + + if ax_env: + path = tools.path(obj=ax_env) + else: + path = tools.path(obj=os.getcwd()) / ".env" + + dotenv.load_dotenv(format(path)) + + +def connect_options(func): + """Combine commonly appearing @click.option decorators.""" + # + @click.option( + "--url", + "-u", + required=True, + help="URL of Axonius instance.", + metavar="URL", + prompt="URL of Axonius instance", + show_envvar=True, + ) + @click.option( + "--key", + "-k", + required=True, + help="API Key of user in Axonius instance.", + metavar="KEY", + prompt="API Key of user in Axonius instance", + hide_input=True, + show_envvar=True, + ) + @click.option( + "--secret", + "-s", + required=True, + help="API Secret of user in Axonius instance.", + metavar="SECRET", + prompt="API Secret of user in Axonius instance", + hide_input=True, + show_envvar=True, + ) + @functools.wraps(func) + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return wrapper + + +def export_options(func): + """Combine commonly appearing @click.option decorators.""" + # + # FUTURE: error if os.path.sep in value + @click.option( + "--export-file", + "-xf", + default="", + help="Export to a file in export-path instead of printing to STDOUT.", + show_envvar=True, + show_default=True, + ) + @click.option( + "--export-path", + "-xp", + default=format(tools.path(obj=os.getcwd())), + help="Path to create --export-file in.", + type=click.Path(exists=False, resolve_path=True), + show_envvar=True, + show_default=True, + ) + @click.option( + "--export-format", + "-xt", + default="json", + help="Format to use for STDOUT or export-file.", + type=click.Choice(["csv", "json"]), + show_envvar=True, + show_default=True, + ) + @click.option( + "--export-overwrite/--no-export-overwrite", + 
"-xo/-nxo", + default=False, + help="Overwrite export-file if exists.", + is_flag=True, + show_envvar=True, + show_default=True, + ) + @functools.wraps(func) + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return wrapper + + +class SplitEquals(click.ParamType): + """Pass.""" + + name = "split_equals" + + def convert(self, value, param, ctx): + """Pass.""" + split = value.split("=", 1) + + if len(split) != 2: + msg = "Need an '=' in --{p} with value {v!r}" + msg = msg.format(p=param.name, v=value) + self.fail(msg, param, ctx) + + return [x.strip() for x in split] + + +class exc_wrap(object): + """Pass.""" + + def __init__(self, wraperror=True): + """Pass.""" + self.wraperror = wraperror + + def __enter__(self): + """Pass.""" + return self + + def __exit__(self, exc, value, traceback): + """Pass.""" + if value and self.wraperror and not isinstance(value, SystemExit): + msg = "WRAPPED EXCEPTION: {c.__module__}.{c.__name__}\n{v}" + msg = msg.format(c=value.__class__, v=value) + Context.echo_error(msg) + + +def json_from_stream(ctx, stream, src): + """Pass.""" + stream_name = getattr(stream, "name", format(stream)) + if stream.isatty(): + # its STDIN with no input + msg = "No input provided on {s} for {src}" + msg = msg.format(s=stream_name, src=src) + ctx.echo_error(msg) + + # its STDIN with input or a file + content = stream.read() + msg = "Read {n} bytes from {s} for {src}" + msg = msg.format(n=len(content), s=stream_name, src=src) + ctx.echo_ok(msg) + + content = content.strip() + + if not content: + msg = "Empty content supplied in {s} for {src}" + msg = msg.format(s=stream_name, src=src) + ctx.echo_error(msg) + + with exc_wrap(wraperror=ctx.wraperror): + content = tools.json_load(obj=content) + + msg = "Loaded JSON content from {src} as {t} with length of {n}" + msg = msg.format(t=type(content).__name__, src=src, n=len(content)) + ctx.echo_ok(msg) + + return content + + +def dictwriter(rows, stream=None, headers=None, quoting=QUOTING, **kwargs): 
+ """Pass.""" + fh = stream or tools.six.StringIO() + + headers = headers or [] + + if not headers: + for row in rows: + headers += [k for k in row if k not in headers] + + writer = csv.DictWriter(fh, fieldnames=headers, quoting=quoting, **kwargs) + + writer.writeheader() + + for row in rows: + writer.writerow(row) + + return fh.getvalue() + + +def to_json(ctx, raw_data, **kwargs): + """Pass.""" + return tools.json_dump(obj=raw_data) + + +def is_simple(o): + """Is simple.""" + return isinstance(o, tools.SIMPLE) or o is None + + +def is_list(o): + """Is simple.""" + return isinstance(o, tools.LIST) + + +def is_los(o): + """Is simple or list of simples.""" + return is_simple(o) or (is_list(o) and all([is_simple(x) for x in o])) + + +def is_dos(o): + """Is dict with simple or list of simple values.""" + return isinstance(o, dict) and all([is_los(v) for v in o.values()]) + + +def obj_to_csv(ctx, raw_data, **kwargs): + """Pass.""" + raw_data = tools.listify(obj=raw_data, dictkeys=False) + rows = [] + + for raw_row in raw_data: + row = {} + rows.append(row) + for raw_key, raw_value in raw_row.items(): + + if is_los(raw_value): + row[raw_key] = join_cr(raw_value, is_cell=True) + continue + + if is_list(raw_value) and all([is_dos(x) for x in raw_value]): + values = {} + + for raw_item in raw_value: + for k, v in raw_item.items(): + new_key = "{}.{}".format(raw_key, k) + + values[new_key] = values.get(new_key, []) + + values[new_key] += tools.listify(v, dictkeys=False) + + for k, v in values.items(): + row[k] = join_cr(v, is_cell=True) + + continue + + msg = "Data of type {t} is too complex for CSV format" + msg = msg.format(t=type(raw_value).__name__) + row[raw_key] = msg + + return dictwriter(rows=rows) + + +def check_empty( + ctx, this_data, prev_data, value_type, value, objtype, known_cb, known_cb_key +): + """Pass.""" + if value in tools.EMPTY: + return + + value = tools.join_comma(obj=value, empty=False) + if not this_data: + msg = "Valid {objtype}:{valid}\n" + msg = 
msg.format( + valid=tools.join_cr(known_cb(**{known_cb_key: prev_data})), objtype=objtype + ) + ctx.echo_error(msg, abort=False) + + msg = "No {objtype} found when searching by {value_type}: {value}" + msg = msg.format(objtype=objtype, value_type=value_type, value=value) + ctx.echo_error(msg) + + msg = "Found {cnt} {objtype} by {value_type}: {value}" + msg = msg.format( + objtype=objtype, cnt=len(this_data), value_type=value_type, value=value + ) + ctx.echo_ok(msg) + + +def join_kv(obj, indent=" "): + """Pass.""" + items = [KV_TMPL(k, v) for k, v in obj.items()] + return tools.join_cr(obj=items, indent=indent) + + +def join_tv(obj): + """Pass.""" + items = [KV_TMPL(v["title"], v["value"]) for k, v in obj.items()] + return join_cr(items) + + +def join_cr(obj, is_cell=False): + """Pass.""" + stro = tools.join_cr(obj=obj, pre=False, post=False, indent="") + + if is_cell and len(stro) >= MAX_LEN: + stro = tools.join_cr([stro[:MAX_LEN], MAX_STR(len(obj), MAX_LEN)]) + + return stro + + +def write_hist_file(): + """Pass.""" + histpath = tools.pathlib.Path(HISTPATH) + histfile = histpath / HISTFILE + + histpath.mkdir(mode=0o700, exist_ok=True) + histfile.touch(mode=0o600, exist_ok=True) + + readline.write_history_file(format(histfile)) + + +def register_readline(shellvars=None): + """Pass.""" + shellvars = shellvars or {} + + histpath = tools.pathlib.Path(HISTPATH) + histfile = histpath / HISTFILE + + histpath.mkdir(mode=0o700, exist_ok=True) + histfile.touch(mode=0o600, exist_ok=True) + + try: + readline.read_history_file(format(histfile)) + atexit.register(write_hist_file) + + readline.set_completer(rlcompleter.Completer(shellvars).complete) + + readline_doc = getattr(readline, "__doc__", "") + is_libedit = readline_doc and "libedit" in readline_doc + + pab = "bind ^I rl_complete" if is_libedit else "tab: complete" + readline.parse_and_bind(pab) + + except Exception as exc: + msg = "Unable to register history and autocomplete:\n{}".format(exc) + Context.echo_error(msg, 
abort=False) + + +def spawn_shell(shellvars=None): + """Pass.""" + import code + + shellvars = shellvars or {} + shellvars.setdefault("jdump", jdump) + register_readline(shellvars) + + code.interact(local=shellvars) + + +class Context(object): + """Pass.""" + + OK_ARGS = {"fg": "green", "bold": True, "err": True} + OK_TMPL = "** {msg}" + + WARN_ARGS = {"fg": "yellow", "bold": True, "err": True} + WARN_TMPL = "** WARNING: {msg}" + + ERROR_ARGS = {"fg": "red", "bold": True, "err": True} + ERROR_TMPL = "** ERROR: {msg}" + + SSLWARN_MSG = ( + "Unverified HTTPS request! Set AX_CERT environment variable or " + "--cert option to the path of a CA bundle!" + ) + SSLWARN_CLS = requests.urllib3.exceptions.InsecureRequestWarning + + def __init__(self): + """Pass.""" + self.obj = None + self._click_ctx = None + self._connect_args = {} + self._export_args = {} + + def __str__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + return format(self.obj) + + def __repr__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + return self.__str__() + + def export(self, data, export_file=None, export_path=None, export_overwrite=False): + """Pass.""" + if not export_file: + click.echo(data) + return + + export_path = export_path or os.getcwd() + + path = tools.path(obj=export_path) + path.mkdir(mode=0o700, parents=True, exist_ok=True) + + full_path = path / export_file + + mode = "overwritten" if full_path.exists() else "created" + + if full_path.exists() and not export_overwrite: + msg = "Export file {p} already exists and export-overwite is False!" + msg = msg.format(p=full_path) + self.echo_error(msg=msg) + + full_path.touch(mode=0o600) + + with full_path.open(mode="w", newline="") as fh: + fh.write(data) + + msg = "Exported file {p!r} {mode}!" 
+ msg = msg.format(p=format(full_path), mode=mode) + self.echo_ok(msg) + + @classmethod + def echo_ok(cls, msg): + """Pass.""" + click.secho(cls.OK_TMPL.format(msg=msg), **cls.OK_ARGS) + + @classmethod + def echo_error(cls, msg, abort=True): + """Pass.""" + click.secho(cls.ERROR_TMPL.format(msg=msg), **cls.ERROR_ARGS) + if abort: + sys.exit(1) + + @classmethod + def echo_warn(cls, msg): + """Pass.""" + click.secho(cls.WARN_TMPL.format(msg=msg), **cls.WARN_ARGS) + + @property + def wraperror(self): + """Pass.""" + return self._connect_args.get("wraperror", True) + + def start_client(self, url, key, secret, **kwargs): + """Pass.""" + if not getattr(self, "obj", None): + connect_args = {} + connect_args.update(self._connect_args) + connect_args.update(kwargs) + connect_args["url"] = url + connect_args["key"] = key + connect_args["secret"] = secret + + with exc_wrap(wraperror=self.wraperror): + self.obj = connect.Connect(**connect_args) + + with warnings.catch_warnings(record=True) as caught_warnings: + self.obj.start() + + for caught_warning in caught_warnings: + wmsg = caught_warning.message + is_ssl = isinstance(wmsg, self.SSLWARN_CLS) + wmsg = self.SSLWARN_MSG if is_ssl else wmsg + wmsg = format(wmsg) + self.echo_warn(wmsg) + + # warnings suck. 
+ warnings.simplefilter("ignore", self.SSLWARN_CLS) + + self.echo_ok(msg=self.obj) + + return self.obj + + def handle_export( + self, + raw_data, + formatters, + export_format, + export_file, + export_path, + export_overwrite, + ctx=None, + reason=REASON, + **kwargs, + ): + """Pass.""" + if export_format not in formatters: + self.echo_error(msg=reason.format(ef=export_format, sf=list(formatters))) + + with exc_wrap(wraperror=self.wraperror): + data = formatters[export_format]( + ctx=ctx or self, raw_data=raw_data, **kwargs + ) + + self.export( + data=data, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) + + +pass_context = click.make_pass_decorator(Context, ensure=True) diff --git a/axonius_api_client/cli/grp_adapters/__init__.py b/axonius_api_client/cli/grp_adapters/__init__.py new file mode 100644 index 00000000..bf00e729 --- /dev/null +++ b/axonius_api_client/cli/grp_adapters/__init__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context, grp_cnx +from . import cmd_get + + +@click.group() +@context.pass_context +def adapters(ctx): + """Work with adapter connections.""" + return ctx + + +adapters.add_command(cmd_get.cmd) +adapters.add_command(grp_cnx.cnx) diff --git a/axonius_api_client/cli/grp_adapters/cmd_get.py b/axonius_api_client/cli/grp_adapters/cmd_get.py new file mode 100644 index 00000000..1149da65 --- /dev/null +++ b/axonius_api_client/cli/grp_adapters/cmd_get.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. 
import context + + +@click.command("get", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--name", + "-n", + help="Only include adapters with matching names.", + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--node", + "-no", + help="Only include adapters with matching node names.", + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--cnx-working/--no-cnx-working", + "-cw/-ncw", + help="Include/Exclude adapters with working connections.", + is_flag=True, + default=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--cnx-broken/--no-cnx-broken", + "-cb/-ncb", + help="Include/Exclude adapters with broken connections.", + is_flag=True, + default=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--cnx-none/--no-cnx-none", + "-cn/-ncn", + help="Include/Exclude adapters with no connections.", + default=True, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--cnx-count", + "-c", + help="Only include adapters with this number of connections.", + type=click.INT, + show_envvar=True, + show_default=True, +) +@click.option( + "--include-settings/--no-include-settings", + "-is/-nis", + help="Include connection, adapter, and advanced settings in CSV export.", + default=False, + is_flag=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +def cmd( + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + name, + node, + cnx_working, + cnx_broken, + cnx_none, + cnx_count, + include_settings, +): + """Get all adapters with clients that have errors.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + statuses = [] + + if cnx_working: + statuses.append(True) + + if cnx_broken: + statuses.append(False) + + if cnx_none: + statuses.append(None) + + with context.exc_wrap(wraperror=ctx.wraperror): + all_adapters = 
client.adapters.get() + + by_nodes = client.adapters.filter_by_nodes(adapters=all_adapters, value=node) + context.check_empty( + ctx=ctx, + this_data=by_nodes, + prev_data=all_adapters, + value_type="node names", + value=node, + objtype="adapters", + known_cb=ctx.obj.adapters.get_known, + known_cb_key="adapters", + ) + + by_names = client.adapters.filter_by_names(adapters=by_nodes, value=name) + context.check_empty( + ctx=ctx, + this_data=by_names, + prev_data=by_nodes, + value_type="names", + value=name, + objtype="adapters", + known_cb=ctx.obj.adapters.get_known, + known_cb_key="adapters", + ) + + by_statuses = client.adapters.filter_by_status( + adapters=by_names, value=statuses + ) + context.check_empty( + ctx=ctx, + this_data=by_statuses, + prev_data=by_names, + value_type="statuses", + value=statuses, + objtype="adapters", + known_cb=ctx.obj.adapters.get_known, + known_cb_key="adapters", + ) + + by_cnx_count = client.adapters.filter_by_cnx_count( + adapters=by_names, value=cnx_count + ) + context.check_empty( + ctx=ctx, + this_data=by_cnx_count, + prev_data=by_statuses, + value_type="connection count", + value=cnx_count, + objtype="adapters", + known_cb=ctx.obj.adapters.get_known, + known_cb_key="adapters", + ) + + formatters = {"json": context.to_json, "csv": to_csv} + ctx.handle_export( + raw_data=by_cnx_count, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + include_settings=include_settings, + ) + + +def to_csv(ctx, raw_data, include_settings=True, **kwargs): + """Pass.""" + rows = [] + + simples = [ + "name", + "node_name", + "node_id", + "status_raw", + "cnx_count", + "cnx_count_ok", + "cnx_count_bad", + ] + + cnx_tmpl = "cnx{idx}_{t}".format + + for adapter in raw_data: + row = {k: adapter[k] for k in simples} + + for idx, cnx in enumerate(adapter["cnx"]): + status = [ + "status: {}".format(cnx["status_raw"]), + "error: {}".format(cnx["error"]), + ] + + 
row[cnx_tmpl(idx=idx, t="id")] = cnx["id"] + row[cnx_tmpl(idx=idx, t="status")] = context.join_cr(status) + + if include_settings: + row[cnx_tmpl(idx=idx, t="settings")] = context.join_tv(cnx["config"]) + + if include_settings: + row["adapter_settings"] = context.join_tv(adapter["settings"]) + row["advanced_settings"] = context.join_tv(adapter["adv_settings"]) + + rows.append(row) + + return context.dictwriter(rows=rows) diff --git a/axonius_api_client/cli/grp_cnx/__init__.py b/axonius_api_client/cli/grp_cnx/__init__.py new file mode 100644 index 00000000..e794d767 --- /dev/null +++ b/axonius_api_client/cli/grp_cnx/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . import cmd_add, cmd_check, cmd_delete, cmd_discover, cmd_get + + +@click.group() +@context.pass_context +def cnx(ctx): + """Work with adapter connections.""" + return ctx + + +cnx.add_command(cmd_get.cmd) +cnx.add_command(cmd_add.cmd) +cnx.add_command(cmd_delete.cmd) +cnx.add_command(cmd_check.cmd) +cnx.add_command(cmd_discover.cmd) diff --git a/axonius_api_client/cli/grp_cnx/cmd_add.py b/axonius_api_client/cli/grp_cnx/cmd_add.py new file mode 100644 index 00000000..5f555b8d --- /dev/null +++ b/axonius_api_client/cli/grp_cnx/cmd_add.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import grp_common + +HIDDEN = ["secret", "key", "password"] + + +@click.command("add", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--adapter", + "-a", + help="The name of the adapter to add the connection to.", + required=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--node", + "-n", + help="The name of the node running --adapter to add the connection to.", + default="master", + show_envvar=True, + show_default=True, +) +@click.option( + "--config", + "-c", + help="Configuration keys in the form of key=value.", + type=context.SplitEquals(), + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--skip", + "-s", + "skips", + help="Configuration keys to not prompt for.", + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--hidden", + "hiddens", + help="List of configuration items to hide input when prompting.", + default=HIDDEN, + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--prompt-opt/--no-prompt-opt", + "-po/-npo", + help="Prompt for optional items that are not supplied.", + is_flag=True, + default=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--include-settings/--no-include-settings", + "-is/-nis", + help="Include connection settings in CSV export.", + default=False, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--error/--no-error", + "-e/-ne", + help="Return the new connection object even if an error happens.", + default=True, + is_flag=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +def cmd( + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + adapter, + node, + config, + skips, + hiddens, + prompt_opt, + error, + include_settings, +): + """Get all adapters with clients that have errors.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + 
with context.exc_wrap(wraperror=ctx.wraperror): + adapter = client.adapters.get_single(adapter=adapter, node=node) + + config = dict(config) + skips = [x.lower().strip() for x in skips] + hiddens = [x.lower().strip() for x in hiddens] + + schemas = adapter["cnx_settings"].values() + schemas = sorted(schemas, key=lambda x: x["required"], reverse=True) + + for schema in schemas: + try: + config[schema["name"]] = grp_common.handle_schema( + config=config, + schema=schema, + hiddens=hiddens, + prompt_opt=prompt_opt, + skips=skips, + ) + except grp_common.SkipItem: + continue + + with context.exc_wrap(wraperror=ctx.wraperror): + cnx = client.adapters.cnx.add(adapter=adapter, config=config, error=error) + + grp_common.handle_response(cnx=cnx, action="adding") + + formatters = {"json": context.to_json, "csv": grp_common.to_csv} + + ctx.handle_export( + raw_data=cnx, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + include_settings=include_settings, + ) diff --git a/axonius_api_client/cli/grp_cnx/cmd_check.py b/axonius_api_client/cli/grp_cnx/cmd_check.py new file mode 100644 index 00000000..e5eb758e --- /dev/null +++ b/axonius_api_client/cli/grp_cnx/cmd_check.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from ... import tools +from .. import context +from . 
import grp_common + + +@click.command("check", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--rows", + "-r", + help="The output from 'cnx get' supplied as a file or via stdin.", + default="-", + type=click.File(mode="r"), + show_envvar=True, + show_default=True, +) +@click.option( + "--error/--no-error", + "-e/-ne", + help="Stop checking connections on error.", + default=True, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--include-settings/--no-include-settings", + "-is/-nis", + help="Include connection settings in CSV export.", + default=False, + is_flag=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +def cmd( + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + rows, + error, + include_settings, +): + """Get all adapters with clients that have errors.""" + client = ctx.start_client(url=url, key=key, secret=secret) + content = context.json_from_stream(ctx=ctx, stream=rows, src="--rows") + + cnxs = tools.listify(obj=content, dictkeys=False) + + msg = "Loaded {nc} connections from --rows" + msg = msg.format(nc=len(cnxs)) + ctx.echo_ok(msg) + + processed = [] + + with context.exc_wrap(wraperror=ctx.wraperror): + for cnx in cnxs: + if "cnx" in cnx: + cnx = cnx["cnx"] + + raw_data = client.adapters.cnx.check(cnx=cnx, error=error) + grp_common.handle_response(cnx=raw_data, action="checking") + processed.append(raw_data) + + formatters = {"json": context.to_json, "csv": grp_common.to_csv} + + ctx.handle_export( + raw_data=processed, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + include_settings=include_settings, + ) diff --git a/axonius_api_client/cli/grp_cnx/cmd_delete.py b/axonius_api_client/cli/grp_cnx/cmd_delete.py new file mode 100644 index 00000000..5fdd8117 --- /dev/null +++ 
b/axonius_api_client/cli/grp_cnx/cmd_delete.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from ... import tools +from .. import context +from . import grp_common + + +@click.command("delete", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--rows", + "-r", + help="The output from 'cnx get' supplied as a file or via stdin.", + default="-", + type=click.File(mode="r"), + show_envvar=True, + show_default=True, +) +@click.option( + "--delete-entities/--no-delete-entities", + "-de/-nde", + help="Delete information for this connection from associated assets.", + default=False, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--wait", + "-w", + help="Wait this many seconds before deleting", + default=30, + type=click.INT, + show_envvar=True, + show_default=True, +) +@click.option( + "--error/--no-error", + "-e/-ne", + help="Stop deleting connections on error.", + default=True, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--force/--no-force", + "-f/-nf", + help="Actually delete the connections.", + default=False, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--include-settings/--no-include-settings", + "-is/-nis", + help="Include connection settings in CSV export.", + default=False, + is_flag=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +def cmd( + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + rows, + delete_entities, + wait, + error, + force, + include_settings, +): + """Get all adapters with clients that have errors.""" + client = ctx.start_client(url=url, key=key, secret=secret) + content = context.json_from_stream(ctx=ctx, stream=rows, src="--rows") + cnxs = 
tools.listify(obj=content, dictkeys=False) + + msg = "Loaded {nc} connections from --rows" + msg = msg.format(nc=len(cnxs)) + ctx.echo_ok(msg) + + processed = [] + + with context.exc_wrap(wraperror=ctx.wraperror): + for cnx in cnxs: + if "cnx" in cnx: + cnx = cnx["cnx"] + + info_keys = ["adapter_name", "node_name", "id", "uuid", "status", "error"] + cnxinfo = {k: cnx[k] for k in info_keys} + + msg = "In {s} second will delete connection: {ci}" + msg = msg.format(s=wait, ci=context.join_kv(obj=cnxinfo, indent=" " * 4)) + ctx.echo_warn(msg) + + raw_data = client.adapters.cnx.delete( + cnx=cnx, + delete_entities=delete_entities, + force=force, + error=error, + sleep=wait, + warning=False, + ) + + grp_common.handle_response(cnx=raw_data, action="deleting") + + processed.append(raw_data) + + formatters = {"json": context.to_json, "csv": grp_common.to_csv} + + ctx.handle_export( + raw_data=processed, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + include_settings=include_settings, + ) diff --git a/axonius_api_client/cli/grp_cnx/cmd_discover.py b/axonius_api_client/cli/grp_cnx/cmd_discover.py new file mode 100644 index 00000000..821f6142 --- /dev/null +++ b/axonius_api_client/cli/grp_cnx/cmd_discover.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from ... import tools +from .. import context +from . 
import grp_common + + +@click.command("discover", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--rows", + "-r", + help="The output from 'cnx get' supplied as a file or via stdin.", + default="-", + type=click.File(mode="r"), + show_envvar=True, + show_default=True, +) +@click.option( + "--error/--no-error", + "-e/-ne", + help="Stop discovering connections on error.", + default=True, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--include-settings/--no-include-settings", + "-is/-nis", + help="Include connection settings in CSV export.", + default=False, + is_flag=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +def cmd( + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + rows, + error, + include_settings, +): + """Get all adapters with clients that have errors.""" + client = ctx.start_client(url=url, key=key, secret=secret) + content = context.json_from_stream(ctx=ctx, stream=rows, src="--rows") + + cnxs = tools.listify(obj=content, dictkeys=False) + + msg = "Loaded {nc} connections from --rows" + msg = msg.format(nc=len(cnxs)) + ctx.echo_ok(msg) + + processed = [] + + with context.exc_wrap(wraperror=ctx.wraperror): + for cnx in cnxs: + if "cnx" in cnx: + cnx = cnx["cnx"] + + raw_data = client.adapters.cnx.update(cnx=cnx, error=error) + grp_common.handle_response(cnx=raw_data, action="discovering") + processed.append(raw_data) + + formatters = {"json": context.to_json, "csv": grp_common.to_csv} + + ctx.handle_export( + raw_data=processed, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + include_settings=include_settings, + ) diff --git a/axonius_api_client/cli/grp_cnx/cmd_get.py b/axonius_api_client/cli/grp_cnx/cmd_get.py new file mode 100644 index 00000000..916fc673 --- /dev/null +++ 
b/axonius_api_client/cli/grp_cnx/cmd_get.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . import grp_common + + +@click.command("get", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--rows", + "-r", + help="The output from 'adapters get' supplied as a file or via stdin.", + default="-", + type=click.File(mode="r"), + show_envvar=True, + show_default=True, +) +@click.option( + "--id", + "-i", + "ids", + help="Only include connections with matching IDs.", + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--working/--no-working", + "-w/-nw", + help="Include connections that are working.", + default=True, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--broken/--no-broken", + "-b/-nb", + help="Include connections that are broken.", + default=True, + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--include-settings/--no-include-settings", + "-is/-nis", + help="Include connection settings in CSV export.", + default=False, + is_flag=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +def cmd( + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + rows, + ids, + working, + broken, + include_settings, +): + """Get all adapters with clients that have errors.""" + client = ctx.start_client(url=url, key=key, secret=secret) + content = context.json_from_stream(ctx=ctx, stream=rows, src="--rows") + + cnxs = [] + for adapter in content: + if "cnx" not in adapter: + msg = "No 'cnx' key found in adapter with keys: {k}" + msg = msg.format(k=list(adapter)) + ctx.echo_error(msg) + cnxs += adapter["cnx"] + + msg = "Loaded {nc} connections from {na} adapters" + msg = 
msg.format(nc=len(cnxs), na=len(content)) + ctx.echo_ok(msg) + + statuses = [] + + if working: + statuses.append(True) + + if broken: + statuses.append(False) + + with context.exc_wrap(wraperror=ctx.wraperror): + by_statuses = client.adapters.cnx.filter_by_status(cnxs=cnxs, value=statuses) + context.check_empty( + ctx=ctx, + this_data=by_statuses, + prev_data=cnxs, + value_type="connection statuses", + value=statuses, + objtype="connections", + known_cb=ctx.obj.adapters.cnx.get_known, + known_cb_key="cnxs", + ) + + by_ids = client.adapters.cnx.filter_by_ids(cnxs=by_statuses, value=ids) + context.check_empty( + ctx=ctx, + this_data=by_ids, + prev_data=by_statuses, + value_type="connection ids", + value=ids, + objtype="connections", + known_cb=ctx.obj.adapters.cnx.get_known, + known_cb_key="cnxs", + ) + + formatters = {"json": context.to_json, "csv": grp_common.to_csv} + + ctx.handle_export( + raw_data=by_ids, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + include_settings=include_settings, + ) diff --git a/axonius_api_client/cli/grp_cnx/grp_common.py b/axonius_api_client/cli/grp_cnx/grp_common.py new file mode 100644 index 00000000..981dd0f6 --- /dev/null +++ b/axonius_api_client/cli/grp_cnx/grp_common.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import re + +import click + +from ... import tools +from .. 
import context + + +def to_csv(ctx, raw_data, include_settings=True, **kwargs): + """Pass.""" + rows = [] + + simples = ["adapter_name", "node_name", "id", "uuid", "status_raw", "error"] + + for cnx in raw_data: + if "cnx" in cnx: + cnx = cnx["cnx"] + + row = {k: cnx[k] for k in simples} + if include_settings: + row["settings"] = context.join_tv(cnx["config"]) + + rows.append(row) + + return context.dictwriter(rows=rows) + + +def handle_schema(config, schema, hiddens, prompt_opt, skips): + """Pass.""" + name = schema["name"] + required = schema["required"] + default = schema.get("default", None) + + schema["hide_input"] = hide_input = any([re.search(x, name) for x in hiddens]) + + smsg = "\n{s}\n Configuration item schema:{it}" + smsg = smsg.format(s="*" * 40, it=context.join_kv(obj=schema, indent=" " * 4)) + click.secho(message=smsg, fg="blue", err=True) + + if config.get(name): + hasmsg = "\nSkipping item, was provided via '--config {}=...'\n" + hasmsg = hasmsg.format(name) + click.secho(message=hasmsg, fg="cyan", err=True) + raise SkipItem() + + if not required and any([re.search(x, name) for x in skips]): + skipmsg = "\nSkipping item, was provided via '--skip {}'\n" + skipmsg = skipmsg.format(name) + click.secho(message=skipmsg, fg="cyan", err=True) + raise SkipItem() + + if not required and not prompt_opt: + skipmsg = "\nSkipping optional item due to --no-prompt-opt\n" + skipmsg = skipmsg.format(name) + click.secho(message=skipmsg, fg="cyan", err=True) + raise SkipItem() + + ptype = determine_type(schema=schema) + ptext = "\nProvide value for item" + ptext = click.style(text=ptext, fg="bright_blue") + + value = click.prompt( + text=ptext, + default=default, + hide_input=hide_input, + type=ptype, + err=True, + show_default=True, + show_choices=True, + ) + + return value + + +TYPE_MAP = { + "bool": click.BOOL, + "integer": click.INT, + "number": click.INT, + "file": click.Path( + exists=True, + file_okay=True, + dir_okay=False, + writable=False, + readable=True, 
+ resolve_path=True, + allow_dash=False, + path_type=None, + ), +} + + +def determine_type(schema): + """Pass.""" + type_str = schema["type"] + enum = schema.get("enum", []) + has_enum = "enum" in schema + + ptype = None + + if type_str == "string" and has_enum and isinstance(enum, tools.LIST) and enum: + ptype = click.Choice(choices=enum, case_sensitive=True) + elif type_str in TYPE_MAP: + ptype = TYPE_MAP[type_str] + + return ptype + + +class SkipItem(Exception): + """Pass.""" + + +def handle_response(cnx, action): + """Pass.""" + had_error = cnx["response_had_error"] + response = cnx["response"] + + info_keys = ["adapter_name", "node_name", "id", "uuid", "status", "error"] + cnxinfo = {k: cnx["cnx"][k] for k in info_keys} + + color = "red" if had_error else "green" + + msg = [ + "", + "Finished {a} connection", + "Had error: {he}", + "response:\n{r}", + "connection:{ci}\n", + ] + msg = tools.join_cr(obj=msg, pre=False, indent="").format( + a=action, + he=had_error, + r=tools.json_dump(response), + ci=context.join_kv(obj=cnxinfo, indent=" " * 2), + ) + click.secho(message=msg, fg=color, err=True) diff --git a/axonius_api_client/cli/grp_labels/__init__.py b/axonius_api_client/cli/grp_labels/__init__.py new file mode 100644 index 00000000..c58f909c --- /dev/null +++ b/axonius_api_client/cli/grp_labels/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import cmd_add, cmd_get, cmd_remove + + +@click.group() +@context.pass_context +def labels(ctx): + """Work with device assets.""" + return ctx + + +labels.add_command(cmd_get.cmd) +labels.add_command(cmd_add.cmd) +labels.add_command(cmd_remove.cmd) diff --git a/axonius_api_client/cli/grp_labels/cmd_add.py b/axonius_api_client/cli/grp_labels/cmd_add.py new file mode 100644 index 00000000..94d3390b --- /dev/null +++ b/axonius_api_client/cli/grp_labels/cmd_add.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context + + +@click.command("add", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--rows", + "-r", + help="The JSON data of rows returned by any get command for this object type.", + default="-", + type=click.File(mode="r"), + show_envvar=True, + show_default=True, +) +@click.option( + "--label", + "-l", + help="Labels to add to rows.", + required=True, + multiple=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + rows, + label, +): + """Get a report of adapters for objects in query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + content = context.json_from_stream(ctx=ctx, stream=rows, src="--rows") + + api = getattr(client, clickctx.parent.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.labels.add(rows=content, labels=label) + + formatters = {"json": context.to_json} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git 
a/axonius_api_client/cli/grp_labels/cmd_get.py b/axonius_api_client/cli/grp_labels/cmd_get.py new file mode 100644 index 00000000..4b556f11 --- /dev/null +++ b/axonius_api_client/cli/grp_labels/cmd_get.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context + + +@click.command("get", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, +): + """Get a report of adapters for objects in query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + api = getattr(client, clickctx.parent.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.labels.get() + + formatters = {"json": context.to_json} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_labels/cmd_remove.py b/axonius_api_client/cli/grp_labels/cmd_remove.py new file mode 100644 index 00000000..1d1e9df2 --- /dev/null +++ b/axonius_api_client/cli/grp_labels/cmd_remove.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. 
import context + + +@click.command("remove", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--rows", + "-r", + help="The JSON data of rows returned by any get command for this object type.", + default="-", + type=click.File(mode="r"), + show_envvar=True, + show_default=True, +) +@click.option( + "--label", + "-l", + help="Labels to add to rows.", + required=True, + multiple=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + rows, + label, +): + """Get a report of adapters for objects in query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + content = context.json_from_stream(ctx=ctx, stream=rows, src="--rows") + + api = getattr(client, clickctx.parent.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.labels.remove(rows=content, labels=label) + + formatters = {"json": context.to_json} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_objects/__init__.py b/axonius_api_client/cli/grp_objects/__init__.py new file mode 100644 index 00000000..a5069f9a --- /dev/null +++ b/axonius_api_client/cli/grp_objects/__init__.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context, grp_labels, grp_reports, grp_saved_query +from . 
import ( + cmd_count, + cmd_count_by_saved_query, + cmd_fields, + cmd_get, + cmd_get_by_hostname, + cmd_get_by_ip, + cmd_get_by_mac, + cmd_get_by_mail, + cmd_get_by_saved_query, + cmd_get_by_subnet, + cmd_get_by_username, +) + + +@click.group() +@context.pass_context +def devices(ctx): + """Work with device assets.""" + return ctx + + +@click.group() +@context.pass_context +def users(ctx): + """Work with user assets.""" + return ctx + + +users.add_command(cmd_get.cmd) +users.add_command(cmd_fields.cmd) +users.add_command(cmd_count.cmd) +users.add_command(cmd_count_by_saved_query.cmd) +users.add_command(cmd_get_by_username.cmd) +users.add_command(cmd_get_by_mail.cmd) +users.add_command(cmd_get_by_saved_query.cmd) +users.add_command(grp_reports.reports) +users.add_command(grp_labels.labels) +users.add_command(grp_saved_query.saved_query) + +devices.add_command(cmd_get.cmd) +devices.add_command(cmd_fields.cmd) +devices.add_command(cmd_count.cmd) +devices.add_command(cmd_count_by_saved_query.cmd) +devices.add_command(cmd_get_by_hostname.cmd) +devices.add_command(cmd_get_by_ip.cmd) +devices.add_command(cmd_get_by_mac.cmd) +devices.add_command(cmd_get_by_subnet.cmd) +devices.add_command(cmd_get_by_saved_query.cmd) +devices.add_command(grp_reports.reports) +devices.add_command(grp_labels.labels) +devices.add_command(grp_saved_query.saved_query) diff --git a/axonius_api_client/cli/grp_objects/cmd_count.py b/axonius_api_client/cli/grp_objects/cmd_count.py new file mode 100644 index 00000000..d4dbe9de --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_count.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. 
import context + + +@click.command("count", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--query", + "-q", + help="Query built from Query Wizard to return count of objects.", + metavar="QUERY", + show_envvar=True, + show_default=True, +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + query, +): + """Get all objects matching a query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + api = getattr(client, clickctx.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.count(query=query) + + formatters = {"json": context.to_json} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_count_by_saved_query.py b/axonius_api_client/cli/grp_objects/cmd_count_by_saved_query.py new file mode 100644 index 00000000..95f31605 --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_count_by_saved_query.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. 
import context + + +@click.command("count-by-saved-query", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--name", + "-n", + help="Name of saved query to get count of assets from.", + required=True, + show_envvar=True, + show_default=True, +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + name, +): + """Get all objects matching a query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + api = getattr(client, clickctx.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.count_by_saved_query(name=name) + + formatters = {"json": context.to_json} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_fields.py b/axonius_api_client/cli/grp_objects/cmd_fields.py new file mode 100644 index 00000000..38efa6dd --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_fields.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import re + +import click + +from .. 
import context + + +@click.command("fields", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--adapter-re", + "-ar", + default=".*", + help="Only fetch fields for adapters matching this regex.", + metavar="REGEX", + show_envvar=True, + show_default=True, +) +@click.option( + "--field-re", + "-fr", + default=".*", + help="Only fetch fields matching this regex.", + metavar="REGEX", + show_envvar=True, + show_default=True, +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + adapter_re, + field_re, +): + """Get the fields/columns for all adapters.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + api = getattr(client, clickctx.parent.command.name) + + adapter_rec = re.compile(adapter_re, re.I) + field_rec = re.compile(field_re, re.I) + + raw_data = {} + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_fields = api.fields.get() + + for adapter, adapter_fields in raw_fields.items(): + if not adapter_rec.search(adapter): + continue + + for field, field_into in adapter_fields.items(): + if not field_rec.search(field): + continue + raw_data[adapter] = raw_data.get(adapter, []) + raw_data[adapter].append(field) + + if not raw_data: + msg = "No fields found matching adapter regex {are!r} and field regex {fre!r}" + msg = msg.format(are=adapter_re, fre=field_re) + ctx.echo_error(msg) + + formatters = {"json": context.to_json, "csv": to_csv} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) + + +def to_csv(ctx, raw_data, **kwargs): + """Pass.""" + rows = [] + headers = [] + for adapter, fields in raw_data.items(): + headers.append(adapter) + for idx, field in enumerate(fields): + if len(rows) < idx + 1: + rows.append({}) + row_data = 
{adapter: field} + rows[idx].update(row_data) + + return context.dictwriter(rows=rows, headers=headers) diff --git a/axonius_api_client/cli/grp_objects/cmd_get.py b/axonius_api_client/cli/grp_objects/cmd_get.py new file mode 100644 index 00000000..bfcdce67 --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_get.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context + + +@click.command("get", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--query", + "-q", + help="Query built from Query Wizard to filter objects (empty returns all).", + metavar="QUERY", + show_envvar=True, + show_default=True, +) +@click.option( + "--field", + "-f", + help="Columns to include in the format of adapter:field.", + metavar="ADAPTER:FIELD", + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--fields-default/--no-fields-default", + "-fd/-nfd", + default=True, + help="Include default fields for this object type.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--max-rows", "-mr", help="Only return this many rows.", type=click.INT, hidden=True +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + query, + field, + fields_default, + max_rows, +): + """Get all objects matching a query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + api = getattr(client, clickctx.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.get( + query=query, fields=field, fields_default=fields_default, max_rows=max_rows + ) + + formatters = {"json": context.to_json, "csv": context.obj_to_csv} + + ctx.handle_export( + raw_data=raw_data, + 
formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_get_by_hostname.py b/axonius_api_client/cli/grp_objects/cmd_get_by_hostname.py new file mode 100644 index 00000000..652190f7 --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_get_by_hostname.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . import grp_common + + +@click.command("get-by-hostname", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@grp_common.get_by_opts +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + value, + query, + field, + fields_default, + max_rows, +): + """Get all objects matching a query.""" + grp_common.get_by_cmd( + clickctx=clickctx, + ctx=ctx, + url=url, + key=key, + secret=secret, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + value=value, + query=query, + field=field, + fields_default=fields_default, + max_rows=max_rows, + method="get_by_hostname", + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_get_by_ip.py b/axonius_api_client/cli/grp_objects/cmd_get_by_ip.py new file mode 100644 index 00000000..95994f24 --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_get_by_ip.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import grp_common + + +@click.command("get-by-ip", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@grp_common.get_by_opts +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + value, + query, + field, + fields_default, + max_rows, +): + """Get all objects matching a query.""" + grp_common.get_by_cmd( + clickctx=clickctx, + ctx=ctx, + url=url, + key=key, + secret=secret, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + value=value, + query=query, + field=field, + fields_default=fields_default, + max_rows=max_rows, + method="get_by_ip", + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_get_by_mac.py b/axonius_api_client/cli/grp_objects/cmd_get_by_mac.py new file mode 100644 index 00000000..e4de59f7 --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_get_by_mac.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import grp_common + + +@click.command("get-by-mac", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@grp_common.get_by_opts +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + value, + query, + field, + fields_default, + max_rows, +): + """Get all objects matching a query.""" + grp_common.get_by_cmd( + clickctx=clickctx, + ctx=ctx, + url=url, + key=key, + secret=secret, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + value=value, + query=query, + field=field, + fields_default=fields_default, + max_rows=max_rows, + method="get_by_mac", + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_get_by_mail.py b/axonius_api_client/cli/grp_objects/cmd_get_by_mail.py new file mode 100644 index 00000000..416645fd --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_get_by_mail.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import grp_common + + +@click.command("get-by-mail", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@grp_common.get_by_opts +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + value, + query, + field, + fields_default, + max_rows, +): + """Get all objects matching a query.""" + grp_common.get_by_cmd( + clickctx=clickctx, + ctx=ctx, + url=url, + key=key, + secret=secret, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + value=value, + query=query, + field=field, + fields_default=fields_default, + max_rows=max_rows, + method="get_by_mail", + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_get_by_saved_query.py b/axonius_api_client/cli/grp_objects/cmd_get_by_saved_query.py new file mode 100644 index 00000000..12e450db --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_get_by_saved_query.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. 
import context + + +@click.command("get-by-saved-query", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--name", + "-n", + help="Name of saved query to get assets from.", + required=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--max-rows", "-mr", help="Only return this many rows.", type=click.INT, hidden=True +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + name, + max_rows, +): + """Get all objects matching a query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + api = getattr(client, clickctx.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.get_by_saved_query(name=name, max_rows=max_rows) + + formatters = {"json": context.to_json, "csv": context.obj_to_csv} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_get_by_subnet.py b/axonius_api_client/cli/grp_objects/cmd_get_by_subnet.py new file mode 100644 index 00000000..bf170bc2 --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_get_by_subnet.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import grp_common + + +@click.command("get-by-subnet", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--value", + "-v", + help="Value to search for.", + required=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--query", + "-q", + help="Query to add to the end of the query built to search for --value.", + default="", + metavar="QUERY", + show_envvar=True, + show_default=True, +) +@click.option( + "--field", + "-f", + help="Columns to include in the format of adapter:field.", + metavar="ADAPTER:FIELD", + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--fields-default/--no-fields-default", + "-fd/-nfd", + default=True, + help="Include default columns for this object type.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--max-rows", "-mr", help="Only return this many rows.", type=click.INT, hidden=True +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + value, + query, + field, + fields_default, + max_rows, +): + """Get all objects matching a query.""" + grp_common.get_by_cmd( + clickctx=clickctx, + ctx=ctx, + url=url, + key=key, + secret=secret, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + value=value, + query=query, + field=field, + fields_default=fields_default, + max_rows=max_rows, + method="get_by_subnet", + ) diff --git a/axonius_api_client/cli/grp_objects/cmd_get_by_username.py b/axonius_api_client/cli/grp_objects/cmd_get_by_username.py new file mode 100644 index 00000000..1675099a --- /dev/null +++ b/axonius_api_client/cli/grp_objects/cmd_get_by_username.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, 
unicode_literals + +import click + +from .. import context +from . import grp_common + + +@click.command("get-by-username", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@grp_common.get_by_opts +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + value, + query, + field, + fields_default, + max_rows, +): + """Get all objects matching a query.""" + grp_common.get_by_cmd( + clickctx=clickctx, + ctx=ctx, + url=url, + key=key, + secret=secret, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + value=value, + query=query, + field=field, + fields_default=fields_default, + max_rows=max_rows, + method="get_by_username", + ) diff --git a/axonius_api_client/cli/grp_objects/grp_common.py b/axonius_api_client/cli/grp_objects/grp_common.py new file mode 100644 index 00000000..acbf0819 --- /dev/null +++ b/axonius_api_client/cli/grp_objects/grp_common.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import functools + +import click + +from .. 
# ---- axonius_api_client/cli/grp_objects/grp_common.py ----
from .. import context


def get_by_opts(func):
    """Combine the @click.option decorators shared by the get-by-* commands.

    Adds: --value (multiple), --query, --field (multiple),
    --fields-default/--no-fields-default, and the hidden --max-rows.
    """
    @click.option(
        "--value",
        "-v",
        help="Values to search for.",
        required=True,
        multiple=True,
        show_envvar=True,
        show_default=True,
    )
    @click.option(
        "--query",
        "-q",
        help="Query to add to the end of the query built to search for --value.",
        default="",
        metavar="QUERY",
        show_envvar=True,
        show_default=True,
    )
    @click.option(
        "--field",
        "-f",
        help="Columns to include in the format of adapter:field.",
        metavar="ADAPTER:FIELD",
        multiple=True,
        show_envvar=True,
        show_default=True,
    )
    @click.option(
        "--fields-default/--no-fields-default",
        "-fd/-nfd",
        default=True,
        help="Include default columns for this object type.",
        is_flag=True,
        show_envvar=True,
        show_default=True,
    )
    @click.option(
        "--max-rows",
        "-mr",
        help="Only return this many rows.",
        type=click.INT,
        hidden=True,
    )
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    return wrapper


def get_by_cmd(
    clickctx,
    ctx,
    url,
    key,
    secret,
    export_format,
    export_file,
    export_path,
    export_overwrite,
    value,
    query,
    field,
    fields_default,
    max_rows,
    method,
):
    """Shared implementation for all get-by-* commands.

    Connects, resolves the users/devices API object from the parent click
    group, calls ``method`` on it, and exports the result.
    """
    client = ctx.start_client(url=url, key=key, secret=secret)

    # Parent group is "users" or "devices"; pick the matching API object.
    api = getattr(client, clickctx.parent.command.name)

    apimethod = getattr(api, method)

    with context.exc_wrap(wraperror=ctx.wraperror):
        raw_data = apimethod(
            # A single --value is unwrapped so the API does an exact match
            # instead of an "in list of one" match.
            value=value[0] if context.is_list(value) and len(value) == 1 else value,
            query_post=query,
            fields=field,
            fields_default=fields_default,
            max_rows=max_rows,
        )

    formatters = {"json": context.to_json, "csv": context.obj_to_csv}

    ctx.handle_export(
        raw_data=raw_data,
        formatters=formatters,
        export_format=export_format,
        export_file=export_file,
        export_path=export_path,
        export_overwrite=export_overwrite,
    )
a/axonius_api_client/cli/grp_reports/__init__.py b/axonius_api_client/cli/grp_reports/__init__.py new file mode 100644 index 00000000..c1fe5941 --- /dev/null +++ b/axonius_api_client/cli/grp_reports/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . import cmd_missing_adapters + + +@click.group() +@context.pass_context +def reports(ctx): + """Work with device assets.""" + return ctx + + +reports.add_command(cmd_missing_adapters.cmd) diff --git a/axonius_api_client/cli/grp_reports/cmd_missing_adapters.py b/axonius_api_client/cli/grp_reports/cmd_missing_adapters.py new file mode 100644 index 00000000..499f0548 --- /dev/null +++ b/axonius_api_client/cli/grp_reports/cmd_missing_adapters.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. 
import context + + +@click.command("missing-adapters", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--rows", + "-r", + help="The JSON data of rows returned by any get command for this object type.", + default="-", + type=click.File(mode="r"), + show_envvar=True, + show_default=True, +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + rows, +): + """Get a report of adapters for objects in query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + content = context.json_from_stream(ctx=ctx, stream=rows, src="--rows") + + api = getattr(client, clickctx.parent.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.reports.missing_adapters(rows=content) + + formatters = {"json": context.to_json, "csv": context.obj_to_csv} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_saved_query/__init__.py b/axonius_api_client/cli/grp_saved_query/__init__.py new file mode 100644 index 00000000..3f98c60f --- /dev/null +++ b/axonius_api_client/cli/grp_saved_query/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import cmd_add, cmd_delete, cmd_get, cmd_get_by_name + + +@click.group() +@context.pass_context +def saved_query(ctx): + """Work with device assets.""" + return ctx + + +saved_query.add_command(cmd_get.cmd) +saved_query.add_command(cmd_get_by_name.cmd) +saved_query.add_command(cmd_add.cmd) +saved_query.add_command(cmd_delete.cmd) diff --git a/axonius_api_client/cli/grp_saved_query/cmd_add.py b/axonius_api_client/cli/grp_saved_query/cmd_add.py new file mode 100644 index 00000000..abb244a7 --- /dev/null +++ b/axonius_api_client/cli/grp_saved_query/cmd_add.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from ... import constants +from .. import context +from . import grp_common + + +@click.command("add", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--name", + "-n", + help="Name of saved query to create.", + required=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--query", + "-q", + help="Query built from Query Wizard.", + required=True, + metavar="QUERY", + show_envvar=True, + show_default=True, +) +@click.option( + "--field", + "-f", + help="Columns to include in the format of adapter:field.", + metavar="ADAPTER:FIELD", + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--fields-default/--no-fields-default", + "-fd/-nfd", + default=True, + help="Include default fields for this object type.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--sort-field", + "-sf", + help="Column to sort data on.", + metavar="ADAPTER:FIELD", + show_envvar=True, + show_default=True, +) +@click.option( + "--sort-descending/--no-sort-descending", + "-sd", + default=True, + help="Sort --sort-field descending.", + is_flag=True, + show_envvar=True, + show_default=True, +) +@click.option( + 
"--column-filter", + "-cf", + help="Columns to filter in the format of adapter:field=value.", + metavar="ADAPTER:FIELD=value", + type=context.SplitEquals(), + multiple=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--gui-page-size", + "-gps", + default=format(constants.GUI_PAGE_SIZES[0]), + help="Number of rows to show per page in GUI.", + type=click.Choice([format(x) for x in constants.GUI_PAGE_SIZES]), + show_envvar=True, + show_default=True, +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + name, + query, + field, + fields_default, + sort_field, + sort_descending, + column_filter, + gui_page_size, +): + """Get a report of adapters for objects in query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + api = getattr(client, clickctx.parent.parent.command.name) + + column_filters = dict(column_filter) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.saved_query.add( + name=name, + query=query, + fields=field, + fields_default=fields_default, + sort=sort_field, + sort_descending=sort_descending, + column_filters=column_filters, + gui_page_size=gui_page_size, + ) + + msg = "Successfully created saved query: {n}" + msg = msg.format(n=raw_data["name"]) + ctx.echo_ok(msg) + + formatters = {"json": context.to_json, "csv": grp_common.to_csv} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_saved_query/cmd_delete.py b/axonius_api_client/cli/grp_saved_query/cmd_delete.py new file mode 100644 index 00000000..2a0d35b3 --- /dev/null +++ b/axonius_api_client/cli/grp_saved_query/cmd_delete.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import 
absolute_import, division, print_function, unicode_literals + +import time + +import click + +from ... import tools +from .. import context + + +@click.command("delete", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@click.option( + "--rows", + "-r", + help="JSON rows returned by any get command for saved queries of this object type.", + default="-", + type=click.File(mode="r"), + show_envvar=True, + show_default=True, +) +@click.option( + "--wait", + "-w", + help="Wait this many seconds before deleting", + default=30, + type=click.INT, + show_envvar=True, + show_default=True, +) +@context.pass_context +@click.pass_context +def cmd(clickctx, ctx, url, key, secret, rows, wait): + """Get a report of adapters for objects in query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + content = context.json_from_stream(ctx=ctx, stream=rows, src="--rows") + content = tools.listify(obj=content, dictkeys=False) + names = tools.join_comma([x["name"] for x in content]) + + msg = "In {s} second will delete saved queries: {n}" + msg = msg.format(s=wait, n=names) + ctx.echo_warn(msg) + + time.sleep(wait) + + api = getattr(client, clickctx.parent.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + api.saved_query.delete(rows=content) + + msg = "Successfully deleted saved queries: {n}" + msg = msg.format(n=names) + ctx.echo_ok(msg) diff --git a/axonius_api_client/cli/grp_saved_query/cmd_get.py b/axonius_api_client/cli/grp_saved_query/cmd_get.py new file mode 100644 index 00000000..63c310cb --- /dev/null +++ b/axonius_api_client/cli/grp_saved_query/cmd_get.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import grp_common + + +@click.command("get", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--max-rows", "-mr", help="Only return this many rows.", type=click.INT, hidden=True +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + max_rows, +): + """Get a report of adapters for objects in query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + api = getattr(client, clickctx.parent.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.saved_query.get(max_rows=max_rows) + + formatters = {"json": context.to_json, "csv": grp_common.to_csv} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_saved_query/cmd_get_by_name.py b/axonius_api_client/cli/grp_saved_query/cmd_get_by_name.py new file mode 100644 index 00000000..7dd12fcb --- /dev/null +++ b/axonius_api_client/cli/grp_saved_query/cmd_get_by_name.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import click + +from .. import context +from . 
import grp_common + + +@click.command("get-by-name", context_settings=context.CONTEXT_SETTINGS) +@context.connect_options +@context.export_options +@click.option( + "--name", + "-n", + help="Name of saved query to get.", + required=True, + show_envvar=True, + show_default=True, +) +@click.option( + "--max-rows", "-mr", help="Only return this many rows.", type=click.INT, hidden=True +) +@context.pass_context +@click.pass_context +def cmd( + clickctx, + ctx, + url, + key, + secret, + export_format, + export_file, + export_path, + export_overwrite, + name, + max_rows, +): + """Get a report of adapters for objects in query.""" + client = ctx.start_client(url=url, key=key, secret=secret) + + api = getattr(client, clickctx.parent.parent.command.name) + + with context.exc_wrap(wraperror=ctx.wraperror): + raw_data = api.saved_query.get_by_name(value=name, max_rows=max_rows) + + formatters = {"json": context.to_json, "csv": grp_common.to_csv} + + ctx.handle_export( + raw_data=raw_data, + formatters=formatters, + export_format=export_format, + export_file=export_file, + export_path=export_path, + export_overwrite=export_overwrite, + ) diff --git a/axonius_api_client/cli/grp_saved_query/grp_common.py b/axonius_api_client/cli/grp_saved_query/grp_common.py new file mode 100644 index 00000000..17adde6e --- /dev/null +++ b/axonius_api_client/cli/grp_saved_query/grp_common.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +"""Command line interface for Axonius API Client.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +from ... import tools +from .. 
import context + + +def to_csv(ctx, raw_data, **kwargs): + """Pass.""" + rows = [] + + kvtmpl = "{}: {}".format + + for raw_row in tools.listify(raw_data, dictkeys=False): + row = { + k: context.join_cr(v, is_cell=True) + for k, v in raw_row.items() + if context.is_los(v) + } + rows.append(row) + + view = raw_row.get("view", {}) + query = view.get("query", {}) + fields = view.get("fields", []) + colfilters = view.get("colFilters", {}) + colfilters = [kvtmpl(k, v) for k, v in colfilters.items()] + sort = view.get("sort", {}) + + row["query"] = query.get("filter", None) + row["fields"] = context.join_cr(fields, is_cell=True) + row["column_filters"] = context.join_cr(colfilters, is_cell=True) + row["sort_descending"] = format(sort.get("desc")) + row["sort_field"] = sort.get("field") + + return context.dictwriter(rows=rows) diff --git a/axonius_api_client/connect.py b/axonius_api_client/connect.py new file mode 100644 index 00000000..b49932d1 --- /dev/null +++ b/axonius_api_client/connect.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +"""Axonius API Client utility tools module.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import re + +import requests + +from . import api, auth, constants, exceptions, http, logs, tools + + +class Connect(object): + """Pass. + + Attributes: + actions (:obj:`api.actions.Actions`): Actions API object. 
+ adapters (TYPE): Description + devices (TYPE): Description + enforcements (TYPE): Description + users (TYPE): Description + + """ + + _REASON_RES = [ + re.compile(r".*?object at.*?\>\: ([a-zA-Z0-9\]\[: ]+)"), + re.compile(r".*?\] (.*) "), + ] + + @classmethod + def _get_exc_reason(cls, exc): + """Pass.""" + reason = format(exc) + for reason_re in cls._REASON_RES: + if reason_re.search(reason): + return reason_re.sub(r"\1", reason).rstrip("')") + return reason + + def __init__(self, url, key, secret, **kwargs): + """Pass.""" + self._started = False + self._start_dt = None + self._wraperror = kwargs.get("wraperror", True) + + proxy = kwargs.get("proxy", "") + certpath = kwargs.get("certpath", "") + certverify = kwargs.get("certverify", False) + certwarn = kwargs.get("certwarn", True) + save_history = kwargs.get("save_history", False) + log_request_attrs = kwargs.get("log_request_attrs", False) + log_response_attrs = kwargs.get("log_response_attrs", False) + log_request_body = kwargs.get("log_request_body", False) + log_response_body = kwargs.get("log_response_body", False) + log_logger = kwargs.get("log_logger", logs.LOG) + log_level_package = kwargs.get("log_level_package", constants.LOG_LEVEL_PACKAGE) + log_level_http = kwargs.get("log_level_http", constants.LOG_LEVEL_HTTP) + log_level_auth = kwargs.get("log_level_auth", constants.LOG_LEVEL_AUTH) + log_level_api = kwargs.get("log_level_api", constants.LOG_LEVEL_API) + log_level_console = kwargs.get("log_level_console", constants.LOG_LEVEL_CONSOLE) + log_level_file = kwargs.get("log_level_file", constants.LOG_LEVEL_FILE) + log_console = kwargs.get("log_console", False) + log_console_method = kwargs.get("log_console_method", logs.add_stderr) + log_file = kwargs.get("log_file", False) + log_file_method = kwargs.get("log_file_method", logs.add_file) + log_file_name = kwargs.get("log_file_name", constants.LOG_FILE_NAME) + log_file_path = kwargs.get("log_file_path", constants.LOG_FILE_PATH) + log_file_max_mb = 
kwargs.get("log_file_max_mb", constants.LOG_FILE_MAX_MB) + log_file_max_files = kwargs.get( + "log_file_max_files", constants.LOG_FILE_MAX_FILES + ) + + logs.set_level(obj=log_logger, level=log_level_package) + + self._handler_file = None + self._handler_con = None + + if log_console: + self._handler_con = log_console_method( + obj=log_logger, level=log_level_console + ) + + if log_file: + self._handler_file = log_file_method( + obj=log_logger, + level=log_level_file, + file_path=log_file_path, + file_name=log_file_name, + max_mb=log_file_max_mb, + max_files=log_file_max_files, + ) + + self._http_args = { + "url": url, + "https_proxy": proxy, + "certpath": certpath, + "certwarn": certwarn, + "certverify": certverify, + "log_level": log_level_http, + "log_request_attrs": log_request_attrs, + "log_response_attrs": log_response_attrs, + "log_request_body": log_request_body, + "log_response_body": log_response_body, + "save_history": save_history, + } + + self._auth_args = {"key": key, "secret": secret, "log_level": log_level_auth} + + self._http = http.Http(**self._http_args) + + self._auth = auth.ApiKey(http=self._http, **self._auth_args) + + self._api_args = {"auth": self._auth, "log_level": log_level_api} + + @property + def users(self): + """Pass.""" + self.start() + if not hasattr(self, "_users"): + self._users = api.Users(**self._api_args) + return self._users + + @property + def devices(self): + """Pass.""" + self.start() + if not hasattr(self, "_devices"): + self._devices = api.Devices(**self._api_args) + return self._devices + + @property + def adapters(self): + """Pass.""" + self.start() + if not hasattr(self, "_adapters"): + self._adapters = api.Adapters(**self._api_args) + return self._adapters + + @property + def enforcements(self): + """Pass.""" + self.start() + if not hasattr(self, "_enforcements"): + self._enforcements = api.Enforcements(**self._api_args) + return self._enforcements + + def start(self): + """Pass.""" + if not self._started: + try: + 
self._auth.login() + except Exception as exc: + if not self._wraperror: + raise + + msg_pre = "Unable to connect to {url!r}".format(url=self._http.url) + + if isinstance(exc, requests.exceptions.ConnectTimeout): + msg = "{pre}: connection timed out after {t} seconds" + msg = msg.format(pre=msg_pre, t=self._http._CONNECT_TIMEOUT) + raise exceptions.ConnectError(msg=msg, exc=exc) + elif isinstance(exc, requests.exceptions.ConnectionError): + msg = "{pre}: {reason}" + msg = msg.format(pre=msg_pre, reason=self._get_exc_reason(exc=exc)) + raise exceptions.ConnectError(msg=msg, exc=exc) + elif isinstance(exc, exceptions.InvalidCredentials): + msg = "{pre}: Invalid Credentials supplied" + msg = msg.format(pre=msg_pre, url=self._http.url) + raise exceptions.ConnectError(msg=msg, exc=exc) + + msg = "{pre}: {exc}" + msg = msg.format(pre=msg_pre, exc=exc) + raise exceptions.ConnectError(msg=msg, exc=exc) + + self._started = True + self._start_dt = tools.dt_now() + + def __str__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + client = getattr(self, "_http", "") + url = getattr(client, "url", self._http_args["url"]) + if self._started: + uptime = tools.dt_sec_ago(self._start_dt) + return "Connected to {url!r} for {uptime}".format(uptime=uptime, url=url) + else: + return "Not connected to {url!r}".format(url=url) + + def __repr__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + return self.__str__() diff --git a/axonius_api_client/constants.py b/axonius_api_client/constants.py index f2d53e96..b28ed1c8 100644 --- a/axonius_api_client/constants.py +++ b/axonius_api_client/constants.py @@ -1,24 +1,19 @@ # -*- coding: utf-8 -*- """Constants for this package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import absolute_import, division, print_function, unicode_literals + +import logging +import os + +from . 
import __package__ as PACKAGE_ROOT MAX_PAGE_SIZE = 2000 """:obj:`int`: Maximum page size that REST API allows.""" -DEFAULT_PAGE_SIZE = 1000 -""":obj:`int`: Default page size to use for public API methods.""" - GUI_PAGE_SIZES = [25, 50, 100] """:obj:`list` of :obj:`int`: Valid page sizes for GUI paging.""" -GENERIC_FIELD_PREFIX = "specific_data.data" -""":obj:`str`: Prefix that all generic fields should begin with.""" - -ADAPTER_FIELD_PREFIX = "adapters_data.{adapter_name}" -""":obj:`str`: Prefix that all adapter fields should begin with.""" +QUERY_USE_POST_LENGTH = 1000 LOG_REQUEST_ATTRS_BRIEF = [ "request to {request.url!r}", @@ -53,3 +48,39 @@ "size={size}", ] """:obj:`list` of :obj:`str`: Response attributes to log when verbose=True.""" + +LOG_FMT_CONSOLE = "%(levelname)-8s [%(name)s] %(message)s" +LOG_FMT_FILE = "%(asctime)s %(levelname)-8s [%(name)s:%(funcName)s()] %(message)s" + +LOG_DATEFMT_CONSOLE = "%m/%d/%Y %I:%M:%S %p" +LOG_DATEFMT_FILE = "%m/%d/%Y %I:%M:%S %p" + +LOG_LEVEL_CONSOLE = "info" +LOG_LEVEL_FILE = "info" +LOG_LEVEL_HTTP = "info" +LOG_LEVEL_AUTH = "info" +LOG_LEVEL_API = "info" +LOG_LEVEL_PACKAGE = "debug" + +LOG_LEVELS_STR = ["debug", "info", "warning", "error", "fatal"] +LOG_LEVELS_STR_CSV = ", ".join(LOG_LEVELS_STR) +LOG_LEVELS_INT = [getattr(logging, x.upper()) for x in LOG_LEVELS_STR] +LOG_LEVELS_INT_CSV = ", ".join([format(x) for x in LOG_LEVELS_INT]) + +LOG_FILE_PATH = os.getcwd() +LOG_FILE_PATH_MODE = 0o700 +LOG_FILE_NAME = "{pkg}.log".format(pkg=PACKAGE_ROOT) +LOG_FILE_MAX_MB = 5 +LOG_FILE_MAX_FILES = 5 + +LOG_NAME_STDERR = "handler_stderr" +LOG_NAME_STDOUT = "handler_stdout" +LOG_NAME_FILE = "handler_file" + + +CSV_FIELDS = { + "device": ["id", "serial", "mac_address", "hostname", "name"], + "user": ["id", "username", "mail", "name"], + "sw": ["hostname", "installed_sw_name"], +} +SETTING_UNCHANGED = ["unchanged"] diff --git a/axonius_api_client/exceptions.py b/axonius_api_client/exceptions.py index 5adf85b4..b089b458 100644 --- 
a/axonius_api_client/exceptions.py +++ b/axonius_api_client/exceptions.py @@ -1,10 +1,576 @@ # -*- coding: utf-8 -*- """Parent exception and warnings for this package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import absolute_import, division, print_function, unicode_literals +from . import tools -class PackageError(Exception): + +class AxonError(Exception): """Parent exception for all package errors.""" + + +class AxonWarning(Warning): + """Pass.""" + + +class ApiError(AxonError): + """Parent exception for all API errors.""" + + +class ApiWarning(AxonWarning): + """Parent exception for all API errors.""" + + +class BetaWarning(AxonWarning): + """Pass.""" + + def __init__(self, obj): + """Constructor.""" + msg = "Object {obj} is considered **BETA** status! Here be dragons..." + msg = msg.format(obj=obj) + + super(AxonWarning, self).__init__(msg) + + +class ToolsError(AxonError): + """Parent exception for all tools errors.""" + + +class AuthError(AxonError): + """Parent exception for all Authentication errors.""" + + +class HttpError(AxonError): + """Parent exception for all Authentication errors.""" + + +class CnxError(ApiError): + """Pass.""" + + +class CnxWarning(ApiWarning): + """Pass.""" + + +class CnxDeleteForce(CnxError): + """Pass.""" + + def __init__(self, cnxinfo): + """Constructor. + + Args: + added (:obj:`requests.Response`): + Response error was thrown for. + exc (:obj:`Exception`, optional): + Original exception thrown. + + Defaults to: None. 
+ + """ + msg = [ + "Connection info: {cnxinfo}", + "Will not delete connection unless force=True!!", + ] + msg = tools.join_cr(obj=msg).format(cnxinfo=cnxinfo) + super(CnxDeleteForce, self).__init__(msg) + + +class CnxDeleteFailed(CnxError): + """Pass.""" + + def __init__(self, cnxinfo, response): + """Constructor.""" + self.cnxinfo = cnxinfo + self.response = response + + msg = [ + "Connection info: {cnxinfo}", + "Failed to delete connection!!", + "Response:{response}", + ] + msg = tools.join_cr(obj=msg).format( + cnxinfo=cnxinfo, response=tools.json_reload(obj=response, error=False) + ) + + super(CnxDeleteFailed, self).__init__(msg) + + +class CnxDeleteWarning(CnxWarning): + """Pass.""" + + def __init__(self, cnxinfo, sleep): + """Constructor.""" + msg = ["Connection info: {cnxinfo}", "Will delete connection in {s} seconds!!"] + msg = tools.join_cr(obj=msg).format(s=sleep, cnxinfo=cnxinfo) + + super(CnxDeleteWarning, self).__init__(msg) + + +class CnxDeleteFailedWarning(CnxWarning): + """Pass.""" + + def __init__(self, cnxinfo, response): + """Constructor.""" + self.cnxinfo = cnxinfo + self.response = response + + msg = [ + "Connection info: {cnxinfo}", + "Failed to delete connection!!", + "Response: {response}", + ] + msg = tools.join_cr(obj=msg).format(cnxinfo=cnxinfo, response=response) + + super(CnxDeleteFailedWarning, self).__init__(msg) + + +class CnxRefetchFailure(CnxError): + """Pass.""" + + def __init__( + self, response, adapter, node, filter_value, filter_method, known=None, **kwargs + ): + """Constructor. + + Args: + response (:obj:`requests.Response`): + Response error was thrown for. + exc (:obj:`Exception`, optional): + Original exception thrown. + + Defaults to: None. 
+ + """ + self.response = response + self.adapter = adapter + self.node = node + self.known = known + self.kwargs = kwargs + + if known: + known = known_cb(known=known, kwargs=kwargs) + known = tools.join_cr(obj=known, indent=" ") + + msgs = [ + "Failed to find connection", + "Adapter {!r} on node {!r}".format(adapter, node), + "Filter value {!r}".format(filter_value), + "Filter method {}".format(filter_method), + tools.json_reload(obj=response, error=False), + "Known connections: {}".format(known), + ] + + msg = tools.join_cr(obj=msgs) + super(CnxRefetchFailure, self).__init__(msg) + + +class CnxCsvWarning(CnxWarning): + """Pass.""" + + def __init__(self, ids_type, ids, name, headers): + """Constructor.""" + msg = "No {ids_type} identifiers {ids} found in CSV file {name} headers {h}" + msg = msg.format(ids_type=ids_type, ids=ids, name=name, h=headers) + + super(CnxCsvWarning, self).__init__(msg) + + +class CnxConnectFailure(CnxError): + """Error when response has error key in JSON.""" + + def __init__(self, response, adapter, node): + """Constructor. + + Args: + response (:obj:`requests.Response`): + Response error was thrown for. + exc (:obj:`Exception`, optional): + Original exception thrown. + + Defaults to: None. 
+ + """ + self.response = response + self.adapter = adapter + self.node = node + + msg = "Connection test failed for adapter {a!r} on node {n!r}:\n{r}" + msg = msg.format( + a=adapter, n=node, r=tools.json_reload(obj=response, error=False) + ) + + super(CnxConnectFailure, self).__init__(msg) + + +class CnxSettingError(CnxError): + """Pass.""" + + def __init__(self, name, value, schema, adapter, error): + """Pass.""" + self.name = name + self.value = value + self.schema = schema + self.adapter = adapter + self.error = error + + msg = [ + "Error with {req} setting {n!r} on adapter {a!r} on node {an!r}", + "Supplied value of {v!r}", + "Setting schema:", + "{ss}", + "Error: {error}", + ] + + msg = tools.join_cr(obj=msg).format( + a=adapter["name"], + an=adapter["node_name"], + req="required" if schema["required"] else "optional", + n=name, + v=value, + error=error, + ss=tools.json_dump(obj=schema, error=False), + ) + + super(CnxSettingError, self).__init__(msg) + + +class CnxSettingMissing(CnxSettingError): + """Pass.""" + + def __init__(self, name, value, schema, adapter): + """Pass.""" + error = "Setting {n!r} was not supplied and no default value defined" + error = error.format(n=name) + + super(CnxSettingMissing, self).__init__( + name=name, value=value, schema=schema, error=error, adapter=adapter + ) + + +class CnxSettingFileMissing(CnxSettingError): + """Pass.""" + + def __init__(self, name, value, schema, adapter): + """Pass.""" + examples = [ + { + name: { + "uuid": "uuid of already uploaded file", + "filename": "name of already uploaded file", + } + }, + { + name: { + "filename": "name of file to use when uploading file", + "filecontent": "content of file to upload", + "filecontent_type": "optional mime type", + } + }, + { + name: { + "filepath": "path of file to upload", + "filecontent_type": "optional mime type", + } + }, + ] + examples = tools.join_cr(obj=[format(x) for x in examples]) + + error = "File setting {n!r} with value {v!r} is invalid, examples: 
{ex}" + error = error.format(n=name, v=value, ex=examples) + + super(CnxSettingFileMissing, self).__init__( + name=name, value=value, schema=schema, error=error, adapter=adapter + ) + + +class CnxSettingInvalidType(CnxSettingError): + """Pass.""" + + def __init__(self, name, value, schema, mustbe, adapter): + """Pass.""" + self.mustbe = mustbe + + error = "Invalid type supplied {t!r}, must be type {mt!r}" + error = error.format(t=type(value).__name__, mt=mustbe) + + super(CnxSettingInvalidType, self).__init__( + name=name, value=value, schema=schema, error=error, adapter=adapter + ) + + +class CnxSettingInvalidChoice(CnxSettingError): + """Pass.""" + + def __init__(self, name, value, enum, schema, adapter): + """Pass.""" + self.enum = enum + + error = "Invalid value {v!r}, must be one of {e}" + error = error.format(v=value, e=enum) + + super(CnxSettingInvalidChoice, self).__init__( + name=name, value=value, schema=schema, error=error, adapter=adapter + ) + + +class CnxSettingUnknownType(CnxSettingError): + """Pass.""" + + def __init__(self, name, value, type_str, schema, adapter): + """Pass.""" + self.type_str = type_str + + error = "Unknown connection setting type {t!r} in schema" + error = error.format(t=type_str) + + super(CnxSettingUnknownType, self).__init__( + name=name, value=value, schema=schema, error=error, adapter=adapter + ) + + +class ResponseError(ApiError): + """Parent exception for any response error.""" + + def __init__(self, response, error="", exc=None, details=True, bodies=True): + """Constructor. + + Args: + response (:obj:`requests.Response`): + Response error was thrown for. + error (:obj:`str`, optional): + Error message. + + Defaults to: "". + exc (:obj:`Exception`, optional): + Original exception thrown. + + Defaults to: None. + bodies (:obj:`bool`, optional): + Show request and response bodies. + + Defaults to: True. 
+ + """ + self.response = response + """:obj:`requests.Response`: Response error was thrown for.""" + + error = error or "Response error!" + self.error = error + """:obj:`str`: Error message.""" + + self.exc = exc + """:obj:`Exception`: Original exception thrown.""" + + msgs = [] + + if error: + msgs.append(error) + + if details: + txt = [ + "code={r.status_code!r}", + "reason={r.reason!r}", + "method={r.request.method!r}", + "url={r.url!r}", + ] + txt = tools.join_comma(obj=txt).format(r=response) + error = "Response details: {}".format(txt) + msgs.append(error) + + if exc: + error = "original exception: {}".format(exc) + msgs.append(error) + + if bodies: + msgs += [ + "*** request ***", + tools.json_reload(obj=response.request.body, error=False), + "*** response ***", + tools.json_reload(obj=response.text, error=False), + ] + + msg = tools.join_cr(obj=msgs) + + super(ResponseError, self).__init__(msg) + + +class ResponseNotOk(ResponseError): + """Error when response has invalid JSON.""" + + +class JsonInvalid(ResponseError): + """Error when response has invalid JSON.""" + + def __init__(self, response, exc=None, details=True, bodies=True): + """Constructor. + + Args: + response (:obj:`requests.Response`): + Response error was thrown for. + exc (:obj:`Exception`, optional): + Original exception thrown. + + Defaults to: None. + + """ + error = "JSON is not valid in response" + super(JsonInvalid, self).__init__( + response=response, error=error, exc=exc, details=details, bodies=bodies + ) + + +class JsonError(ResponseError): + """Error when response has error key in JSON.""" + + def __init__(self, response, data, exc=None, details=True, bodies=False): + """Constructor. + + Args: + response (:obj:`requests.Response`): + Response error was thrown for. + exc (:obj:`Exception`, optional): + Original exception thrown. + + Defaults to: None. 
+ + """ + if isinstance(data, dict): + data = ["{}: {}".format(k, v) for k, v in data.items()] + data = tools.join_cr(obj=data, indent=" ") + + error = "Found error in response JSON: {d}" + error = error.format(d=data) + + super(JsonError, self).__init__( + response=response, error=error, exc=exc, details=details, bodies=bodies + ) + + +class ValueNotFound(ApiError): + """Pass.""" + + def __init__( + self, + value, + value_msg, + known=None, + known_msg=None, + exc=None, + match_type="equals", + **kwargs + ): + """Constructor.""" + self.value = value + self.known = known + self.kwargs = kwargs + self.exc = exc + + msgs = [] + + if exc: + msg = "Original exception: {exc}".format(exc=exc) + msgs.append(msg) + + msg = "Unable to find {vm} that {mt} value {v!r}" + msg = msg.format(vm=value_msg, v=value, mt=match_type) + msgs.append(msg) + + if known: + known = known_cb(known=known, kwargs=kwargs) + + msg = "Valid {known_msg}: {v}" + msg = msg.format( + known_msg=known_msg, v=tools.join_cr(obj=known, indent=" ") + ) + msgs.append(msg) + + msg = tools.join_cr(obj=msgs) + super(ApiError, self).__init__(msg) + + +class InvalidCredentials(AuthError): + """Error on failed login.""" + + def __init__(self, auth, exc=None): + """Constructor. + + Args: + auth (:obj:`axonius_api_client.models.AuthModel`): + Authentication method. + exc (:obj:`Exception`, optional): + Original Exception, if any. + + Defaults to: None. + + """ + self.auth = auth + """:obj:`axonius_api_client.models.AuthModel`: Authentication method.""" + + self.exc = exc + """:obj:`Exception`: Original Exception, if any.""" + + msg = "Invalid credentials on {auth} -- exception: {exc}" + msg = msg.format(auth=auth, exc=exc) + super(InvalidCredentials, self).__init__(msg) + + +class NotLoggedIn(AuthError): + """Error when not logged in.""" + + def __init__(self, auth): + """Constructor. + + Args: + auth (:obj:`axonius_api_client.models.AuthModel`): + Authentication method. 
+ + """ + self.auth = auth + """:obj:`axonius_api_client.models.AuthModel`: Authentication method.""" + + msg = "Must call login() on {auth}" + msg = msg.format(auth=auth) + super(NotLoggedIn, self).__init__(msg) + + +class AlreadyLoggedIn(AuthError): + """Error when already logged in.""" + + def __init__(self, auth): + """Constructor. + + Args: + auth (:obj:`axonius_api_client.models.AuthModel`): + Authentication method. + + """ + self.auth = auth + """:obj:`axonius_api_client.models.AuthModel`: Authentication method.""" + + msg = "Already logged in on {auth}" + msg = msg.format(auth=auth) + super(AlreadyLoggedIn, self).__init__(msg) + + +class ConnectError(AxonError): + """Pass.""" + + def __init__(self, msg, exc): + """Pass.""" + self.msg = msg + self.exc = exc + super(ConnectError, self).__init__(msg) + + +def known_cb(known, kwargs=None): + """Pass.""" + kwargs = kwargs or {} + + if callable(known): + try: + known = known(**kwargs) + except Exception as exc: + msg = "known callback {cb} with kwargs {kw} failed with exception {exc}" + known = [msg.format(cb=known, kw=kwargs, exc=exc)] + + return known diff --git a/axonius_api_client/http.py b/axonius_api_client/http.py new file mode 100644 index 00000000..90a3efd2 --- /dev/null +++ b/axonius_api_client/http.py @@ -0,0 +1,591 @@ +# -*- coding: utf-8 -*- +"""Axonius API HTTP client module.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import logging +import warnings + +import requests +import six + +from . import constants, exceptions, logs, tools, version + +InsecureRequestWarning = requests.urllib3.exceptions.InsecureRequestWarning + + +class Http(object): + """HTTP client for sending requests usings :obj:`requests.Session`. + + Attributes: + session (:obj:`requests.Session`): Session object for sending requests. + url (:obj:`str`): URL of Axonius instance. 
+ + """ + + def __init__( + self, + url, + connect_timeout=5, + response_timeout=60, + certpath=None, + certwarn=True, + certverify=False, + http_proxy=None, + https_proxy=None, + save_last=True, + save_history=False, + **kwargs + ): + """Constructor. + + Args: + url (:obj:`str` or :obj:`axonius_api_client.http.parser.ParserUrl`): + Axonius API URL. + connect_timeout (:obj:`int`, optional): + Seconds to wait for connection to url to open. + + Defaults to: 5. + response_timeout (:obj:`int`, optional): + Seconds to wait for response from url. + + Defaults to: 60. + certpath (:obj:`str`, optional): + Enable validation using a path to CA bundle instead of the system CA + bundle. + + Defaults to: None. + certwarn (:obj:`bool`, optional): + Show InsecureRequestWarning. + * True = Show warning just once. + * False = Never show warning. + * None = Show warning always. + + Defaults to: True. + certverify (:obj:`bool`, optional): + If certpath is empty, control SSL certification validation. + * True = Throw errors if SSL certificate is invalid. + * False = Throw warnings if SSL certificate is invalid. + + Defaults to: False. + http_proxy (:obj:`str`, optional): + HTTP proxy to use when connecting to url. + + Defaults to: None. + https_proxy (:obj:`str`, optional): + HTTPS proxy to use when connecting to url. + + Defaults to: None. + save_last (:obj:`bool`, optional): + Save last request & response to :attr:`_LAST_REQUEST` and + :attr:`_LAST_RESPONSE`. + + Defaults to: True. + save_history (:obj:`bool`, optional): + Add last response to :attr:`_HISTORY`. + + Defaults to: False. + kwargs: + log_level (:obj:`str`): + Control logging level of object. + + Defaults to: :attr:`constants.LOG_LEVEL_HTTP`. + log_level_urllib (:obj:`str`): + Control logging level of urllib. + + Defaults to: "warning". + log_request_attrs (:obj:`bool`): + Control request attr logging. + True = verbose, False = brief, None = none + + Defaults to: None. 
+ log_response_attrs (:obj:`bool`): + Control response attr logging. + True = verbose, False = brief, None = none + + Defaults to: None. + log_request_body (:obj:`bool`): + Control request body logging. + + Defaults to: False. + log_response_body (:obj:`bool`): + Control response body logging. + + Defaults to: False. + + """ + log_level = kwargs.get("log_level", constants.LOG_LEVEL_HTTP) + self._log = logs.get_obj_log(obj=self, level=log_level) + """:obj:`logging.Logger`: Logger for this object.""" + + if isinstance(url, ParserUrl): + self._URLPARSED = url + else: + self._URLPARSED = ParserUrl(url=url, default_scheme="https") + + self.url = self._URLPARSED.url + """:obj:`str`: URL of Axonius API.""" + + self._LAST_REQUEST = None + """:obj:`requests.PreparedRequest`: Last request sent.""" + + self._LAST_RESPONSE = None + """:obj:`requests.Response`: Last response received.""" + + self._SAVE_LAST = save_last + """:obj:`bool`: Save requests to last_request and responses to last_response.""" + + self._HISTORY = [] + """:obj:`list` of :obj:`requests.Response`: History of responses.""" + + self._SAVE_HISTORY = save_history + """:obj:`bool`: Append all responses to history.""" + + self._CONNECT_TIMEOUT = connect_timeout + """:obj:`int`: Seconds to wait for connection to url to open.""" + + self._RESPONSE_TIMEOUT = response_timeout + """:obj:`int`: Seconds to wait for response from url.""" + + self.session = requests.Session() + """:obj:`requests.Session`: Session object to use.""" + + self.session.verify = certpath if certpath else certverify + self.session.proxies = {} + self.session.proxies["https"] = https_proxy + self.session.proxies["http"] = http_proxy + + self._LOG_REQUEST_BODY = kwargs.get("log_request_body", False) + """:obj:`bool`: Log the full request body.""" + + self._LOG_RESPONSE_BODY = kwargs.get("log_response_body", False) + """:obj:`bool`: Log the full response body.""" + + self._LOG_RESPONSE_ATTRS = [] + """:obj:`list` of :obj:`str`: Request attributes 
to log.""" + + self._LOG_REQUEST_ATTRS = [] + """:obj:`list` of :obj:`str`: Response attributes to log.""" + + log_response_attrs = kwargs.get("log_response_attrs", None) + if log_response_attrs is True: + self._LOG_RESPONSE_ATTRS = constants.LOG_RESPONSE_ATTRS_VERBOSE + elif log_response_attrs is False: + self._LOG_RESPONSE_ATTRS = constants.LOG_RESPONSE_ATTRS_BRIEF + + log_request_attrs = kwargs.get("log_request_attrs", None) + if log_request_attrs is True: + self._LOG_REQUEST_ATTRS = constants.LOG_REQUEST_ATTRS_VERBOSE + elif log_request_attrs is False: + self._LOG_REQUEST_ATTRS = constants.LOG_REQUEST_ATTRS_BRIEF + + if certwarn is True: + warnings.simplefilter("once", InsecureRequestWarning) + elif certwarn is False: + warnings.simplefilter("ignore", InsecureRequestWarning) + + urllog = logging.getLogger("urllib3.connectionpool") + logs.set_level(obj=urllog, level=kwargs.get("log_level_urllib", "warning")) + + def __call__( + self, + path=None, + route=None, + method="get", + data=None, + params=None, + headers=None, + json=None, + files=None, + **kwargs + ): + """Create, prepare, and then send a request using :attr:`session`. + + Args: + path (:obj:`str`, optional): + Path to append to :attr:`url` for this request. + + Defaults to: None. + route (:obj:`str`, optional): + Route to append to path for this request. + + Defaults to: None. + method (:obj:`str`, optional): + Method to use. + + Defaults to: "get". + data (:obj:`str`, optional): + Data to send in request body. + + Defaults to: None. + params (:obj:`dict`, optional): + Parameters to encode in URL. + + Defaults to: None. + headers (:obj:`dict`, optional): + Headers to send in request. + + Defaults to: None. + json (:obj:`dict`, optional): + Dictionary to encode as JSON string and send in request. + + Defaults to: None. + files (:obj:`tuple` of :obj:`tuple`, optional): + Files to attach to request. + + Defaults to: None. + kwargs: + connect_timeout (:obj:`int`): Override object connect timeout. 
+ response_timeout (:obj:`int`): Override object response timeout. + proxies (:obj:`dict`): Override object proxies. + verify (:obj:`bool` or :obj:`str`): Override object verify. + stream (:obj:`object`): See requests docs. + cert (:obj:`str`): See requests docs. + + Returns: + :obj:`requests.Response` + + """ + url = tools.join_url(self.url, path, route) + + headers = headers or {} + headers.setdefault("User-Agent", self.user_agent) + + request = requests.Request( + url=url, + method=method, + data=data, + headers=headers, + params=params, + json=json, + files=files or [], + ) + prepped_request = self.session.prepare_request(request=request) + + if self._SAVE_LAST: + self._LAST_REQUEST = prepped_request + + if self._LOG_REQUEST_ATTRS: + msg = ", ".join(self._LOG_REQUEST_ATTRS) + msg = msg.format( + request=prepped_request, size=len(prepped_request.body or "") + ) + self._log.debug(msg) + + send_args = self.session.merge_environment_settings( + url=prepped_request.url, + proxies=kwargs.get("proxies", None), + stream=kwargs.get("stream", None), + verify=kwargs.get("verify", None), + cert=kwargs.get("cert", None), + ) + + send_args["request"] = prepped_request + send_args["timeout"] = ( + kwargs.get("connect_timeout", self._CONNECT_TIMEOUT), + kwargs.get("response_timeout", self._RESPONSE_TIMEOUT), + ) + + if self._LOG_REQUEST_BODY: + msg = "request body:\n{body}" + msg = msg.format( + body=tools.json_dump(obj=prepped_request.body, error=False) + ) + self._log.debug(msg) + + response = self.session.send(**send_args) + + if self._SAVE_LAST: + self._LAST_RESPONSE = response + + if self._SAVE_HISTORY: + self._HISTORY.append(response) + + if self._LOG_RESPONSE_ATTRS: + msg = ", ".join(self._LOG_RESPONSE_ATTRS) + msg = msg.format(response=response, size=len(response.text or "")) + self._log.debug(msg) + + if self._LOG_RESPONSE_BODY: + msg = "response body:\n{body}" + msg = msg.format(body=tools.json_dump(obj=response.text, error=False)) + self._log.debug(msg) + + return 
response + + def __str__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + return "{c.__module__}.{c.__name__}(url={url!r})".format( + c=self.__class__, url=self.url + ) + + def __repr__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + return self.__str__() + + @property + def user_agent(self): + """Build a user agent string for use in User-Agent header. + + Returns: + :obj:`str` + + """ + msg = "{name}.{clsname}/{ver}" + return msg.format( + name=__name__, clsname=self.__class__.__name__, ver=version.__version__ + ) + + +class ParserUrl(object): + """Parse a URL and ensure it has the neccessary bits.""" + + def __init__(self, url, default_scheme="https"): + """Constructor. + + Args: + url (:obj:`str`): + URL to parse + default_scheme (:obj:`str`, optional): + If no scheme in URL, use this. + + Defaults to: "https" + + Raises: + :exc:`exceptions.HttpError`: + If parsed URL winds up without a hostname, port, or scheme. + + """ + self._init_url = url + """:obj:`str`: Initial URL provided.""" + + self._init_scheme = default_scheme + """:obj:`str`: Default scheme provided.""" + + self._init_parsed = six.moves.urllib.parse.urlparse(url) + """:obj:`urllib.parse.ParseResult`: First pass of parsing URL.""" + + self.parsed = self.reparse( + parsed=self._init_parsed, default_scheme=default_scheme + ) + """:obj:`urllib.parse.ParseResult`: Second pass of parsing URL.""" + + for part in ["hostname", "port", "scheme"]: + if not getattr(self.parsed, part, None): + error = ( + "Parsed URL into {pstr!r} and no {part!r} provided in URL {url!r}" + ) + error = error.format(part=part, url=url, pstr=self.parsed_str) + raise exceptions.HttpError(error) + + def __str__(self): + """Show object info. + + Returns: + :obj:`str` + + """ + msg = "{c.__module__}.{c.__name__}({parsed})".format + return msg(c=self.__class__, parsed=self.parsed_str) + + def __repr__(self): + """Show object info. 
+ + Returns: + :obj:`str` + + """ + return self.__str__() + + @property + def hostname(self): + """Hostname part from :attr:`ParserUrl.parsed`. + + Returns: + :obj:`str` + + """ + return self.parsed.hostname + + @property + def port(self): + """Port part from :attr:`ParserUrl.parsed`. + + Returns + :obj:`int` + + """ + return int(self.parsed.port) + + @property + def scheme(self): + """Scheme part from :attr:`ParserUrl.parsed`. + + Returns: + :obj:`str` + + """ + return self.parsed.scheme + + @property + def url(self): + """Get scheme, hostname, and port from :attr:`ParserUrl.parsed`. + + Returns: + :obj:`str` + + """ + return self.unparse_base(parsed_result=self.parsed) + + @property + def url_full(self): + """Get full URL from :attr:`ParserUrl.parsed`. + + Returns: + :obj:`str` + + """ + return self.unparse_all(parsed_result=self.parsed) + + @property + def parsed_str(self): + """Create string of :attr:`ParserUrl.parsed`. + + Returns: + :obj:`str` + + """ + parsed = getattr(self, "parsed", None) + attrs = [ + "scheme", + "netloc", + "hostname", + "port", + "path", + "params", + "query", + "fragment", + ] + atmpl = "{a}={v!r}".format + attrs = [atmpl(a=a, v="{}".format(getattr(parsed, a, "")) or "") for a in attrs] + return ", ".join(attrs) + + def make_netloc(self, host, port): + """Create netloc from host and port. + + Args: + host (:obj:`str`): + Host part to use in netloc. + port (:obj:`str`): + Port part to use in netloc. + + Returns: + :obj:`str` + + """ + return ":".join([host, port]) if port else host + + def reparse(self, parsed, default_scheme=""): + """Reparse a parsed URL into a parsed URL with values fixed. + + Args: + parsed (:obj:`urllib.parse.ParseResult`): + Parsed URL to reparse. + default_scheme (:obj:`str`, optional): + If no scheme in URL, use this. 
+ + Defaults to: "" + + Returns: + :obj:`urllib.parse.ParseResult` + + """ + scheme, netloc, path, params, query, fragment = parsed + host = parsed.hostname + port = format(parsed.port or "") + + if not netloc and scheme and path and path.split("/")[0].isdigit(): + """For case: + >>> urllib.parse.urlparse('host:443/') + ParseResult( + scheme='host', netloc='', path='443/', params='', query='', fragment='' + ) + """ + host = scheme # switch host from scheme to host + port = path.split("/")[0] # remove / from path and assign to port + path = "" # empty out path + scheme = default_scheme + netloc = ":".join([host, port]) + + if not netloc and path: + """For cases: + >>> urllib.parse.urlparse('host:443') + ParseResult( + scheme='', netloc='', path='host:443', params='', query='', fragment='' + ) + >>> urllib.parse.urlparse('host') + ParseResult( + scheme='', netloc='', path='host', params='', query='', fragment='' + ) + """ + netloc, path = path, netloc + if ":" in netloc: + host, port = netloc.split(":", 1) + netloc = ":".join([host, port]) if port else host + else: + host = netloc + + scheme = scheme or default_scheme + if not scheme and port: + if format(port) == "443": + scheme = "https" + elif format(port) == "80": + scheme = "http" + + if not port: + if scheme == "https": + netloc = self.make_netloc(host, "443") + elif scheme == "http": + netloc = self.make_netloc(host, "80") + + pass2 = six.moves.urllib.parse.urlunparse( + (scheme, netloc, path, params, query, fragment) + ) + return six.moves.urllib.parse.urlparse(pass2) + + def unparse_base(self, parsed_result): + """Unparse a parsed URL into just the scheme, hostname, and port parts. + + Args: + parsed_result (:obj:`urllib.parse.ParseResult`): + Parsed URL to unparse. 
+ + Returns: + :obj:`str` + + """ + # only unparse self.parsed into url with scheme and netloc + bits = (parsed_result.scheme, parsed_result.netloc, "", "", "", "") + return six.moves.urllib.parse.urlunparse(bits) + + def unparse_all(self, parsed_result): + """Unparse a parsed URL with all the parts. + + Args: + parsed_result (:obj:`urllib.parse.ParseResult`): + Parsed URL to unparse. + + Returns: + :obj:`str` + + """ + return six.moves.urllib.parse.urlunparse(parsed_result) diff --git a/axonius_api_client/http/__init__.py b/axonius_api_client/http/__init__.py deleted file mode 100644 index e91e4e6c..00000000 --- a/axonius_api_client/http/__init__.py +++ /dev/null @@ -1,273 +0,0 @@ -# -*- coding: utf-8 -*- -"""Axonius API Client package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import warnings -import logging - -import requests - -from . import urlparser -from .. import constants -from .. import tools -from .. import version - -LOG = logging.getLogger(__name__) - - -class HttpClient(object): - """Wrapper for sending requests usings :obj:`requests.Session`.""" - - def __init__(self, url, **kwargs): - """Constructor. - - Args: - url (:obj:`str` or :obj:`urlparser.UrlParser`): - Axonius API URL. - **kwargs: - connect_timeout (:obj:`int`, optional): - Seconds to wait for connection to url to open. - - Defaults to: 5. - response_timeout (:obj:`int`, optional): - Seconds to wait for response from url. - - Defaults to: 60. - verify (:obj:`bool` or :obj:`str`, optional): - Enable/Disable SSL cert validation. - - Defaults to: False. - verbose (:obj:`bool`, optional): - Log request and response verbose attributes. - * True = Log verbose attributes, - * False = Log brief attributes. - * None = Log no attributes. - - Defaults to: None. - save_last (:obj:`bool`, optional): - Save last request & response to :attr:`last_request` and - :attr:`last_response`. 
- - Defaults to: True. - save_history (:obj:`bool`, optional): - Add last response to :attr:`history`. - - Defaults to: False. - quiet_urllib (:obj:`bool`, optional): - Disable urllib3 InsecureRequestWarning and set logging level - for urllib3.connectionpool to WARNING. - - Defaults to: True. - - Notes: - If verify is False, no SSL cert verification is done. - - If verify is True, SSL cert verification is done using the default CA bundle - for this OS. - - If verify is str, SSL cert verification is done using CA bundle at path. - - If :attr:`requests.Session.trust_env` is False or verify is False, - no OS environment variables are used at all. - - If :attr:`requests.Session.trust_env` is True and verify is True, OS env - variables $REQUESTS_CA_BUNDLE, and $CURL_CA_BUNDLE are used for path to - CA bundle if set. - - Caveat: If previous request was made with :attr:`session` and - :attr:`requests.Session.verify` is changed but - :meth:`requests.Session.close` has not been called, the new verify will - not be used. 
- - """ - self._log = LOG.getChild(self.__class__.__name__) - """:obj:`logging.Logger`: Logger for this object.""" - - if isinstance(url, urlparser.UrlParser): - url = url.url - else: - parsed_url = urlparser.UrlParser(url=url, default_scheme="https") - url = parsed_url.url - - self.url = url - """:obj:`str`: URL of Axonius API.""" - - self.last_request = None - """:obj:`requests.PreparedRequest`: Last request sent.""" - - self.last_response = None - """:obj:`requests.Response`: Last response received.""" - - self.save_last = kwargs.get("save_last", True) - """:obj:`bool`: Save requests to last_request and responses to last_response.""" - - self.history = [] - """:obj:`list` of :obj:`requests.Response`: History of responses.""" - - self.save_history = kwargs.get("save_history", False) - """:obj:`bool`: Append all responses to history.""" - - self.connect_timeout = kwargs.get("connect_timeout", 5) - """:obj:`int`: Seconds to wait for connection to url to open.""" - - self.response_timeout = kwargs.get("response_timeout", 60) - """:obj:`int`: Seconds to wait for response from url.""" - - self.session = requests.Session() - """:obj:`requests.Session`: Session object to use.""" - - self.session.verify = kwargs.get("verify", False) - - self.LOG_REQUEST_ATTRS = [] - """:obj:`list` of :obj:`str`: Attributes to log before sending request.""" - - self.LOG_RESPONSE_ATTRS = [] - """:obj:`list` of :obj:`str`: Attributes to log after receiving response.""" - - verbose = kwargs.get("verbose", False) - - if verbose is False: - self.LOG_REQUEST_ATTRS = constants.LOG_REQUEST_ATTRS_BRIEF - self.LOG_RESPONSE_ATTRS = constants.LOG_RESPONSE_ATTRS_BRIEF - elif verbose is True: - self.LOG_REQUEST_ATTRS = constants.LOG_REQUEST_ATTRS_VERBOSE - self.LOG_RESPONSE_ATTRS = constants.LOG_RESPONSE_ATTRS_VERBOSE - - quiet_urllib = kwargs.get("quiet_urllib", True) - - if quiet_urllib is True: - urlwarn = requests.urllib3.exceptions.InsecureRequestWarning - warnings.simplefilter("ignore", urlwarn) 
- - urllog = logging.getLogger("urllib3.connectionpool") - urllog.setLevel(logging.WARNING) - - def __call__( - self, - path="", - route="", - method="get", - data=None, - params=None, - headers=None, - json=None, - files=None, - **kwargs - ): - """Create, prepare, and then send a request using :attr:`session`. - - Args: - path (:obj:`str`, optional): - Path to append to :attr:`url` for this request. - method (:obj:`str`, optional): - Method to use. - - Defaults to: "get". - data (:obj:`str`, optional): - Data to send in request body. - - Defaults to: None. - params (:obj:`dict`, optional): - Parameters to encode in URL. - - Defaults to: None. - headers (:obj:`dict`, optional): - Headers to send in request. - - Defaults to: None. - - Returns: - :obj:`requests.Response` - - """ - # FUTURE: doc kwargs and files - url = tools.urljoin(self.url, path, route) - - headers = headers or {} - headers.setdefault("User-Agent", self.user_agent) - - request = requests.Request( - url=url, - method=method, - data=data, - headers=headers, - params=params, - json=json, - files=files or [], - ) - prepped_request = self.session.prepare_request(request=request) - - if self.save_last: - self.last_request = prepped_request - - if self.LOG_REQUEST_ATTRS: - msg = ", ".join(self.LOG_REQUEST_ATTRS) - msg = msg.format( - request=prepped_request, size=len(prepped_request.body or "") - ) - self._log.debug(msg) - - send_args = self.session.merge_environment_settings( - url=prepped_request.url, - proxies=kwargs.get("proxies", None), - stream=kwargs.get("stream", None), - verify=kwargs.get("verify", None), - cert=kwargs.get("cert", None), - ) - - send_args["request"] = prepped_request - send_args["timeout"] = ( - kwargs.get("connect_timeout", self.connect_timeout), - kwargs.get("response_timeout", self.response_timeout), - ) - - response = self.session.send(**send_args) - - if self.save_last: - self.last_response = response - - if self.save_history: - self.history.append(response) - - if 
self.LOG_RESPONSE_ATTRS: - msg = ", ".join(self.LOG_RESPONSE_ATTRS) - msg = msg.format(response=response, size=len(response.text or "")) - self._log.debug(msg) - - return response - - def __str__(self): - """Show object info. - - Returns: - :obj:`str` - - """ - return "{c.__module__}.{c.__name__}(url={url!r})".format( - c=self.__class__, url=self.url - ) - - def __repr__(self): - """Show object info. - - Returns: - :obj:`str` - - """ - return self.__str__() - - @property - def user_agent(self): - """Build a user agent string for use in User-Agent header. - - Returns: - :obj:`str` - - """ - msg = "{name}.{clsname}/{ver}" - return msg.format( - name=__name__, clsname=self.__class__.__name__, ver=version.__version__ - ) diff --git a/axonius_api_client/http/exceptions.py b/axonius_api_client/http/exceptions.py deleted file mode 100644 index 79dcb8f8..00000000 --- a/axonius_api_client/http/exceptions.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -"""Axonius API Client HTTP errors.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from .. import exceptions - - -class HttpError(exceptions.PackageError): - """Parent exception for all Authentication errors.""" diff --git a/axonius_api_client/http/urlparser.py b/axonius_api_client/http/urlparser.py deleted file mode 100644 index f5d6706e..00000000 --- a/axonius_api_client/http/urlparser.py +++ /dev/null @@ -1,255 +0,0 @@ -# -*- coding: utf-8 -*- -"""URL parser module.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import six - -from . import exceptions - - -class UrlParser(object): - """Parse a URL and ensure it has the neccessary bits.""" - - def __init__(self, url, default_scheme="https"): - """Constructor. 
- - Args: - url (:obj:`str`): - URL to parse - default_scheme (:obj:`str`, optional): - If no scheme in URL, use this. - - Defaults to: "https" - - Raises: - :exc:`exceptions.HttpError`: - If parsed URL winds up without a hostname, port, or scheme. - - """ - self._init_url = url - """:obj:`str`: Initial URL provided.""" - - self._init_scheme = default_scheme - """:obj:`str`: Default scheme provided.""" - - self._init_parsed = six.moves.urllib.parse.urlparse(url) - """:obj:`urllib.parse.ParseResult`: First pass of parsing URL.""" - - self.parsed = self.reparse( - parsed=self._init_parsed, default_scheme=default_scheme - ) - """:obj:`urllib.parse.ParseResult`: Second pass of parsing URL.""" - - for part in ["hostname", "port", "scheme"]: - if not getattr(self.parsed, part, None): - error = ( - "Parsed URL into {pstr!r} and no {part!r} provided in URL {url!r}" - ) - error = error.format(part=part, url=url, pstr=self.parsed_str) - raise exceptions.HttpError(error) - - def __str__(self): - """Show object info. - - Returns: - :obj:`str` - - """ - msg = "{c.__module__}.{c.__name__}({parsed})".format - return msg(c=self.__class__, parsed=self.parsed_str) - - def __repr__(self): - """Show object info. - - Returns: - :obj:`str` - - """ - return self.__str__() - - @property - def hostname(self): - """Hostname part from :attr:`UrlParser.parsed`. - - Returns: - :obj:`str` - - """ - return self.parsed.hostname - - @property - def port(self): - """Port part from :attr:`UrlParser.parsed`. - - Returns - :obj:`int` - - """ - return int(self.parsed.port) - - @property - def scheme(self): - """Scheme part from :attr:`UrlParser.parsed`. - - Returns: - :obj:`str` - - """ - return self.parsed.scheme - - @property - def url(self): - """Get scheme, hostname, and port from :attr:`UrlParser.parsed`. - - Returns: - :obj:`str` - - """ - return self.unparse_base(parsed_result=self.parsed) - - @property - def url_full(self): - """Get full URL from :attr:`UrlParser.parsed`. 
- - Returns: - :obj:`str` - - """ - return self.unparse_all(parsed_result=self.parsed) - - @property - def parsed_str(self): - """Create string of :attr:`UrlParser.parsed`. - - Returns: - :obj:`str` - - """ - parsed = getattr(self, "parsed", None) - attrs = [ - "scheme", - "netloc", - "hostname", - "port", - "path", - "params", - "query", - "fragment", - ] - atmpl = "{a}={v!r}".format - attrs = [atmpl(a=a, v="{}".format(getattr(parsed, a, "")) or "") for a in attrs] - return ", ".join(attrs) - - def make_netloc(self, host, port): - """Create netloc from host and port. - - Args: - host (:obj:`str`): - Host part to use in netloc. - port (:obj:`str`): - Port part to use in netloc. - - Returns: - :obj:`str` - - """ - return ":".join([host, port]) if port else host - - def reparse(self, parsed, default_scheme=""): - """Reparse a parsed URL into a parsed URL with values fixed. - - Args: - parsed (:obj:`urllib.parse.ParseResult`): - Parsed URL to reparse. - default_scheme (:obj:`str`, optional): - If no scheme in URL, use this. 
- - Defaults to: "" - - Returns: - :obj:`urllib.parse.ParseResult` - - """ - scheme, netloc, path, params, query, fragment = parsed - host = parsed.hostname - port = format(parsed.port or "") - - if not netloc and scheme and path and path.split("/")[0].isdigit(): - """For case: - >>> urllib.parse.urlparse('host:443/') - ParseResult( - scheme='host', netloc='', path='443/', params='', query='', fragment='' - ) - """ - host = scheme # switch host from scheme to host - port = path.split("/")[0] # remove / from path and assign to port - path = "" # empty out path - scheme = default_scheme - netloc = ":".join([host, port]) - - if not netloc and path: - """For cases: - >>> urllib.parse.urlparse('host:443') - ParseResult( - scheme='', netloc='', path='host:443', params='', query='', fragment='' - ) - >>> urllib.parse.urlparse('host') - ParseResult( - scheme='', netloc='', path='host', params='', query='', fragment='' - ) - """ - netloc, path = path, netloc - if ":" in netloc: - host, port = netloc.split(":", 1) - netloc = ":".join([host, port]) if port else host - else: - host = netloc - - scheme = scheme or default_scheme - if not scheme and port: - if format(port) == "443": - scheme = "https" - elif format(port) == "80": - scheme = "http" - - if not port: - if scheme == "https": - netloc = self.make_netloc(host, "443") - elif scheme == "http": - netloc = self.make_netloc(host, "80") - - pass2 = six.moves.urllib.parse.urlunparse( - (scheme, netloc, path, params, query, fragment) - ) - return six.moves.urllib.parse.urlparse(pass2) - - def unparse_base(self, parsed_result): - """Unparse a parsed URL into just the scheme, hostname, and port parts. - - Args: - parsed_result (:obj:`urllib.parse.ParseResult`): - Parsed URL to unparse. 
- - Returns: - :obj:`str` - - """ - # only unparse self.parsed into url with scheme and netloc - bits = (parsed_result.scheme, parsed_result.netloc, "", "", "", "") - return six.moves.urllib.parse.urlunparse(bits) - - def unparse_all(self, parsed_result): - """Unparse a parsed URL with all the parts. - - Args: - parsed_result (:obj:`urllib.parse.ParseResult`): - Parsed URL to unparse. - - Returns: - :obj:`str` - - """ - return six.moves.urllib.parse.urlunparse(parsed_result) diff --git a/axonius_api_client/logs.py b/axonius_api_client/logs.py new file mode 100644 index 00000000..d66c4b9e --- /dev/null +++ b/axonius_api_client/logs.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +"""Axonius API Client utility tools module.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import logging +import logging.handlers +import time + +from . import __package__ as PACKAGE_ROOT +from . import constants, exceptions, tools + + +def gmtime(): + """Set the logging system to use GMT for time strings.""" + logging.Formatter.converter = time.gmtime + + +def localtime(): + """Set the logging system to use local time for time strings.""" + logging.Formatter.converter = time.localtime + + +def get_obj_log(obj, level=None, **kwargs): + """Pass.""" + logger = kwargs.get("logger", logging.getLogger(obj.__class__.__module__)) + log = logger.getChild(obj.__class__.__name__) + set_level(obj=log, level=level) + return log + + +def set_level(obj, level=None): + """Set a logger or handler to a log level. + + Args: + obj (:obj:`logging.Logger` or :obj:`logging.Handler`): + Object to set lvl on. + level (:obj:`str` or :obj:`int`): + Level to set obj to. + + """ + if level: + obj.setLevel(getattr(logging, str_level(level=level))) + + +def str_level(level): + """Get a logging level in str format. + + Args: + level (:obj:`str` or :obj:`int`): + Level to get str format of. 
+ + Returns: + :obj:`str` + + """ + if tools.is_int(obj=level, digit=True): + level_mapped = logging.getLevelName(int(level)) + if hasattr(logging, level_mapped): + return level_mapped + + if isinstance(level, tools.STR): + if hasattr(logging, level.upper()): + return level.upper() + + error = "Invalid logging level {level!r}, must be one of {lstr} or {lint}" + error = error.format( + level=level, + lstr=constants.LOG_LEVELS_STR_CSV, + lint=constants.LOG_LEVELS_INT_CSV, + ) + raise exceptions.ToolsError(error) + + +def add_stderr(obj, **kwargs): + """Add a StreamHandler to a logger object.""" + level = kwargs.get("level", constants.LOG_LEVEL_CONSOLE) + hname = kwargs.get("hname", constants.LOG_NAME_STDERR) + fmt = kwargs.get("fmt", constants.LOG_FMT_CONSOLE) + datefmt = kwargs.get("datefmt", constants.LOG_DATEFMT_CONSOLE) + htype = logging.StreamHandler + + return add_handler( + obj=obj, hname=hname, htype=htype, level=level, fmt=fmt, datefmt=datefmt + ) + + +def add_stdout(obj, **kwargs): + """Add a StreamHandler to a logger object.""" + level = kwargs.get("level", constants.LOG_LEVEL_CONSOLE) + hname = kwargs.get("hname", constants.LOG_NAME_STDOUT) + fmt = kwargs.get("fmt", constants.LOG_FMT_CONSOLE) + datefmt = kwargs.get("datefmt", constants.LOG_DATEFMT_CONSOLE) + htype = logging.StreamHandler + + return add_handler( + obj=obj, hname=hname, htype=htype, level=level, fmt=fmt, datefmt=datefmt + ) + + +def add_file(obj, **kwargs): + """Pass.""" + level = kwargs.get("level", constants.LOG_LEVEL_FILE) + hname = kwargs.get("hname", constants.LOG_NAME_FILE) + file_path = kwargs.get("file_path", constants.LOG_FILE_PATH) + file_name = kwargs.get("file_name", constants.LOG_FILE_NAME) + file_path_mode = kwargs.get("file_path_mode", constants.LOG_FILE_PATH_MODE) + max_mb = kwargs.get("max_mb", constants.LOG_FILE_MAX_MB) + max_files = kwargs.get("max_files", constants.LOG_FILE_MAX_FILES) + fmt = kwargs.get("fmt", constants.LOG_FMT_FILE) + datefmt = kwargs.get("datefmt", 
constants.LOG_DATEFMT_FILE) + htype = logging.handlers.RotatingFileHandler + + path = tools.path(obj=file_path) + path.mkdir(mode=file_path_mode, parents=True, exist_ok=True) + + handler = add_handler( + obj=obj, + level=level, + htype=htype, + fmt=fmt, + datefmt=datefmt, + hname=hname, + filename=format(path / file_name), + maxBytes=max_mb * 1024 * 1024, + backupCount=max_files, + ) + handler.PATH = path + return handler + + +def add_null(obj, traverse=True, **kwargs): + """Add a Null handler to a logger if it has no handlers.""" + hname = kwargs.get("hname", "NULL") + found = find_handlers(obj=obj, hname=hname, traverse=traverse) + htype = logging.NullHandler + if found: + return None + return add_handler(obj=obj, htype=htype, hname=hname, fmt="", datefmt="", level="") + + +def add_handler(obj, htype, level, hname, fmt, datefmt, **kwargs): + """Pass.""" + handler = htype(**kwargs) + + if hname: + handler.name = hname + + if fmt: + handler.setFormatter(logging.Formatter(fmt=fmt, datefmt=datefmt)) + + if level: + set_level(obj=handler, level=level) + + obj.addHandler(handler) + return handler + + +def del_stderr(obj, traverse=True, **kwargs): + """Remove a StreamHandler from a logger if found.""" + hname = kwargs.get("hname", constants.LOG_NAME_STDERR) + htype = logging.StreamHandler + return del_handler(obj=obj, hname=hname, htype=htype, traverse=traverse) + + +def del_stdout(obj, traverse=True, **kwargs): + """Remove a StreamHandler from a logger if found.""" + hname = kwargs.get("hname", constants.LOG_NAME_STDOUT) + htype = logging.StreamHandler + return del_handler(obj=obj, hname=hname, htype=htype, traverse=traverse) + + +def del_file(obj, traverse=True, **kwargs): + """Remove a RotatingFileHandler from a logger if found.""" + hname = kwargs.get("hname", constants.LOG_NAME_FILE) + htype = logging.handlers.RotatingFileHandler + return del_handler(obj=obj, hname=hname, htype=htype, traverse=traverse) + + +def del_null(obj, traverse=True, **kwargs): + """Remove a 
Null handler from a logger if found.""" + hname = kwargs.get("hname", "NULL") + htype = logging.NullHandler + return del_handler(obj=obj, hname=hname, htype=htype, traverse=traverse) + + +def del_handler(obj, hname="", htype="", traverse=True): + """Pass.""" + found = find_handlers(obj=obj, hname=hname, htype=htype, traverse=traverse) + for name, handlers in found.items(): + for handler in handlers: + logging.getLogger(name).removeHandler(handler) + return found + + +def find_handlers(obj, hname="", htype=None, traverse=True): + """Find all handlers by traversing up the tree from obj.""" + handlers = {} + + for handler in obj.handlers: + match_name = hname and handler.name == hname + match_type = htype and isinstance(handler, htype) + + if match_name or match_type: + handlers[obj.name] = handlers.get(obj.name, []) + + if handler not in handlers[obj.name]: + handlers[obj.name].append(handler) + + if obj.parent and traverse: + found = find_handlers( + obj=obj.parent, hname=hname, htype=htype, traverse=traverse + ) + handlers.update(found) + + return handlers + + +LOG = logging.getLogger(PACKAGE_ROOT) +add_null(obj=LOG) +gmtime() diff --git a/axonius_api_client/tests/__init__.py b/axonius_api_client/tests/__init__.py index 16a5cd83..10e5a108 100644 --- a/axonius_api_client/tests/__init__.py +++ b/axonius_api_client/tests/__init__.py @@ -1,6 +1,3 @@ # -*- coding: utf-8 -*- """Test suite.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import absolute_import, division, print_function, unicode_literals diff --git a/axonius_api_client/tests/api/__init__.py b/axonius_api_client/tests/api/__init__.py deleted file mode 100644 index 16a5cd83..00000000 --- a/axonius_api_client/tests/api/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite.""" -from __future__ import absolute_import -from __future__ import division -from 
__future__ import print_function -from __future__ import unicode_literals diff --git a/axonius_api_client/tests/api/test_devices.py b/axonius_api_client/tests/api/test_devices.py deleted file mode 100644 index 0ba5c71e..00000000 --- a/axonius_api_client/tests/api/test_devices.py +++ /dev/null @@ -1,297 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite for axonius_api_client.auth.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import pytest -import requests -import six - -import axonius_api_client - - -@pytest.mark.needs_url -@pytest.mark.needs_any_creds -@pytest.mark.parametrize("creds", ["creds_user", "creds_key"], indirect=True) -class TestDevices(object): - """Test axonius_api_client.api.Devices.""" - - @pytest.fixture(scope="session") - def api_client(self, api_url, creds): - """Get an API client.""" - auth_cls = creds["cls"] - creds = {k: v for k, v in creds.items() if k != "cls"} - if not any(list(creds.values())): - pytest.skip("No credentials provided for {}: {}".format(auth_cls, creds)) - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = auth_cls(http_client=http_client, **creds) - auth.login() - api_client = axonius_api_client.api.Devices(auth=auth) - return api_client - - def test__request_json(self, api_client): - """Test that JSON is returned when is_json=True.""" - response = api_client._request( - path=axonius_api_client.api.routers.ApiV1.devices.count, - method="get", - raw=False, - is_json=True, - check_status=True, - ) - assert not isinstance(response, requests.Response) - assert not isinstance(response, six.string_types) - - def test__request_raw(self, api_client): - """Test that response obj is returned when raw=True.""" - response = api_client._request( - path=axonius_api_client.api.routers.ApiV1.devices.count, - method="get", - raw=True, - is_json=True, - check_status=True, - ) - assert isinstance(response, 
requests.Response) - - def test__request_text(self, api_client): - """Test that str is returned when raw=False and is_json=False.""" - response = api_client._request( - path=axonius_api_client.api.routers.ApiV1.devices.count, - method="get", - raw=False, - is_json=False, - check_status=True, - ) - assert isinstance(response, six.string_types) - - def test_not_logged_in(self, api_url, creds): - """Test exc thrown when auth method not logged in.""" - auth_cls = creds["cls"] - creds = {k: v for k, v in creds.items() if k != "cls"} - if not any(list(creds.values())): - pytest.skip("No credentials provided for {}: {}".format(auth_cls, creds)) - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = auth_cls(http_client=http_client, **creds) - with pytest.raises(axonius_api_client.auth.exceptions.NotLoggedIn): - axonius_api_client.api.Devices(auth=auth) - - # TODO: better _request exc tests - def test__request_invalid(self, api_client): - """Test private _request method throws ResponseError.""" - with pytest.raises(axonius_api_client.api.exceptions.ResponseError): - api_client._request(path="invalid_route") - - def test_str_repr(self, api_client): - """Test str/repr has URL.""" - assert "auth" in format(api_client) - assert "auth" in repr(api_client) - - def test_get_fields(self, api_client): - """Test devices/fields API call.""" - fields = api_client.get_fields() - assert isinstance(fields, dict) - assert "specific" in fields - assert "generic" in fields - assert "schema" in fields - - def test__get_no_query_no_fields(self, api_client): - """Test private get method without query or fields.""" - rows = api_client._get(query=None, fields=None, row_start=0, page_size=1) - assert "assets" in rows - for row in rows["assets"]: - assert "adapters" in row - assert "specific_data" in row - - @pytest.mark.parametrize( - "fields", - [ - [ - "specific_data.data.hostname", - "specific_data.data.network_interfaces.ips", - ], - 
"specific_data.data.hostname,specific_data.data.network_interfaces.ips", - ], - ) - @pytest.mark.parametrize( - "query", [None, '(specific_data.data.id == ({"$exists":true,"$ne": ""}))'] - ) - def test__get_queries_fields(self, api_client, query, fields): - """Test private get method with queries and fields.""" - rows = api_client._get(query=query, fields=fields, row_start=0, page_size=1) - assert "assets" in rows - for row in rows["assets"]: - assert "adapters" in row - assert "internal_axon_id" in row - assert "specific_data.data.hostname" in row - assert "specific_data.data.network_interfaces.ips" in row - - @pytest.mark.parametrize( - "query", [None, '(specific_data.data.id == ({"$exists":true,"$ne": ""}))'] - ) - def test_get_no_fields(self, api_client, query): - """Test get method with default fields.""" - for row in api_client.get(query=query, page_size=1, page_count=4): - assert "specific_data.data.hostname" in row - assert "specific_data.data.network_interfaces.ips" in row - - @pytest.mark.parametrize( - "query", [None, '(specific_data.data.id == ({"$exists":true,"$ne": ""}))'] - ) - @pytest.mark.parametrize("generic_fields", [["all"], ["specific_data.data"]]) - def test_get_all_fields(self, api_client, query, generic_fields): - """Test get method with all fields.""" - for row in api_client.get( - query=query, page_size=2, page_count=1, generic=generic_fields - ): - for data in row["specific_data.data"]: - assert isinstance(data, tuple([dict] + list(six.string_types))) - assert data - - def test_get_by_name_valid(self, api_client): - """Test get_by_name with a valid host name.""" - rows = list(api_client.get(page_count=1, page_size=1)) - if not rows: - msg = "No devices on system, unable to test" - pytest.skip(msg) - - name = rows[0]["specific_data.data.hostname"] - name = name[0] if isinstance(name, (list, tuple)) else name - row = api_client.get_by_name(value=name) - assert isinstance(row, dict) - - def test_get_by_name_valid_regex(self, api_client): - 
"""Test get_by_name with a valid host name.""" - rows = list(api_client.get(page_count=1, page_size=1)) - if not rows: - msg = "No devices on system, unable to test" - pytest.skip(msg) - - name = rows[0]["specific_data.data.hostname"] - name = name[0] if isinstance(name, (list, tuple)) else name - row = api_client.get_by_name(value=name, regex=True) - assert isinstance(row, dict) - - def test_get_by_name_invalid(self, api_client): - """Test get_by_name with a valid host name.""" - with pytest.raises(axonius_api_client.api.exceptions.ObjectNotFound): - api_client.get_by_name(value="this_should_not_exist_yo") - - def test_get_min_max_1_notfound(self, api_client): - """Test get_by_name with a valid host name.""" - with pytest.raises(axonius_api_client.api.exceptions.ObjectNotFound): - list(api_client.get(row_count_min=1, row_count_max=1)) - - def test_get_min_toofew(self, api_client): - """Test get_by_name with a valid host name.""" - with pytest.raises(axonius_api_client.api.exceptions.TooFewObjectsFound): - list(api_client.get(row_count_min=9999999999)) - - def test_get_min_toomany(self, api_client): - """Test get_by_name with a valid host name.""" - with pytest.raises(axonius_api_client.api.exceptions.TooManyObjectsFound): - list(api_client.get(row_count_max=0)) - - def test_get_by_id_valid(self, api_client): - """Test get_by_id with a valid row id.""" - rows = api_client._get(page_size=1) - for row in rows["assets"]: - full_row = api_client.get_by_id(id=row["internal_axon_id"]) - assert "generic" in full_row - assert "specific" in full_row - - def test_get_by_id_invalid(self, api_client): - """Test get_by_id with an invalid row id.""" - with pytest.raises(axonius_api_client.api.exceptions.ObjectNotFound): - api_client.get_by_id(id="this_wont_work_yo") - - def test_get_count(self, api_client): - """Test devices/count API call.""" - response = api_client.get_count() - assert isinstance(response, six.integer_types) - - def test_get_count_query(self, api_client): - 
"""Test devices/count API call with query (unable to assume greater than 0).""" - response = api_client.get_count( - query='(specific_data.data.id == ({"$exists":true,"$ne": ""}))' - ) - assert isinstance(response, six.integer_types) - - def test_get_count_query_0(self, api_client): - """Test devices/count API call with query that should return 0.""" - response = api_client.get_count( - query='(specific_data.data.id == ({"$exists":false,"$eq": "dsaf"}))' - ) - assert response == 0 - - def test__get_saved_query(self, api_client): - """Test private get_saved_query.""" - rows = api_client._get_saved_query() - assert isinstance(rows, dict) - assert "assets" in rows - assert "page" in rows - - def test_get_saved_query_by_name_invalid(self, api_client): - """Test get_saved_query_by_name.""" - with pytest.raises(axonius_api_client.api.exceptions.ObjectNotFound): - api_client.get_saved_query_by_name(name="this_wont_exist_yo", regex=False) - - @pytest.fixture - def test_create_saved_query_badwolf123(self, api_client): - """Test create_saved_query.""" - name = "badwolf 123" - response = api_client.create_saved_query( - name=name, query='(specific_data.data.id == ({"$exists":true,"$ne": ""}))' - ) - assert isinstance(response, six.string_types) - return name, response - - @pytest.fixture - def test_create_saved_query_badwolf456(self, api_client): - """Test create_saved_query.""" - name = "badwolf 456" - response = api_client.create_saved_query( - name=name, - query='(specific_data.data.id == ({"$exists":true,"$ne": ""}))', - sort_field="id", - ) - assert isinstance(response, six.string_types) - return name, response - - def test_get_saved_query_by_name_valid( - self, api_client, test_create_saved_query_badwolf123 - ): - """Test get_saved_query_by_name.""" - name, id = test_create_saved_query_badwolf123 - rows = api_client.get_saved_query_by_name(name=name, regex=True) - assert isinstance(rows, (list, tuple)) - assert len(rows) > 0 - for row in rows: - assert "name" in row - 
assert "uuid" in row - - @pytest.mark.parametrize("query", [None, 'name == regex("badwolf", "i")']) - def test_get_saved_query( - self, api_client, query, test_create_saved_query_badwolf123 - ): - """Test get_saved_query.""" - rows = list(api_client.get_saved_query(query=query, page_size=1)) - assert isinstance(rows, (list, tuple)) - assert len(rows) > 0 - for row in rows: - assert "name" in row - assert "uuid" in row - - def test_delete_saved_query_by_name( - self, api_client, test_create_saved_query_badwolf123 - ): - """Test delete_saved_query_by_name.""" - name, id = test_create_saved_query_badwolf123 - rows = api_client.delete_saved_query_by_name(name=name) - assert not rows - - def test__delete_saved_query(self, api_client, test_create_saved_query_badwolf456): - """Test private _delete_saved_query.""" - name, id = test_create_saved_query_badwolf456 - rows = api_client._delete_saved_query(ids=[id]) - assert not rows diff --git a/axonius_api_client/tests/api/test_users.py b/axonius_api_client/tests/api/test_users.py deleted file mode 100644 index cb6b550e..00000000 --- a/axonius_api_client/tests/api/test_users.py +++ /dev/null @@ -1,287 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite for axonius_api_client.auth.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import pytest -import requests -import six - -import axonius_api_client - - -@pytest.mark.needs_url -@pytest.mark.needs_any_creds -@pytest.mark.parametrize("creds", ["creds_user", "creds_key"], indirect=True) -class TestUsers(object): - """Test axonius_api_client.api.Users.""" - - @pytest.fixture(scope="session") - def api_client(self, api_url, creds): - """Get an API client.""" - auth_cls = creds["cls"] - creds = {k: v for k, v in creds.items() if k != "cls"} - if not any(list(creds.values())): - pytest.skip("No credentials provided for {}: {}".format(auth_cls, creds)) - http_client = 
axonius_api_client.http.HttpClient(url=api_url) - auth = auth_cls(http_client=http_client, **creds) - auth.login() - api_client = axonius_api_client.api.Users(auth=auth) - return api_client - - def test__request_json(self, api_client): - """Test that JSON is returned when is_json=True.""" - response = api_client._request( - path=axonius_api_client.api.routers.ApiV1.users.count, - method="get", - raw=False, - is_json=True, - check_status=True, - ) - assert not isinstance(response, requests.Response) - assert not isinstance(response, six.string_types) - - def test__request_raw(self, api_client): - """Test that response obj is returned when raw=True.""" - response = api_client._request( - path=axonius_api_client.api.routers.ApiV1.users.count, - method="get", - raw=True, - is_json=True, - check_status=True, - ) - assert isinstance(response, requests.Response) - - def test__request_text(self, api_client): - """Test that str is returned when raw=False and is_json=False.""" - response = api_client._request( - path=axonius_api_client.api.routers.ApiV1.users.count, - method="get", - raw=False, - is_json=False, - check_status=True, - ) - assert isinstance(response, six.string_types) - - def test_not_logged_in(self, api_url, creds): - """Test exc thrown when auth method not logged in.""" - auth_cls = creds["cls"] - creds = {k: v for k, v in creds.items() if k != "cls"} - if not any(list(creds.values())): - pytest.skip("No credentials provided for {}: {}".format(auth_cls, creds)) - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = auth_cls(http_client=http_client, **creds) - with pytest.raises(axonius_api_client.auth.exceptions.NotLoggedIn): - axonius_api_client.api.Users(auth=auth) - - def test_str_repr(self, api_client): - """Test str/repr has URL.""" - assert "auth" in format(api_client) - assert "auth" in repr(api_client) - - def test_get_fields(self, api_client): - """Test devices/fields API call.""" - fields = api_client.get_fields() - assert 
isinstance(fields, dict) - assert "specific" in fields - assert "generic" in fields - assert "schema" in fields - - def test__get_no_query_no_fields(self, api_client): - """Test private get method without query or fields.""" - rows = api_client._get(query=None, fields=None, row_start=0, page_size=1) - assert "assets" in rows - for row in rows["assets"]: - assert "adapters" in row - assert "specific_data" in row - - @pytest.mark.parametrize( - "fields", - [ - ["specific_data.data.username", "specific_data.data.id"], - "specific_data.data.username,specific_data.data.id", - ], - ) - @pytest.mark.parametrize( - "query", [None, '(specific_data.data.id == ({"$exists":true,"$ne": ""}))'] - ) - def test__get_queries_fields(self, api_client, query, fields): - """Test private get method with queries and fields.""" - rows = api_client._get(query=query, fields=fields, row_start=0, page_size=1) - assert "assets" in rows - for row in rows["assets"]: - assert "adapters" in row - assert "internal_axon_id" in row - assert "specific_data.data.username" in row - assert "specific_data.data.id" in row - - @pytest.mark.parametrize( - "query", [None, '(specific_data.data.id == ({"$exists":true,"$ne": ""}))'] - ) - def test_get_no_fields(self, api_client, query): - """Test get method with default fields.""" - for row in api_client.get(query=query, page_size=1, page_count=4): - assert "specific_data.data.username" in row - - @pytest.mark.parametrize( - "query", [None, '(specific_data.data.id == ({"$exists":true,"$ne": ""}))'] - ) - @pytest.mark.parametrize("generic_fields", [["all"], ["specific_data.data"]]) - def test_get_all_fields(self, api_client, query, generic_fields): - """Test get method with all fields.""" - for row in api_client.get( - query=query, page_size=1, page_count=2, generic=generic_fields - ): - for data in row["specific_data.data"]: - assert isinstance(data, tuple([dict] + list(six.string_types))) - assert data - - def test_get_by_name_valid(self, api_client): - """Test 
get_by_name with a valid host name.""" - rows = list(api_client.get(page_count=1, page_size=1)) - if not rows: - msg = "No users on system, unable to test" - pytest.skip(msg) - - name = rows[0]["specific_data.data.username"] - name = name[0] if isinstance(name, (list, tuple)) else name - row = api_client.get_by_name(value=name) - assert isinstance(row, dict) - - def test_get_by_name_valid_regex(self, api_client): - """Test get_by_name with a valid host name.""" - rows = list(api_client.get(page_count=1, page_size=1)) - if not rows: - msg = "No users on system, unable to test" - pytest.skip(msg) - - name = rows[0]["specific_data.data.username"] - name = name[0] if isinstance(name, (list, tuple)) else name - row = api_client.get_by_name(value=name, regex=True) - assert isinstance(row, dict) - - def test_get_by_name_invalid(self, api_client): - """Test get_by_name with a valid host name.""" - with pytest.raises(axonius_api_client.api.exceptions.ObjectNotFound): - api_client.get_by_name(value="this_should_not_exist_yo") - - def test_get_min_max_1_notfound(self, api_client): - """Test get_by_name with a valid host name.""" - with pytest.raises(axonius_api_client.api.exceptions.ObjectNotFound): - list(api_client.get(row_count_min=1, row_count_max=1)) - - def test_get_min_toofew(self, api_client): - """Test get_by_name with a valid host name.""" - with pytest.raises(axonius_api_client.api.exceptions.TooFewObjectsFound): - list(api_client.get(row_count_min=9999999999)) - - def test_get_min_toomany(self, api_client): - """Test get_by_name with a valid host name.""" - with pytest.raises(axonius_api_client.api.exceptions.TooManyObjectsFound): - list(api_client.get(row_count_max=0)) - - def test_get_by_id_valid(self, api_client): - """Test get_by_id with a valid row id.""" - rows = api_client._get(page_size=1) - for row in rows["assets"]: - full_row = api_client.get_by_id(id=row["internal_axon_id"]) - assert "generic" in full_row - assert "specific" in full_row - - def 
test_get_by_id_invalid(self, api_client): - """Test get_by_id with an invalid row id.""" - with pytest.raises(axonius_api_client.api.exceptions.ObjectNotFound): - api_client.get_by_id(id="this_wont_work_yo") - - def test_get_count(self, api_client): - """Test devices/count API call.""" - response = api_client.get_count() - assert isinstance(response, six.integer_types) - - def test_get_count_query(self, api_client): - """Test devices/count API call with query (unable to assume greater than 0).""" - response = api_client.get_count( - query='(specific_data.data.id == ({"$exists":true,"$ne": ""}))' - ) - assert isinstance(response, six.integer_types) - - def test_get_count_query_0(self, api_client): - """Test devices/count API call with query that should return 0.""" - response = api_client.get_count( - query='(specific_data.data.id == ({"$exists":false,"$eq": "dsaf"}))' - ) - assert response == 0 - - def test__get_saved_query(self, api_client): - """Test private get_saved_query.""" - rows = api_client._get_saved_query() - assert isinstance(rows, dict) - assert "assets" in rows - assert "page" in rows - - def test_get_saved_query_by_name_invalid(self, api_client): - """Test get_saved_query_by_name.""" - with pytest.raises(axonius_api_client.api.exceptions.ObjectNotFound): - api_client.get_saved_query_by_name(name="this_wont_exist_yo", regex=False) - - @pytest.fixture - def test_create_saved_query_badwolf123(self, api_client): - """Test create_saved_query.""" - name = "badwolf 123" - response = api_client.create_saved_query( - name=name, query='(specific_data.data.id == ({"$exists":true,"$ne": ""}))' - ) - assert isinstance(response, six.string_types) - return name, response - - @pytest.fixture - def test_create_saved_query_badwolf456(self, api_client): - """Test create_saved_query.""" - name = "badwolf 456" - response = api_client.create_saved_query( - name=name, - query='(specific_data.data.id == ({"$exists":true,"$ne": ""}))', - sort_field="id", - ) - assert 
isinstance(response, six.string_types) - return name, response - - def test_get_saved_query_by_name_valid( - self, api_client, test_create_saved_query_badwolf123 - ): - """Test get_saved_query_by_name.""" - name, id = test_create_saved_query_badwolf123 - rows = api_client.get_saved_query_by_name(name=name, regex=True) - assert isinstance(rows, (list, tuple)) - assert len(rows) > 0 - for row in rows: - assert "name" in row - assert "uuid" in row - - @pytest.mark.parametrize("query", [None, 'name == regex("badwolf", "i")']) - def test_get_saved_query( - self, api_client, query, test_create_saved_query_badwolf123 - ): - """Test get_saved_query.""" - rows = list(api_client.get_saved_query(query=query, page_size=1)) - assert isinstance(rows, (list, tuple)) - assert len(rows) > 0 - for row in rows: - assert "name" in row - assert "uuid" in row - - def test_delete_saved_query_by_name( - self, api_client, test_create_saved_query_badwolf123 - ): - """Test delete_saved_query_by_name.""" - name, id = test_create_saved_query_badwolf123 - rows = api_client.delete_saved_query_by_name(name=name) - assert not rows - - def test__delete_saved_query(self, api_client, test_create_saved_query_badwolf456): - """Test private _delete_saved_query.""" - name, id = test_create_saved_query_badwolf456 - rows = api_client._delete_saved_query(ids=[id]) - assert not rows diff --git a/axonius_api_client/tests/api/test_utils.py b/axonius_api_client/tests/api/test_utils.py deleted file mode 100644 index 073fa792..00000000 --- a/axonius_api_client/tests/api/test_utils.py +++ /dev/null @@ -1,143 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite for axonius_api_client.tools.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import pytest - -import axonius_api_client - - -FAKE_FIELDS = { - "generic": [ - {"name": "specific_data.data.hostname", "title": "Host Name", "type": "string"} - ], - 
"specific": { - "known_adapter": [ - { - "name": "adapters_data.known_adapter.hostname", - "title": "Host Name", - "type": "string", - } - ] - }, -} - - -def test_max_page_size_invalid(): - """Test exc thrown when page_size > constants.MAX_PAGE_SIZE.""" - page_size = axonius_api_client.constants.MAX_PAGE_SIZE + 100 - with pytest.raises(axonius_api_client.api.exceptions.ApiError): - axonius_api_client.api.utils.check_max_page_size(page_size=page_size) - - -@pytest.mark.parametrize("adapter", ["known", "known_adapter"]) -class TestFindAdapter(object): - """Test axonius_api_client.api.utils.find_adapter.""" - - @pytest.mark.parametrize("known_names", [None, ["known_adapter"]]) - def test_valid(self, adapter, known_names): - """Test that _adapter gets added properly.""" - response = axonius_api_client.api.utils.find_adapter( - name=adapter, known_names=known_names - ) - assert response == "known_adapter" - - def test_invalid(self, adapter): - """Test exc thrown when name not in known_names.""" - with pytest.raises(axonius_api_client.api.exceptions.UnknownAdapterName): - axonius_api_client.api.utils.find_adapter( - name=adapter, known_names=["other_known_adapter"] - ) - - -class TestFindFieldGeneric(object): - """Test axonius_api_client.api.utils.find_field with generic fields.""" - - @pytest.mark.parametrize("fields", [None, FAKE_FIELDS]) - @pytest.mark.parametrize("field", ["hostname", "specific_data.data.hostname"]) - def test_valid(self, field, fields): - """Test field name found and/or prefixed properly.""" - response = axonius_api_client.api.utils.find_field( - name=field, adapter="generic", fields=fields - ) - assert response == "specific_data.data.hostname" - - def test_invalid(self): - """Test exc thrown when field not in FAKE_FIELDS.""" - with pytest.raises(axonius_api_client.api.exceptions.UnknownFieldName): - axonius_api_client.api.utils.find_field( - name="unknown_field", adapter="generic", fields=FAKE_FIELDS - ) - - -@pytest.mark.parametrize("adapter", 
["known", "known_adapter"]) -class TestFindFieldAdapter(object): - """Test axonius_api_client.api.utils.find_field with adapter fields.""" - - @pytest.mark.parametrize("fields", [None, FAKE_FIELDS]) - @pytest.mark.parametrize( - "field", ["hostname", "adapters_data.known_adapter.hostname"] - ) - def test_valid(self, adapter, field, fields): - """Test field name found and/or prefixed properly.""" - response = axonius_api_client.api.utils.find_field( - name=field, fields=fields, adapter=adapter - ) - assert response == "adapters_data.known_adapter.hostname" - - def test_invalid(self, adapter): - """Test exc thrown when field not in FAKE_FIELDS.""" - with pytest.raises(axonius_api_client.api.exceptions.UnknownFieldName): - axonius_api_client.api.utils.find_field( - name="unknown_field", fields=FAKE_FIELDS, adapter=adapter - ) - - -class TestValidateFields(object): - """Test axonius_api_client.api.utils.validate_fields.""" - - @pytest.mark.parametrize( - "fields", - [ - { - "fields": {"generic": ["hostname", "specific_data.data.hostname"]}, - "expected": ["specific_data.data.hostname"], - }, - { - "fields": {"generic": ["hostname"]}, - "expected": ["specific_data.data.hostname"], - }, - { - "fields": {"generic": ["hostname"], "known_adapter": ["hostname"]}, - "expected": [ - "specific_data.data.hostname", - "adapters_data.known_adapter.hostname", - ], - }, - ], - ) - def test__validate_fields_valid(self, fields): - """Test valid dup fields dont show up and prefix gets added properly.""" - response = axonius_api_client.api.utils.validate_fields( - known_fields=FAKE_FIELDS, ignore_non_list=True, **fields["fields"] - ) - assert response == fields["expected"] - - def test__validate_fields_invalid_field(self): - """Test exc thrown when invalid generic field supplied.""" - with pytest.raises(axonius_api_client.api.exceptions.UnknownFieldName): - axonius_api_client.api.utils.validate_fields( - known_fields=FAKE_FIELDS, generic=["this_wont_exist_yo"] - ) - - def 
test__validate_fields_invalid_adapter(self): - """Test exc thrown when invalid adapter name supplied.""" - with pytest.raises(axonius_api_client.api.exceptions.UnknownAdapterName): - axonius_api_client.api.utils.validate_fields( - known_fields=FAKE_FIELDS, - generic=["hostname"], - this_wont_exist_yo=["hostname"], - ) diff --git a/axonius_api_client/tests/auth/__init__.py b/axonius_api_client/tests/auth/__init__.py deleted file mode 100644 index 16a5cd83..00000000 --- a/axonius_api_client/tests/auth/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals diff --git a/axonius_api_client/tests/auth/test_key.py b/axonius_api_client/tests/auth/test_key.py deleted file mode 100644 index 1a64d284..00000000 --- a/axonius_api_client/tests/auth/test_key.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite for axonius_api_client.auth.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import pytest - -import axonius_api_client - - -@pytest.mark.needs_url -class TestAuthKey(object): - """Test axonius_api_client.auth.AuthKey.""" - - auth_cls = axonius_api_client.auth.AuthKey - bad_creds = {"key": "bad", "secret": "bad"} - - @pytest.mark.needs_key_creds - def test_valid_creds(self, api_url, creds_key): - """Test str/repr has URL.""" - creds = {k: v for k, v in creds_key.items() if k != "cls"} - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **creds) - auth.login() - assert auth.is_logged_in - assert "url" in format(auth) - assert "url" in repr(auth) - - @pytest.mark.needs_key_creds - def test_logout(self, api_url, creds_key): - """Test no exc when logout() after login().""" - creds = {k: v for k, v in 
creds_key.items() if k != "cls"} - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **creds) - auth.login() - auth.logout() - assert not auth.is_logged_in - - @pytest.mark.needs_key_creds - def test_login_already_logged_in(self, api_url, creds_key): - """Test exc thrown when login() and login() already called.""" - creds = {k: v for k, v in creds_key.items() if k != "cls"} - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **creds) - auth.login() - with pytest.raises(axonius_api_client.auth.exceptions.AlreadyLoggedIn): - auth.login() - - @pytest.mark.needs_key_creds - def test_logout_not_logged_in(self, api_url, creds_key): - """Test exc thrown when logout() but login() not called.""" - creds = {k: v for k, v in creds_key.items() if k != "cls"} - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **creds) - with pytest.raises(axonius_api_client.auth.exceptions.NotLoggedIn): - auth.logout() - - def test_invalid_creds(self, api_url): - """Test str/repr has URL.""" - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **self.bad_creds) - with pytest.raises(axonius_api_client.auth.exceptions.InvalidCredentials): - auth.login() - - def test_http_lock_fail(self, api_url): - """Test using an http client from another authmethod throws exc.""" - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth1 = self.auth_cls(http_client=http_client, **self.bad_creds) - assert auth1.http_client._auth_lock - with pytest.raises(axonius_api_client.auth.exceptions.AuthError): - self.auth_cls(http_client=http_client, **self.bad_creds) diff --git a/axonius_api_client/tests/auth/test_user.py b/axonius_api_client/tests/auth/test_user.py deleted file mode 100644 index 6f5468de..00000000 --- a/axonius_api_client/tests/auth/test_user.py +++ 
/dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite for axonius_api_client.auth.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import pytest - -import axonius_api_client - - -@pytest.mark.needs_url -class TestAuthUser(object): - """Test axonius_api_client.auth.AuthUser.""" - - auth_cls = axonius_api_client.auth.AuthUser - bad_creds = {"username": "bad", "password": "bad"} - - @pytest.mark.needs_user_creds - def test_valid_creds(self, api_url, creds_user): - """Test str/repr has URL.""" - creds = {k: v for k, v in creds_user.items() if k != "cls"} - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **creds) - auth.login() - assert auth.is_logged_in - assert "url" in format(auth) - assert "url" in repr(auth) - - @pytest.mark.needs_user_creds - def test_logout(self, api_url, creds_user): - """Test no exc when logout() after login().""" - creds = {k: v for k, v in creds_user.items() if k != "cls"} - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **creds) - auth.login() - auth.logout() - assert not auth.is_logged_in - - @pytest.mark.needs_user_creds - def test_logout_not_logged_in(self, api_url, creds_user): - """Test exc thrown when logout() but login() not called.""" - creds = {k: v for k, v in creds_user.items() if k != "cls"} - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **creds) - with pytest.raises(axonius_api_client.auth.exceptions.NotLoggedIn): - auth.logout() - - @pytest.mark.needs_user_creds - def test_login_already_logged_in(self, api_url, creds_user): - """Test exc thrown when login() and login() already called.""" - creds = {k: v for k, v in creds_user.items() if k != "cls"} - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = 
self.auth_cls(http_client=http_client, **creds) - auth.login() - with pytest.raises(axonius_api_client.auth.exceptions.AlreadyLoggedIn): - auth.login() - - def test_invalid_creds(self, api_url): - """Test str/repr has URL.""" - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth = self.auth_cls(http_client=http_client, **self.bad_creds) - with pytest.raises(axonius_api_client.auth.exceptions.InvalidCredentials): - auth.login() - - def test_http_lock_fail(self, api_url): - """Test using an http client from another authmethod throws exc.""" - http_client = axonius_api_client.http.HttpClient(url=api_url) - auth1 = self.auth_cls(http_client=http_client, **self.bad_creds) - assert auth1.http_client._auth_lock - with pytest.raises(axonius_api_client.auth.exceptions.AuthError): - self.auth_cls(http_client=http_client, **self.bad_creds) diff --git a/axonius_api_client/tests/conftest.py b/axonius_api_client/tests/conftest.py index e658326f..2d7e99c4 100644 --- a/axonius_api_client/tests/conftest.py +++ b/axonius_api_client/tests/conftest.py @@ -1,177 +1,48 @@ # -*- coding: utf-8 -*- """Conf for py.test.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import absolute_import, division, print_function, unicode_literals -import re import os -import pytest +import dotenv -import axonius_api_client +dotenv.load_dotenv() AX_URL = os.environ.get("AX_URL", None) or None -AX_USERNAME = os.environ.get("AX_USERNAME", None) or None -AX_PASSWORD = os.environ.get("AX_PASSWORD", None) or None AX_KEY = os.environ.get("AX_KEY", None) or None AX_SECRET = os.environ.get("AX_SECRET", None) or None -def join(opts, switch): - """Join a string with switch.""" - return "({})".format(" {} ".format(switch).join(opts)) - - -NEEDS_USERNAME = join(["--username", "$AX_USERNAME"], "OR") -NEEDS_PASSWORD = join(["--password", "$AX_PASSWORD"], "OR") -NEEDS_KEY = 
join(["--key", "$AX_KEY"], "OR") -NEEDS_SECRET = join(["--secret", "$AX_SECRET"], "OR") -NEEDS_URL = join(["--url", "$AX_URL"], "OR") - -NEEDS_USER_CREDS = join([NEEDS_USERNAME, NEEDS_PASSWORD], "AND") -NEEDS_KEY_CREDS = join([NEEDS_KEY, NEEDS_SECRET], "AND") -NEEDS_ANY_CREDS = join([NEEDS_USER_CREDS, NEEDS_KEY_CREDS], "AND/OR") - - def pytest_addoption(parser): """Add API connection options.""" parser.addoption( - "--url", + "--ax-url", action="store", default=AX_URL, - required=False, + required=not bool(AX_URL), help="URL of Axonius API", ) parser.addoption( - "--username", - action="store", - default=AX_USERNAME, - required=False, - help="Username for Axonius API", - ) - parser.addoption( - "--password", - action="store", - default=AX_PASSWORD, - required=False, - help="Password for Axonius API", - ) - parser.addoption( - "--key", + "--ax-key", action="store", default=AX_KEY, - required=False, - help="Username for Axonius API", + required=not bool(AX_KEY), + help="API key for Axonius API", ) parser.addoption( - "--secret", + "--ax-secret", action="store", default=AX_SECRET, - required=False, - help="Password for Axonius API", + required=not bool(AX_SECRET), + help="API secret for Axonius API", ) def pytest_configure(config): """Ini file additions.""" config.addinivalue_line( - "markers", "needs_user_creds: requires {}".format(NEEDS_USER_CREDS) + "filterwarnings", "error::axonius_api_client.exceptions.AxonWarning" ) - config.addinivalue_line( - "markers", "needs_key_creds: requires {}".format(NEEDS_KEY_CREDS) - ) - config.addinivalue_line( - "markers", "needs_any_creds: requires {}".format(NEEDS_ANY_CREDS) - ) - config.addinivalue_line("markers", "needs_url: requires {}".format(NEEDS_URL)) config.addinivalue_line( "filterwarnings", "ignore::urllib3.exceptions.InsecureRequestWarning" ) - - -def pytest_runtest_setup(item): - """Handle marks.""" - username = item.config.getoption("--username") - password = item.config.getoption("--password") - key = 
item.config.getoption("--key") - secret = item.config.getoption("--secret") - url = item.config.getoption("--url") - - needs = [] - - has_user_creds = all([username, password]) - has_key_creds = all([key, secret]) - has_any_creds = any([has_user_creds, has_key_creds]) - - if "needs_url" in item.keywords and not url: - needs.append(NEEDS_URL) - - if "needs_user_creds" in item.keywords and not has_user_creds: - needs.append(NEEDS_USER_CREDS) - - if "needs_key_creds" in item.keywords and not has_key_creds: - needs.append(NEEDS_KEY_CREDS) - - if "needs_any_creds" in item.keywords and not has_any_creds: - needs.append(NEEDS_ANY_CREDS) - - if needs: - msg = "Need {needs} for this test!" - msg = msg.format(needs=join(needs, "AND")) - pytest.skip(msg) - - -@pytest.fixture(scope="session") -def api_url(request): - """Fixture for getting API URL.""" - url = request.config.getoption("--url") - if url: - parsed_url = axonius_api_client.http.urlparser.UrlParser( - url=url, default_scheme="https" - ) - url = parsed_url.url - return url - - -@pytest.fixture(scope="session") -def creds_user(request): - """Fixture for getting username/password creds.""" - return { - "cls": axonius_api_client.auth.AuthUser, - "username": request.config.getoption("--username"), - "password": request.config.getoption("--password"), - } - - -@pytest.fixture(scope="session") -def creds_key(request): - """Fixture for getting key/secret creds.""" - return { - "cls": axonius_api_client.auth.AuthKey, - "key": request.config.getoption("--key"), - "secret": request.config.getoption("--secret"), - } - - -@pytest.fixture(scope="session") -def creds(request): - """Pass.""" - return request.getfixturevalue(request.param) - - -@pytest.fixture -def log_check(): - """Fixture to check if list of regexes found in pytest logging captures.""" - # wrapper - def _log_check(caplog, entries): - """Check if entries match caplog.""" - msgs = [rec.message for rec in caplog.records] - for entry in entries: - if not 
any(re.search(entry, m) for m in msgs): - error = "Did not find entry in log: {!r}\nAll entries:\n{}" - error = error.format(entry, "\n".join(msgs)) - raise Exception(error) - - return _log_check diff --git a/axonius_api_client/tests/http/__init__.py b/axonius_api_client/tests/http/__init__.py deleted file mode 100644 index 16a5cd83..00000000 --- a/axonius_api_client/tests/http/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals diff --git a/axonius_api_client/tests/http/test_http.py b/axonius_api_client/tests/http/test_http.py deleted file mode 100644 index a91997e8..00000000 --- a/axonius_api_client/tests/http/test_http.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite for axonius_api_client.http.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import logging -import sys - -import pytest -import requests - -import axonius_api_client - - -class TestHttpClient(object): - """Test axonius_api_client.http.HttpClient.""" - - def test_str_repr(self, httpbin_secure): - """Test str/repr has URL.""" - url = httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url) - assert httpbin_secure.url in format(http_client) - assert httpbin_secure.url in repr(http_client) - - def test_parsed_url(self, httpbin_secure): - """Test url=UrlParser() works.""" - url = httpbin_secure.url - parsed_url = axonius_api_client.http.urlparser.UrlParser( - url=url, default_scheme="https" - ) - http_client = axonius_api_client.http.HttpClient(url=parsed_url) - assert httpbin_secure.url in format(http_client) - assert httpbin_secure.url in repr(http_client) - - def test_user_agent(self, httpbin_secure): - """Test user_agent has version in it.""" - url = 
httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url) - assert axonius_api_client.version.__version__ in http_client.user_agent - - def test_not_quiet_urllib(self, httpbin_secure): - """Test quiet_urllib=False shows warning from urllib3.""" - url = httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url, quiet_urllib=False) - with pytest.warns(requests.urllib3.exceptions.InsecureRequestWarning): - http_client() - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="requires python3.6 or higher" - ) - def test_verify_ca_bundle(self, httpbin_secure, httpbin_ca_bundle): - """Test quiet_urllib=False no warning from urllib3 when using ca bundle.""" - url = httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url, quiet_urllib=False) - response = http_client() - assert response.status_code == 200 - - def test_save_last(self, httpbin_secure): - """Test last req/resp with save_last=True.""" - url = httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url, save_last=True) - response = http_client() - assert response == http_client.last_response - assert response.request == http_client.last_request - - def test_save_history(self, httpbin_secure): - """Test last resp added to history with save_history=True.""" - url = httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url, save_history=True) - response = http_client() - assert response in http_client.history - - def test_logging_verbose_none(self, httpbin_secure, caplog, log_check): - """Test no logging of request/response when verbose=None.""" - caplog.set_level(logging.DEBUG) - url = httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url, verbose=None) - http_client() - assert not caplog.records - - def test_logging_verbose_true(self, httpbin_secure, caplog, log_check): - """Test verbose logging of request/response when verbose=True.""" - caplog.set_level(logging.DEBUG) - url = 
httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url, verbose=True) - http_client() - entries = [ - "request.*{}.*headers".format(httpbin_secure.url + "/"), - "response.*{}.*headers".format(httpbin_secure.url + "/"), - ] - log_check(caplog, entries) - - def test_logging_verbose_false(self, httpbin_secure, caplog, log_check): - """Test brief logging of request/response when verbose=False.""" - caplog.set_level(logging.DEBUG) - url = httpbin_secure.url - http_client = axonius_api_client.http.HttpClient(url=url, verbose=False) - http_client() - entries = [ - "request.*{}".format(httpbin_secure.url + "/"), - "response.*{}".format(httpbin_secure.url + "/"), - ] - log_check(caplog, entries) diff --git a/axonius_api_client/tests/http/test_urlparser.py b/axonius_api_client/tests/http/test_urlparser.py deleted file mode 100644 index d4a670f2..00000000 --- a/axonius_api_client/tests/http/test_urlparser.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite for axonius_api_client.tools.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import pytest -import axonius_api_client - - -class TestUrlParser(object): - """Test axonius_api_client.http.urlparser.UrlParser.""" - - def test_schemehostport443(self): - """Test a proper URL gets parsed the same.""" - u = axonius_api_client.http.urlparser.UrlParser("https://host:443/blah") - assert u.hostname == "host" - assert u.port == 443 - assert u.scheme == "https" - assert u.parsed.path == "/blah" - assert u.url_full == "https://host:443/blah" - assert u.url == "https://host:443" - - def test_str_repr(self): - """Test str/repr has URL path.""" - u = axonius_api_client.http.urlparser.UrlParser("https://host:443/blah") - assert u.parsed.path in format(u) - assert u.parsed.path in repr(u) - - def test_schemehost_noport443(self): - """Test port gets added for https scheme.""" - u = 
axonius_api_client.http.urlparser.UrlParser("https://host") - assert u.hostname == "host" - assert u.port == 443 - assert u.scheme == "https" - - def test_host_noschemeport(self): - """Test exc when no port or scheme in URL.""" - exc = axonius_api_client.http.exceptions.HttpError - match = "no.*'port'" - with pytest.raises(exc, match=match): - axonius_api_client.http.urlparser.UrlParser("host", default_scheme="") - - def test_unknownschemehost_noport(self): - """Test exc when no port and non http/https scheme.""" - exc = axonius_api_client.http.exceptions.HttpError - match = "no.*'port'" - with pytest.raises(exc, match=match): - axonius_api_client.http.urlparser.UrlParser("httpx://host") - - def test_hostport443_withslash(self): - """Test scheme added with port 443 and no scheme in URL.""" - u = axonius_api_client.http.urlparser.UrlParser("host:443/") - assert u.hostname == "host" - assert u.port == 443 - assert u.scheme == "https" - - def test_hostport443_noscheme(self): - """Test scheme added with port 443 and no scheme in URL.""" - u = axonius_api_client.http.urlparser.UrlParser("host:443", default_scheme="") - assert u.hostname == "host" - assert u.port == 443 - assert u.scheme == "https" - - def test_hostport80_noscheme(self): - """Test scheme added with port 80 and no scheme in URL.""" - u = axonius_api_client.http.urlparser.UrlParser("host:80", default_scheme="") - assert u.hostname == "host" - assert u.port == 80 - assert u.scheme == "http" - - def test_schemehost_noport80(self): - """Test port added with no port and http scheme in URL.""" - u = axonius_api_client.http.urlparser.UrlParser("http://host") - assert u.hostname == "host" - assert u.port == 80 - assert u.scheme == "http" diff --git a/axonius_api_client/tests/test_tools.py b/axonius_api_client/tests/test_tools.py deleted file mode 100644 index a82faf0b..00000000 --- a/axonius_api_client/tests/test_tools.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -"""Test suite for 
axonius_api_client.tools.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import axonius_api_client - - -class TestUrlJoin(object): - """Test axonius_api_client.tools.urljoin.""" - - def test_urljoin_url(self): - """Test url gets joined properly no matter the slashes.""" - r = axonius_api_client.tools.urljoin("https://test.com") - assert r == "https://test.com/" - r = axonius_api_client.tools.urljoin("https://test.com/") - assert r == "https://test.com/" - r = axonius_api_client.tools.urljoin("https://test.com////") - assert r == "https://test.com/" - r = axonius_api_client.tools.urljoin("https://test.com", "") - assert r == "https://test.com/" - r = axonius_api_client.tools.urljoin("https://test.com", "", "") - assert r == "https://test.com/" - r = axonius_api_client.tools.urljoin("https://test.com", "/", "") - assert r == "https://test.com/" - r = axonius_api_client.tools.urljoin("https://test.com", "/", "/") - assert r == "https://test.com/" - - def test_urljoin_url_path(self): - """Test url, path gets joined properly no matter the slashes.""" - r = axonius_api_client.tools.urljoin("https://test.com", "a") - assert r == "https://test.com/a" - r = axonius_api_client.tools.urljoin("https://test.com", "/a") - assert r == "https://test.com/a" - r = axonius_api_client.tools.urljoin("https://test.com", "//a") - assert r == "https://test.com/a" - r = axonius_api_client.tools.urljoin("https://test.com", "a/") - assert r == "https://test.com/a/" - r = axonius_api_client.tools.urljoin("https://test.com", "a/b") - assert r == "https://test.com/a/b" - r = axonius_api_client.tools.urljoin("https://test.com", "a/b", "") - assert r == "https://test.com/a/b" - r = axonius_api_client.tools.urljoin("https://test.com", "a/b/", "") - assert r == "https://test.com/a/b/" - r = axonius_api_client.tools.urljoin("https://test.com", "a/b", "/") - assert r == 
"https://test.com/a/b/" - r = axonius_api_client.tools.urljoin("https://test.com", "a/b", "/////") - assert r == "https://test.com/a/b/" - - def test_urljoin_url_path_route(self): - """Test url, path, route gets joined properly no matter the slashes.""" - r = axonius_api_client.tools.urljoin("https://test.com", "a", "b") - assert r == "https://test.com/a/b" - r = axonius_api_client.tools.urljoin("https://test.com", "/a", "b") - assert r == "https://test.com/a/b" - r = axonius_api_client.tools.urljoin("https://test.com", "//a", "b") - assert r == "https://test.com/a/b" - r = axonius_api_client.tools.urljoin("https://test.com", "a", "b/c/d") - assert r == "https://test.com/a/b/c/d" diff --git a/axonius_api_client/tests/tests_api/__init__.py b/axonius_api_client/tests/tests_api/__init__.py new file mode 100644 index 00000000..10e5a108 --- /dev/null +++ b/axonius_api_client/tests/tests_api/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- +"""Test suite.""" +from __future__ import absolute_import, division, print_function, unicode_literals diff --git a/axonius_api_client/tests/tests_api/test_api_adapters.py b/axonius_api_client/tests/tests_api/test_api_adapters.py new file mode 100644 index 00000000..aac2d849 --- /dev/null +++ b/axonius_api_client/tests/tests_api/test_api_adapters.py @@ -0,0 +1,1344 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonapi.api.users_devices.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import warnings + +import pytest + +import axonius_api_client as axonapi +from axonius_api_client import constants, exceptions, tools + +from .. import utils + +# FUTURE: BR: adding cnx with parsed config instead of raw config breaks adapters._get() +# FUTURE: add atexit to verify no badwolf cnxs? 
+ +CSV_FILENAME = "badwolf.csv" +CSV_FIELDS = ["mac_address", "field1"] +CSV_ROW = ["01:37:53:9E:82:7C", "e"] +CSV_FILECONTENTS = [",".join(CSV_FIELDS), ",".join(CSV_ROW)] +CSV_FILECONTENT_STR = "\r\n".join(CSV_FILECONTENTS) + "\r\n" +CSV_FILECONTENT_BYTES = CSV_FILECONTENT_STR.encode() + +FAKE_CNX_OK = { + "adapter_name": "fluff1", + "adapter_name_raw": "fluff1_adapter", + "id": "foobar1", + "node_name": "xbxb", + "node_id": "xbxb", + "uuid": "abc123", + "status": True, +} +FAKE_CNX_BAD = { + "adapter_name": "fluff2", + "adapter_name_raw": "fluff2_adapter", + "node_name": "xbxb", + "node_id": "xbxb", + "id": "foobar2", + "uuid": "zxy987", + "status": False, +} +FAKE_CNXS = [FAKE_CNX_OK, FAKE_CNX_BAD] +FAKE_ADAPTER_CNXS_OK = { + "cnx": [FAKE_CNX_OK], + "name": "fluff1", + "name_raw": "fluff1_adapter", + "node_name": "master", + "cnx_count": 2, + "status": True, +} +FAKE_ADAPTER_CNXS_BAD = { + "cnx": FAKE_CNXS, + "name": "fluff2", + "name_raw": "fluff2_adapter", + "node_name": "master", + "cnx_count": 2, + "status": False, +} +FAKE_ADAPTER_NOCLIENTS = { + "cnx": [], + "name": "fluff3", + "name_raw": "fluff3_adapter", + "node_name": "master", + "cnx_count": 0, + "status": None, +} +FAKE_ADAPTERS = [FAKE_ADAPTER_CNXS_BAD, FAKE_ADAPTER_CNXS_OK, FAKE_ADAPTER_NOCLIENTS] + + +@pytest.fixture(scope="module") +def apiobj(request): + """Pass.""" + auth = utils.get_auth(request) + + api = axonapi.Adapters(auth=auth) + + utils.check_apiobj(authobj=auth, apiobj=api) + + utils.check_apiobj_children(apiobj=api, cnx=axonapi.api.adapters.Cnx) + return api + + +@pytest.fixture(scope="module") +def csv_adapter(apiobj): + """Pass.""" + return apiobj.get_single(adapter="csv", node="master") + + +class TestAdapters(object): + """Pass.""" + + def test__get(self, apiobj): + """Pass.""" + adapters = apiobj._get() + assert isinstance(adapters, dict) + + def test_get(self, apiobj): + """Pass.""" + adapters = apiobj.get() + assert isinstance(adapters, tools.LIST) + + def test_get_single(self, 
apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.get_single(adapter="badwolf") + + with pytest.raises(exceptions.ValueNotFound): + apiobj.get_single(adapter="csv", node="badwolf") + + data = apiobj.get_single(adapter="csv") + assert isinstance(data, dict) + assert data["name"] == "csv" + + data1 = apiobj.get_single(adapter=data) + assert data1 == data + + def test__upload_file(self, apiobj, csv_adapter): + """Pass.""" + data = apiobj._upload_file( + adapter_name=csv_adapter["name_raw"], + node_id=csv_adapter["node_id"], + field="csv", + name=CSV_FILENAME, + content=CSV_FILECONTENT_BYTES, + ) + assert isinstance(data, dict) + assert data["uuid"] + assert data["filename"] + + data = apiobj._upload_file( + adapter_name=csv_adapter["name_raw"], + node_id=csv_adapter["node_id"], + field="csv", + name=CSV_FILENAME, + content=CSV_FILECONTENT_STR, + ) + assert isinstance(data, dict) + assert data["uuid"] + assert data["filename"] + + def test_upload_file_str(self, apiobj, csv_adapter): + """Pass.""" + data = apiobj.upload_file_str( + adapter=csv_adapter, + field="csv", + name=CSV_FILENAME, + content=CSV_FILECONTENT_BYTES, + ) + assert isinstance(data, dict) + assert data["uuid"] + assert data["filename"] + + data = apiobj.upload_file_str( + adapter=csv_adapter, + field="csv", + name=CSV_FILENAME, + content=CSV_FILECONTENT_STR, + ) + assert isinstance(data, dict) + assert data["uuid"] + assert data["filename"] + + def test_upload_file_path(self, apiobj, tmp_path, csv_adapter): + """Pass.""" + test_path = tmp_path / CSV_FILENAME + test_path.write_text(CSV_FILECONTENT_STR) + + data = apiobj.upload_file_path(adapter=csv_adapter, field="csv", path=test_path) + assert isinstance(data, dict) + assert isinstance(data["uuid"], tools.STR) + assert data["filename"] == CSV_FILENAME + + test_path.write_bytes(CSV_FILECONTENT_BYTES) + + data = apiobj.upload_file_path(adapter=csv_adapter, field="csv", path=test_path) + assert isinstance(data, dict) + assert 
isinstance(data["uuid"], tools.STR) + assert data["filename"] == CSV_FILENAME + + def test_filter_by_names_regex(self, apiobj): + """Pass.""" + data = apiobj.filter_by_names(value="RE:.*", adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + assert len(data) >= 1 + + data = apiobj.filter_by_names( + value="RE:{}".format(FAKE_ADAPTER_CNXS_OK["name"]), adapters=FAKE_ADAPTERS + ) + assert isinstance(data, tools.LIST) + assert len(data) == 1 + + def test_filter_by_names_counts(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.filter_by_names(value="xxx", match_count=1, adapters=FAKE_ADAPTERS) + + data = apiobj.filter_by_names( + value="xxx", match_count=1, match_error=False, adapters=FAKE_ADAPTERS + ) + assert isinstance(data, tools.LIST) + assert len(data) == 0 + + with pytest.raises(exceptions.ValueNotFound): + apiobj.filter_by_names( + value="RE:.*", match_count=1, match_error=True, adapters=FAKE_ADAPTERS + ) + + data = apiobj.filter_by_names( + value="RE:.*", match_count=1, match_error=False, adapters=FAKE_ADAPTERS + ) + assert isinstance(data, tools.LIST) + assert len(data) == 3 + + def test_filter_by_nodes_regex(self, apiobj): + """Pass.""" + data = apiobj.filter_by_nodes(value="RE:master", adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + assert len(data) >= 1 + + data = apiobj.filter_by_nodes(value="RE:master", adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + assert len(data) >= 1 + + def test_filter_by_nodes_counts(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.filter_by_nodes( + value="xxx", match_count=1, match_error=True, adapters=FAKE_ADAPTERS + ) + + data = apiobj.filter_by_nodes( + value="xxx", match_count=1, match_error=False, adapters=FAKE_ADAPTERS + ) + assert isinstance(data, tools.LIST) + assert len(data) == 0 + + with pytest.raises(exceptions.ValueNotFound): + apiobj.filter_by_nodes( + value="RE:.*", match_count=1, match_error=True, 
adapters=FAKE_ADAPTERS + ) + + data = apiobj.filter_by_nodes( + value="RE:.*", match_count=1, match_error=False, adapters=FAKE_ADAPTERS + ) + assert isinstance(data, tools.LIST) + assert len(data) == 3 + + def test_filter_by_cnx_count(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.filter_by_cnx_count( + value=9999, match_count=1, match_error=True, adapters=FAKE_ADAPTERS + ) + + data = apiobj.filter_by_cnx_count(adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + assert len(data) == 3 + + data = apiobj.filter_by_cnx_count(value=2, adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + assert len(data) == 2 + + data = apiobj.filter_by_cnx_count( + value=9999, match_count=1, match_error=False, adapters=FAKE_ADAPTERS + ) + assert isinstance(data, tools.LIST) + assert len(data) == 0 + + def test_filter_by_status(self, apiobj): + """Pass.""" + data = apiobj.filter_by_status(value=True, adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + for x in data: + assert x["status"] is True + + data = apiobj.filter_by_status(value=False, adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + for x in data: + assert x["status"] is False + + data = apiobj.filter_by_status(value=None, adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + for x in data: + assert x["status"] is None + + data = apiobj.filter_by_status(value=[False, True], adapters=FAKE_ADAPTERS) + assert isinstance(data, tools.LIST) + for x in data: + assert x["status"] in [False, True] + + with pytest.raises(exceptions.ValueNotFound): + apiobj.filter_by_status( + value="x", adapters=FAKE_ADAPTERS, match_count=1, match_error=True + ) + + +class TestCnx(object): + """Pass.""" + + def _add_csv(self, apiobj, csv_adapter, name): + """Pass.""" + uploaded = apiobj.upload_file_str( + adapter=csv_adapter, + field="csv", + name=name, + content=CSV_FILECONTENT_BYTES, + content_type="text/csv", + ) + + config = {} + config["is_users_csv"] = 
False + config["is_installed_sw"] = False + config["user_id"] = "private_create_csv" + config["csv"] = uploaded + + added = apiobj.cnx._add( + adapter_name=csv_adapter["name_raw"], + node_id=csv_adapter["node_id"], + config=config, + ) + assert isinstance(added, dict) + assert isinstance(added["id"], tools.STR) + + assert added["error"] is None + assert added["status"] == "success" + assert added["client_id"] == "private_create_csv" + assert isinstance(added["id"], tools.STR) and added["id"] + return added, config + + def _delete_csv(self, apiobj, csv_adapter, added): + """Pass.""" + deleted = apiobj.cnx._delete( + adapter_name=csv_adapter["name_raw"], + node_id=csv_adapter["node_id"], + cnx_uuid=added["id"], + ) + assert not deleted + + not_deleted = apiobj.cnx._delete( + adapter_name=csv_adapter["name_raw"], + node_id=csv_adapter["node_id"], + cnx_uuid=added["id"], + ) + assert not_deleted + + # def test__build_known(self, apiobj, csv_adapter): + # """Pass.""" + # cnxs = apiobj.cnx.get(adapter=csv_adapter) + # known = apiobj.cnx._build_known(cnxs) + # assert isinstance(known, tools.LIST) + # for x in known: + # assert isinstance(x, tools.STR) + + def test_refetch(self, apiobj, csv_adapter): + """Pass.""" + response, config = self._add_csv( + apiobj, csv_adapter, name="badwolf_public_refetch" + ) + + refetched = apiobj.cnx.refetch( + adapter_name=csv_adapter["name"], + node_name=csv_adapter["node_name"], + response=response, + filter_method=apiobj.cnx.filter_by_uuids, + filter_value=response["id"], + ) + assert refetched["uuid"] == response["id"] + + self._delete_csv(apiobj, csv_adapter, response) + + def test_refetch_fail(self, apiobj, csv_adapter): + """Pass.""" + response = {"id": "asl;gkj;eoin;oigrnad"} + + with pytest.raises(exceptions.CnxRefetchFailure): + apiobj.cnx.refetch( + adapter_name=csv_adapter["name"], + node_name=csv_adapter["node_name"], + response=response, + filter_value="djafskjdf;kjsanhgdsaf", + filter_method=apiobj.cnx.filter_by_uuids, + 
retry=1, + sleep=0, + ) + + def test__add_check_delete(self, apiobj, csv_adapter): + """Pass.""" + added, config = self._add_csv( + apiobj, csv_adapter, name="badwolf_private_add_check_delete" + ) + + checked = apiobj.cnx._check( + adapter_name=csv_adapter["name_raw"], + node_id=csv_adapter["node_id"], + config=config, + ) + + assert isinstance(checked, dict) + assert isinstance(checked["message"], tools.STR) + assert checked["status"] == "error" + assert checked["type"] == "NotImplementedError" + + self._delete_csv(apiobj, csv_adapter, added) + + def test_add_delete(self, apiobj, csv_adapter): + """Pass.""" + config = dict( + dc_name="badwolf_public_add_delete", + user=CSV_FILENAME, + password=CSV_FILENAME, + ) + + adapter = apiobj.get_single("active_directory") + + with pytest.raises(exceptions.CnxConnectFailure) as exc: + apiobj.cnx.add( + adapter=adapter, config=config, parse_config=True, error=True + ) + + refetched = apiobj.cnx.refetch( + adapter_name=adapter["name"], + node_name=adapter["node_name"], + response=exc.value.response, + filter_value=exc.value.response["id"], + filter_method=apiobj.cnx.filter_by_uuids, + ) + + apiobj.cnx.delete(refetched, force=True, sleep=0, warning=False) + + def test_delete_noforce(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.CnxDeleteForce): + apiobj.cnx.delete(cnx=FAKE_CNX_BAD, force=False) + + def test_delete_warning(self, apiobj): + """Pass.""" + with pytest.warns(exceptions.CnxDeleteFailedWarning): + apiobj.cnx.delete( + cnx=FAKE_CNX_BAD, force=True, error=False, warning=True, sleep=0 + ) + + def test_delete_error(self, apiobj): + """Pass.""" + with pytest.warns(exceptions.CnxDeleteWarning): + with pytest.raises(exceptions.CnxDeleteFailed): + apiobj.cnx.delete( + cnx=FAKE_CNX_BAD, force=True, error=True, warning=True, sleep=0 + ) + + def test_get_adapter(self, apiobj, csv_adapter): + """Pass.""" + cnxs = apiobj.cnx.get(adapter=csv_adapter) + assert isinstance(cnxs, tools.LIST) + for x in cnxs: + assert 
x["adapter_name"] == "csv" + + def test_get_str(self, apiobj): + """Pass.""" + cnxs = apiobj.cnx.get(adapter="csv") + assert isinstance(cnxs, tools.LIST) + for x in cnxs: + assert x["adapter_name"] == "csv" + + def test_get_list(self, apiobj): + """Pass.""" + cnxs = apiobj.cnx.get(adapter=["csv", "active_directory"]) + assert isinstance(cnxs, tools.LIST) + for x in cnxs: + assert x["adapter_name"] in ["csv", "active_directory"] + + def test_get_none(self, apiobj): + """Pass.""" + all_adapters = apiobj.get() + ok_adapters = apiobj.filter_by_status(adapters=all_adapters, value=True) + cnxs = apiobj.cnx.get(adapter=None) + assert isinstance(cnxs, tools.LIST) + for cnx in cnxs: + assert isinstance(cnx, dict) + assert len(cnxs) >= len(ok_adapters) + + def test_filter_by_status(self, apiobj): + """Pass.""" + good = apiobj.cnx.filter_by_status(cnxs=FAKE_CNXS, value=True) + assert good == [FAKE_CNXS[0]] + + bad = apiobj.cnx.filter_by_status(cnxs=FAKE_CNXS, value=False) + assert bad == [FAKE_CNXS[1]] + + both = apiobj.cnx.filter_by_status(cnxs=FAKE_CNXS, value=[True, False]) + assert both == FAKE_CNXS + + with pytest.raises(exceptions.ValueNotFound): + apiobj.cnx.filter_by_status( + cnxs=FAKE_CNXS, value=["x"], match_count=1, match_error=True + ) + + def test_filter_by_ids(self, apiobj): + """Pass.""" + just1re = apiobj.cnx.filter_by_ids( + cnxs=FAKE_CNXS, value="RE:{}".format(FAKE_CNXS[0]["id"]) + ) + assert isinstance(just1re, tools.LIST) + assert just1re == [FAKE_CNXS[0]] + + just1 = apiobj.cnx.filter_by_ids( + cnxs=FAKE_CNXS, value="RE:{}".format(FAKE_CNXS[0]["id"]), match_count=1 + ) + assert just1 == [FAKE_CNXS[0]] + + with pytest.raises(exceptions.ValueNotFound): + apiobj.cnx.filter_by_ids( + cnxs=FAKE_CNXS, value="badwolfyfakfjlka", match_count=1 + ) + + def test_filter_by_uuids(self, apiobj): + """Pass.""" + just1re = apiobj.cnx.filter_by_uuids( + cnxs=FAKE_CNXS, value="RE:{}".format(FAKE_CNXS[0]["uuid"]) + ) + assert isinstance(just1re, tools.LIST) + assert 
just1re == [FAKE_CNXS[0]] + + just1 = apiobj.cnx.filter_by_uuids( + cnxs=FAKE_CNXS, value=FAKE_CNXS[0]["uuid"], match_count=1 + ) + assert just1 == [FAKE_CNXS[0]] + + with pytest.raises(exceptions.ValueNotFound): + apiobj.cnx.filter_by_uuids( + cnxs=FAKE_CNXS, value="badwolfyfakfjlka", match_count=1 + ) + + def test_update_success(self, apiobj): + """Pass.""" + cnxs = apiobj.cnx.get(adapter=None) + cnxs = apiobj.cnx.filter_by_status(cnxs=cnxs, value=True) + + if not cnxs: + reason = "No working connections found!" + pytest.skip(reason) + + for cnx in cnxs: + check = apiobj.cnx.check(cnx=cnx, error=False) + if not check["response_had_error"]: + response = apiobj.cnx.update(cnx=cnx, error=True) + assert response["cnx"]["uuid"] != cnx["uuid"] + return + + def test_update_failure(self, apiobj): + """Pass.""" + config = dict( + dc_name="badwolf_public_update_failure", + user=CSV_FILENAME, + password=CSV_FILENAME, + ) + + cnx = apiobj.cnx.add( + adapter="active_directory", config=config, parse_config=True, error=False + ) + + with pytest.raises(exceptions.CnxConnectFailure) as exc: + apiobj.cnx.update(cnx=cnx["cnx"], error=True) + + refetched = apiobj.cnx.refetch( + adapter_name=cnx["cnx"]["adapter_name"], + node_name=cnx["cnx"]["node_name"], + response=exc.value.response, + filter_value=exc.value.response["id"], + filter_method=apiobj.cnx.filter_by_uuids, + ) + + with pytest.warns(exceptions.CnxDeleteWarning): + apiobj.cnx.delete(refetched, force=True, sleep=0, warning=True) + + def test_update_parse(self, apiobj): + """Pass.""" + config = dict( + dc_name="badwolf_public_update_parse", + user=CSV_FILENAME, + password=CSV_FILENAME, + ) + cnx = apiobj.cnx.add( + adapter="active_directory", config=config, parse_config=True, error=False + ) + with pytest.raises(exceptions.CnxConnectFailure) as exc: + apiobj.cnx.update( + cnx=cnx["cnx"], new_config=config, parse_config=True, error=True + ) + + refetched = apiobj.cnx.refetch( + adapter_name=cnx["cnx"]["adapter_name"], + 
node_name=cnx["cnx"]["node_name"], + response=exc.value.response, + filter_value=exc.value.response["id"], + filter_method=apiobj.cnx.filter_by_uuids, + ) + + with pytest.warns(exceptions.CnxDeleteWarning): + apiobj.cnx.delete(refetched, force=True, sleep=0, warning=True) + + def test_check_success(self, apiobj): + """Pass.""" + cnxs = apiobj.cnx.get(adapter=None) + cnxs = apiobj.cnx.filter_by_status(cnxs=cnxs, value=True) + + if not cnxs: + reason = "No working connections found!" + pytest.skip(reason) + + for cnx in cnxs: + checked = apiobj.cnx.check(cnx=cnx, error=False) + + if not checked["response_had_error"]: + data = apiobj.cnx.check(cnx=cnx, error=True) + + assert isinstance(data, dict) + assert data["cnx"]["uuid"] == cnx["uuid"] + assert "response" in data + return + + def test_check_failure(self, apiobj): + """Pass.""" + cnxs = apiobj.cnx.get(adapter=None) + cnxs = apiobj.cnx.filter_by_status(cnxs=cnxs, value=False) + + if not cnxs: + reason = "No broken connections found!" + pytest.skip(reason) + + for cnx in cnxs: + checked = apiobj.cnx.check(cnx=cnx, error=False) + + if checked["response_had_error"]: + with pytest.raises(exceptions.CnxConnectFailure): + apiobj.cnx.check(cnx=cnx, error=True) + + return + + def test_add_delete_csv_str(self, apiobj): + """Pass.""" + added = apiobj.cnx.add_csv_str( + name=CSV_FILENAME, + content=CSV_FILECONTENT_BYTES, + field="badwolf_add_csv_str", + ) + assert isinstance(added, dict) + + apiobj.cnx.delete( + cnx=added["cnx"], force=True, error=False, warning=False, sleep=0 + ) + + def test_add_delete_csv_file(self, apiobj, tmp_path): + """Pass.""" + test_path = tmp_path / CSV_FILENAME + test_path.write_text(CSV_FILECONTENT_STR) + + added = apiobj.cnx.add_csv_file(path=test_path, field="badwolf_add_csv_file") + assert isinstance(added, dict) + + apiobj.cnx.delete( + cnx=added["cnx"], force=True, error=False, warning=False, sleep=0 + ) + + def test_add_delete_csv_url(self, apiobj): + """Pass.""" + added = 
apiobj.cnx.add_csv_url( + url="https://localhost/badwolf_add_csv_url.csv", + field="badwolf_add_csv_url", + error=False, + ) + assert isinstance(added, dict) + + apiobj.cnx.delete( + cnx=added["cnx"], force=True, error=False, warning=False, sleep=0 + ) + + def test_add_delete_csv_share(self, apiobj): + """Pass.""" + moo = "moo" + added = apiobj.cnx.add_csv_share( + share="smb://localhost/badwolf_add_csv_share.csv", + username=moo, + password=moo, + field="badwolf_add_csv_share", + error=False, + ) + assert isinstance(added, dict) + + apiobj.cnx.delete( + cnx=added["cnx"], force=True, error=False, warning=False, sleep=0 + ) + + +class TestValidateCsv(object): + """Pass.""" + + def test_device(self): + """Pass.""" + content = "{},test1\nabc,def\n".format(constants.CSV_FIELDS["device"][0]) + + with pytest.warns(None) as record: + axonapi.api.adapters.validate_csv(name=CSV_FILENAME, content=content) + + assert len(record) == 0 + + def test_device_bytes(self): + """Pass.""" + content = "{},test1\nabc,def\n".format(constants.CSV_FIELDS["device"][0]) + + with pytest.warns(None) as record: + axonapi.api.adapters.validate_csv( + name=CSV_FILENAME, content=content.encode() + ) + + assert len(record) == 0 + + def test_device_warn(self): + """Pass.""" + content = "test2,test1\nabc,def\n" + + with pytest.warns(exceptions.CnxCsvWarning) as record: + axonapi.api.adapters.validate_csv(name=CSV_FILENAME, content=content) + + assert len(record) == 1 + + def test_users(self): + """Pass.""" + content = "{},test1\nabc,def\n".format(constants.CSV_FIELDS["user"][0]) + + with pytest.warns(None) as record: + axonapi.api.adapters.validate_csv( + name=CSV_FILENAME, content=content, is_users=True + ) + + assert len(record) == 0 + + def test_users_warn(self): + """Pass.""" + content = "test2,test1\nabc,def\n" + + with pytest.warns(exceptions.CnxCsvWarning) as record: + axonapi.api.adapters.validate_csv( + name=CSV_FILENAME, content=content, is_users=True + ) + + assert len(record) == 1 + + def 
test_sw(self): + """Pass.""" + content = "{},test1\nabc,def\n".format(constants.CSV_FIELDS["sw"][0]) + + with pytest.warns(None) as record: + axonapi.api.adapters.validate_csv( + name=CSV_FILENAME, content=content, is_installed_sw=True + ) + + assert len(record) == 0, "threw warnings" + + def test_sw_warn(self): + """Pass.""" + content = "test2,test1\nabc,def\n" + + with pytest.warns(exceptions.CnxCsvWarning) as record: + axonapi.api.adapters.validate_csv( + name=CSV_FILENAME, content=content, is_installed_sw=True + ) + + assert len(record) == 1, "did not throw only one warning" + + +class TestParserCnxConfig(object): + """Pass.""" + + def test_ignore(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="string", required=True)) + + config = dict(test1="test1", ignore="x") + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1="test1") + + def test_enum(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict( + test1=dict(name="test1", type="string", required=True, enum=["test1"]) + ) + + config = dict(test1="test1") + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1="test1") + + def test_enum_invalidchoice(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict( + test1=dict(name="test1", type="string", required=True, enum=["test1"]) + ) + + config = dict(test1="badwolf") + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + + with pytest.raises(exceptions.CnxSettingInvalidChoice): + parser.parse(adapter=csv_adapter, settings=fake_settings) + + def test_unchanged(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="string", required=True)) + + config = 
dict(test1=constants.SETTING_UNCHANGED) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1=constants.SETTING_UNCHANGED) + + def test_string(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="string", required=True)) + + config = dict(test1="test1") + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1="test1") + + def test_number(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="number", required=True)) + + config = dict(test1="2") + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1=2) + + def test_integer(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="integer", required=True)) + + config = dict(test1="2") + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1=2) + + def test_bool(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="bool", required=True)) + + config = dict(test1=False) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1=False) + + def test_optional_default(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict( + test1=dict(name="test1", type="string", default="x", required=False) + ) + + config = dict() + + parser = 
axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1="x") + + def test_optional_missing(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="string", required=False)) + + config = dict() + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict() + + def test_required_default(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict( + test1=dict(name="test1", type="string", default="x", required=True) + ) + + config = dict() + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1="x") + + def test_required_missing(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="string", required=True)) + + config = dict() + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + + with pytest.raises(exceptions.CnxSettingMissing): + parser.parse(adapter=csv_adapter, settings=fake_settings) + + def test_array(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="array", required=True)) + + config = dict(test1=["test1"]) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1=["test1"]) + + def test_array_comma(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="array", required=True)) + + config = dict(test1="test1,test2,test3") + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = 
parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1=["test1", "test2", "test3"]) + + def test_array_invalidtype(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="array", required=True)) + + config = dict(test1=[True]) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + + with pytest.raises(exceptions.CnxSettingInvalidType): + parser.parse(adapter=csv_adapter, settings=fake_settings) + + def test_badtype(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="string", required=True)) + + config = dict(test1=True) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + + with pytest.raises(exceptions.CnxSettingInvalidType): + parser.parse(adapter=csv_adapter, settings=fake_settings) + + def test_unknowntype(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="badwolf", required=True)) + + config = dict(test1="a") + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + + with pytest.raises(exceptions.CnxSettingUnknownType): + parser.parse(adapter=csv_adapter, settings=fake_settings) + + def test_file_badtype(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="file", required=True)) + + config = dict(test1=["X"]) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + + with pytest.raises(exceptions.CnxSettingInvalidType): + parser.parse(adapter=csv_adapter, settings=fake_settings) + + def test_file_missing(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="file", required=True)) + + config = dict(test1={}) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + + with pytest.raises(exceptions.CnxSettingFileMissing): + parser.parse(adapter=csv_adapter, 
settings=fake_settings) + + def test_file_uuid(self, apiobj, csv_adapter): + """Pass.""" + fake_settings = dict(test1=dict(name="test1", type="file", required=True)) + + config = dict(test1={"uuid": "x", "filename": "x", "ignore": "me"}) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1={"uuid": "x", "filename": "x"}) + + def test_filename(self, apiobj, csv_adapter, monkeypatch): + """Pass.""" + # + def mock_upload_file(**kwargs): + """Pass.""" + return {"uuid": "x", "filename": "badwolf"} + + monkeypatch.setattr(apiobj, "_upload_file", mock_upload_file) + + fake_settings = dict(test1=dict(name="test1", type="file", required=True)) + + config = dict(test1={"filename": "x", "filecontent": "x"}) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1={"uuid": "x", "filename": "badwolf"}) + + def test_filepath(self, apiobj, csv_adapter, monkeypatch, tmp_path): + """Pass.""" + # + def mock_upload_file(**kwargs): + """Pass.""" + return {"uuid": "x", "filename": CSV_FILENAME} + + monkeypatch.setattr(apiobj, "_upload_file", mock_upload_file) + + fake_settings = dict(test1=dict(name="test1", type="file", required=True)) + + test_path = tmp_path / CSV_FILENAME + test_path.write_text(CSV_FILECONTENT_STR) + + config = dict(test1={"filepath": test_path}) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1={"uuid": "x", "filename": CSV_FILENAME}) + + def test_filepath_str(self, apiobj, csv_adapter, monkeypatch, tmp_path): + """Pass.""" + # + def mock_upload_file(**kwargs): + """Pass.""" + return {"uuid": "x", "filename": CSV_FILENAME} + + 
monkeypatch.setattr(apiobj, "_upload_file", mock_upload_file) + + fake_settings = dict(test1=dict(name="test1", type="file", required=True)) + + test_path = tmp_path / CSV_FILENAME + test_path.write_text(CSV_FILECONTENT_STR) + + config = dict(test1=format(test_path)) + + parser = axonapi.api.adapters.ParserCnxConfig(raw=config, parent=apiobj.cnx) + parsed_config = parser.parse(adapter=csv_adapter, settings=fake_settings) + + assert parsed_config == dict(test1={"uuid": "x", "filename": CSV_FILENAME}) + + +class TestParserAdapters(object): + """Pass.""" + + def test_adapters(self, apiobj): + """Pass.""" + raw = apiobj._get() + parser = axonapi.api.adapters.ParserAdapters(raw=raw, parent=apiobj) + adapters = parser.parse() + assert isinstance(adapters, tools.LIST) + for adapter in adapters: + self.validate_adapter(adapter) + + def validate_cnx(self, aname, aname_raw, astatus, anid, anname, cnx): + """Pass.""" + assert isinstance(cnx, dict) + adapter_name = cnx.pop("adapter_name") + adapter_name_raw = cnx.pop("adapter_name_raw") + adapter_status = cnx.pop("adapter_status") + config = cnx.pop("config") + config_raw = cnx.pop("config_raw") + date_fetched = cnx.pop("date_fetched") + error = cnx.pop("error") + cid = cnx.pop("id") + node_id = cnx.pop("node_id") + node_name = cnx.pop("node_name") + status = cnx.pop("status") + status_raw = cnx.pop("status_raw") + uuid = cnx.pop("uuid") + + assert adapter_name == aname + assert adapter_name_raw == aname_raw + assert adapter_status == astatus + assert isinstance(config, dict) and config + assert isinstance(config_raw, dict) and config_raw + assert isinstance(date_fetched, tools.STR) + assert isinstance(error, tools.STR) or error is None + assert isinstance(cid, tools.STR) + assert node_id == anid + assert node_name == anname + assert isinstance(status, bool) + assert isinstance(status_raw, tools.STR) + assert isinstance(uuid, tools.STR) + + if status is False: + assert astatus is False + + if status is True: + assert astatus 
in [True, False] + + assert not cnx + + def validate_settings(self, settings, check_value): + """Pass.""" + for name, item in settings.items(): + item_name = item.pop("name") + item_type = item.pop("type") + item_title = item.pop("title") + item_format = item.pop("format", "") + item_description = item.pop("description", "") + item_enum = item.pop("enum", []) + item_default = item.pop("default", "") + item_items = item.pop("items", {}) + item_required = item.pop("required") + + if check_value: + item_value = item.pop("value") + assert isinstance(item_value, tools.SIMPLE) or item_value in [None, []] + + assert isinstance(item_name, tools.STR) and item_name + assert isinstance(item_type, tools.STR) and item_type + assert isinstance(item_title, tools.STR) and item_title + assert isinstance(item_items, dict) + assert isinstance(item_default, tools.SIMPLE) or item_default in [None, []] + assert isinstance(item_enum, tools.LIST) + for x in item_enum: + assert isinstance(x, tools.STR) + assert isinstance(item_format, tools.STR) + assert isinstance(item_description, tools.STR) + assert isinstance(item_required, bool) + assert item_type in ["number", "integer", "string", "bool", "array", "file"] + assert not item + + def validate_adapter(self, adapter): + """Pass.""" + assert isinstance(adapter, dict) + + adv_settings = adapter.pop("adv_settings") + cnx = adapter.pop("cnx") + cnx_bad = adapter.pop("cnx_bad") + cnx_count = adapter.pop("cnx_count") + cnx_count_bad = adapter.pop("cnx_count_bad") + cnx_count_ok = adapter.pop("cnx_count_ok") + cnx_ok = adapter.pop("cnx_ok") + cnx_settings = adapter.pop("cnx_settings") + features = adapter.pop("features") + name = adapter.pop("name") + name_plugin = adapter.pop("name_plugin") + name_raw = adapter.pop("name_raw") + node_id = adapter.pop("node_id") + node_name = adapter.pop("node_name") + settings = adapter.pop("settings") + status = adapter.pop("status") + status_raw = adapter.pop("status_raw") + + assert isinstance(name, 
tools.STR) + assert isinstance(name_raw, tools.STR) + assert isinstance(name_plugin, tools.STR) + assert isinstance(node_name, tools.STR) + assert isinstance(node_id, tools.STR) + assert isinstance(status_raw, tools.STR) + assert isinstance(features, tools.LIST) + for x in features: + assert isinstance(x, tools.STR) + assert isinstance(cnx_count, tools.INT) + assert isinstance(cnx_count_ok, tools.INT) + assert isinstance(cnx_count_bad, tools.INT) + assert isinstance(status, bool) or status is None + assert isinstance(cnx_settings, dict) + assert isinstance(settings, dict) + assert isinstance(adv_settings, dict) + + self.validate_settings(settings, True) + self.validate_settings(adv_settings, True) + self.validate_settings(cnx_settings, False) + + for cnxs in [cnx, cnx_ok, cnx_bad]: + assert isinstance(cnxs, tools.LIST) + for connection in [x for x in cnxs if x]: + self.validate_cnx( + aname=name, + aname_raw=name_raw, + anid=node_id, + anname=node_name, + astatus=status, + cnx=connection, + ) + + assert not adapter + + +class TestRawAdapters(object): + """Pass.""" + + def test_adapters(self, apiobj): + """Pass.""" + adapters = apiobj._get() + assert isinstance(adapters, dict) + + for name, instances in adapters.items(): + assert name.endswith("_adapter") + assert isinstance(instances, tools.LIST) + + for instance in instances: + self.validate_instance(name, instance) + + def validate_client(self, name, client, instance_status): + """Pass.""" + assert isinstance(client, dict) + + client_config = client.pop("client_config") + uuid = client.pop("uuid") + client_id = client.pop("client_id") + error = client.pop("error") + node_id = client.pop("node_id") + status = client.pop("status") + date_fetched = client.pop("date_fetched") + + assert isinstance(client_config, dict) and client_config + assert isinstance(client_id, tools.STR) + assert isinstance(date_fetched, tools.STR) + assert isinstance(uuid, tools.STR) and uuid + + if not client_id: + msg = "Client for {} has an 
empty client_id {}" + msg = msg.format(name, client_id) + warnings.warn(msg) + + assert isinstance(error, tools.STR) or error is None + assert isinstance(node_id, tools.STR) and node_id + assert isinstance(status, tools.STR) and status in [ + "warning", + "error", + "success", + ] + + if status == "error": + assert instance_status == "warning" + + assert not client + + def validate_schema(self, name, schema): + """Pass.""" + assert isinstance(schema, dict) + + items = schema.pop("items") + required = schema.pop("required") + schema_type = schema.pop("type") + + assert isinstance(items, tools.LIST) and items + assert isinstance(required, tools.LIST) + assert schema_type == "array" + + for req in required: + assert isinstance(req, tools.STR) + item_names = [x["name"] for x in items] + + # FUTURE: schema's are defining required items that dont exist in items + if req not in item_names: + msg = "Schema for {} has required item {!r} not in defined items {}" + msg = msg.format(name, req, item_names) + warnings.warn(msg) + + for item in items: + assert isinstance(item, dict) + item_name = item.pop("name") + item_type = item.pop("type") + item_title = item.pop("title") + item_format = item.pop("format", "") + item_description = item.pop("description", "") + item_enum = item.pop("enum", []) + item_default = item.pop("default", "") + item_items = item.pop("items", {}) + + assert isinstance(item_name, tools.STR) and item_name + assert isinstance(item_type, tools.STR) and item_type + assert isinstance(item_title, tools.STR) and item_title + assert isinstance(item_items, dict) + assert isinstance(item_default, tools.SIMPLE) or item_default is None + assert isinstance(item_enum, tools.LIST) + for x in item_enum: + assert isinstance(x, tools.STR) + assert isinstance(item_format, tools.STR) + assert isinstance(item_description, tools.STR) + assert item_type in ["number", "integer", "string", "bool", "array", "file"] + assert not item + + assert not schema + + def 
validate_instance(self, name, instance): + """Pass.""" + node_id = instance.pop("node_id") + unique_plugin_name = instance.pop("unique_plugin_name") + node_name = instance.pop("node_name") + supported_features = instance.pop("supported_features") + clients = instance.pop("clients") + config = instance.pop("config") + schema = instance.pop("schema") + status = instance.pop("status") + + assert not instance + + assert isinstance(node_id, tools.STR) and node_id + assert isinstance(unique_plugin_name, tools.STR) and unique_plugin_name + assert isinstance(node_name, tools.STR) and node_name + assert status in ["warning", "success", None, ""] + assert isinstance(supported_features, tools.LIST) + for x in supported_features: + assert isinstance(x, tools.STR) + assert isinstance(clients, tools.LIST) + assert isinstance(config, dict) and config + assert isinstance(schema, dict) and schema + + self.validate_schema(name, schema) + + assert len(config) in [1, 2] + assert "AdapterBase" in config + + for config_name, item in config.items(): + item_config = item.pop("config") + item_schema = item.pop("schema") + item_pretty_name = item_schema.pop("pretty_name") + + assert isinstance(item_config, dict) and item_config + assert isinstance(item_pretty_name, tools.STR) and item_pretty_name + assert isinstance(item_schema, dict) and item_schema + + self.validate_schema(name, item_schema) + + assert not item + + for client in clients: + self.validate_client(name, client, status) diff --git a/axonius_api_client/tests/tests_api/test_api_enforcements.py b/axonius_api_client/tests/tests_api/test_api_enforcements.py new file mode 100644 index 00000000..b80453f3 --- /dev/null +++ b/axonius_api_client/tests/tests_api/test_api_enforcements.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonapi.api.users_devices.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import warnings + +import pytest + +import axonius_api_client as axonapi 
+from axonius_api_client import exceptions, tools + +from .. import utils + +LINUX_QUERY = 'specific_data.data.os.type == "Linux"' +SHELL_ACTION_NAME = "Badwolf Shell Action" +SHELL_ACTION_CMD = "echo 'Badwolf' > /tmp/badwolf.txt" +DEPLOY_ACTION_NAME = "Badwolf Deploy Action" +DEPLOY_FILE_NAME = "badwolf.sh" +DEPLOY_FILE_CONTENTS = b"#!/bin/bash\necho badwolf!" + +CREATE_EC_NAME = "Badwolf EC Example" +CREATE_EC_TRIGGERS = [ + { + "name": "Trigger", + "view": { + "name": "Users Created in Last 30 Days", + "entity": "users", + }, # FUTURE: public create will need to get sq! + "conditions": { + "new_entities": False, + "previous_entities": False, + "above": 1, + "below": 0, + }, # FUTURE: need public build_trigger method + "period": "never", + "run_on": "AllEntities", + } +] +CREATE_EC_ACTION_MAIN = { + "name": "Badwolf Create Notification", + "action": {"action_name": "create_notification", "config": {}}, +} + + +@pytest.fixture(scope="module") +def apiobj(request): + """Pass.""" + auth = utils.get_auth(request) + + with pytest.warns(exceptions.BetaWarning): + api = axonapi.Enforcements(auth=auth) + + utils.check_apiobj(authobj=auth, apiobj=api) + + utils.check_apiobj_children( + apiobj=api, runaction=axonapi.api.enforcements.RunAction + ) + + utils.check_apiobj_xref( + apiobj=api, + users=axonapi.api.users_devices.Users, + devices=axonapi.api.users_devices.Devices, + ) + + return api + + +class TestEnforcements(object): + """Pass.""" + + def test__get(self, apiobj): + """Pass.""" + data = apiobj._get() + assert isinstance(data, dict) + + assets = data["assets"] + assert isinstance(assets, tools.LIST) + + for asset in assets: + assert isinstance(asset, dict) + + def test_get(self, apiobj): + """Pass.""" + data = apiobj.get() + assert isinstance(data, list) + for found in data: + assert isinstance(found["uuid"], tools.STR) + assert isinstance(found["actions.main"], tools.STR) + assert isinstance(found["name"], tools.STR) + assert isinstance(found["date_fetched"], 
tools.STR) + assert isinstance(found["last_updated"], tools.STR) + assert "triggers.last_triggered" in found + assert "triggers.times_triggered" in found + + def test_get_maxpages(self, apiobj): + """Pass.""" + found = apiobj.get(max_pages=1, page_size=1) + assert isinstance(found, list) + assert len(found) == 1 + + def test_create_get_delete(self, apiobj): + """Pass.""" + old_found = apiobj.get_by_name(CREATE_EC_NAME, eq_single=False) + if old_found: + msg = "Enforcement named {} already exists from previous test, deleting: {}" + msg = msg.format(CREATE_EC_NAME, old_found) + warnings.warn(msg) + deleted = apiobj.delete(rows=old_found) + assert isinstance(deleted, dict) + assert isinstance(deleted["deleted"], tools.INT) + assert deleted["deleted"] == 1 + + created = apiobj._create( + name=CREATE_EC_NAME, main=CREATE_EC_ACTION_MAIN, triggers=CREATE_EC_TRIGGERS + ) + assert isinstance(created, tools.STR) + + found = apiobj.get_by_name(CREATE_EC_NAME) + """ + { + "actions.main": "Badwolf Create Notification", + "date_fetched": "2019-09-10 23:17:07+00:00", + "last_updated": "Tue, 10 Sep 2019 23:17:07 GMT", + "name": "Badwolf EC Example", + "triggers.last_triggered": null, + "triggers.times_triggered": 0, + "triggers.view.name": "Users Created in Last 30 Days", + "uuid": "5d782ef380ded0001bbe3c47" + } + """ + assert isinstance(found, dict) + assert found["uuid"] == created + assert found["actions.main"] == CREATE_EC_ACTION_MAIN["name"] + assert found["name"] == CREATE_EC_NAME + assert isinstance(found["date_fetched"], tools.STR) + assert isinstance(found["last_updated"], tools.STR) + assert "triggers.last_triggered" in found + assert "triggers.times_triggered" in found + assert found["triggers.view.name"] == CREATE_EC_TRIGGERS[0]["view"]["name"] + + found_by_id = apiobj.get_by_id(found["uuid"]) + assert isinstance(found_by_id, dict) + + deleted = apiobj.delete(rows=found_by_id) + assert isinstance(deleted, dict) + assert isinstance(deleted["deleted"], tools.INT) + 
assert deleted["deleted"] == 1 + + with pytest.raises(exceptions.ValueNotFound): + apiobj.get_by_id(found["uuid"]) + + with pytest.raises(exceptions.ValueNotFound): + apiobj.get_by_name(found["name"]) + + notfound = apiobj.get_by_id(found["uuid"], match_error=False) + assert notfound is None + + notmatches = apiobj.get_by_name("NOT:{}".format(found["name"])) + assert not any([x["name"] == found["name"] for x in notmatches]) + + allobjs = apiobj.get() + if allobjs: + name = allobjs[0]["name"] + onere = "RE:{}".format(name[0]) + rematches = apiobj.get_by_name(onere) + assert any([x["name"] == name for x in rematches]) + + +class TestRunActions(object): + """Pass.""" + + def test__get(self, apiobj): + """Pass.""" + data = apiobj.runaction._get() + for i in ["deploy", "shell", "upload_file"]: + assert i in data + + # FUTURE: + # this returns nothing... + # AND no action shows up in GUI for dvc + # AND no task shows up in EC + # BUT: Extended Data Tab shows stuff, but i dont know how to query for that yet + def test__shell(self, apiobj): + """Pass.""" + devices = apiobj.devices._get(query=LINUX_QUERY, page_size=1, row_start=0) + ids = [x["internal_axon_id"] for x in devices["assets"]] + + if not ids: + reason = "No linux devices found!" 
+ pytest.skip(reason) + + data = apiobj.runaction._shell( + action_name=SHELL_ACTION_NAME, ids=ids, command=SHELL_ACTION_CMD + ) + assert not data + + @pytest.fixture(scope="class") + def uploaded_file(self, apiobj): + """Pass.""" + data = apiobj.runaction._upload_file( + name=DEPLOY_FILE_NAME, content=DEPLOY_FILE_CONTENTS + ) + assert isinstance(data, dict) + assert isinstance(data["uuid"], tools.STR) + assert data["filename"] == DEPLOY_FILE_NAME + return data + + # FUTURE: returns nadda + def test__upload_deploy(self, apiobj, uploaded_file): + """Pass.""" + devices = apiobj.devices._get(query=LINUX_QUERY, page_size=1, row_start=0) + ids = [x["internal_axon_id"] for x in devices["assets"]] + + if not ids: + reason = "No linux devices found!" + pytest.skip(reason) + + data = apiobj.runaction._deploy( + action_name=DEPLOY_ACTION_NAME, + ids=ids, + file_uuid=uploaded_file["uuid"], + file_name=uploaded_file["filename"], + params=None, + ) + assert not data diff --git a/axonius_api_client/tests/tests_api/test_api_mixins.py b/axonius_api_client/tests/tests_api/test_api_mixins.py new file mode 100644 index 00000000..c41d6c93 --- /dev/null +++ b/axonius_api_client/tests/tests_api/test_api_mixins.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonapi.api.users_devices.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import pytest +import requests + +import axonius_api_client as axonapi +from axonius_api_client import exceptions, tools + +from .. 
import utils + + +@pytest.fixture(scope="module") +def apiobj(request): + """Pass.""" + auth = utils.get_auth(request) + + api = axonapi.api.mixins.Mixins(auth=auth) + api._router = axonapi.api.routers.ApiV1.devices + + utils.check_apiobj(authobj=auth, apiobj=api) + + return api + + +class MockParser(axonapi.api.mixins.Parser): + """Pass.""" + + def parse(self): + """Pass.""" + return + + +class TestMixins(object): + """Pass.""" + + def test_json(self, apiobj): + """Test that JSON is returned when is_json=True.""" + response = apiobj._request( + path=apiobj._router.fields, + method="get", + raw=False, + is_json=True, + error_status=True, + ) + assert isinstance(response, dict) + + def test_raw(self, apiobj): + """Test that response is returned when raw=True.""" + response = apiobj._request( + path=apiobj._router.fields, + method="get", + raw=True, + is_json=True, + error_status=True, + ) + assert isinstance(response, requests.Response) + + def test_text(self, apiobj): + """Test that str is returned when raw=False and is_json=False.""" + response = apiobj._request( + path=apiobj._router.fields, + method="get", + raw=False, + is_json=False, + error_status=True, + ) + assert isinstance(response, tools.STR) + + def test_json_error(self, apiobj): + """Test exc thrown when json has error status.""" + with pytest.raises(exceptions.JsonError): + apiobj._request( + path=apiobj._router.root + "/badwolf", method="get", error_status=False + ) + + def test_no_json_error(self, apiobj): + """Test exc thrown when status code != 200.""" + with pytest.raises(exceptions.ResponseNotOk): + apiobj._request( + path=apiobj._router.root + "/badwolf", + method="get", + error_status=True, + is_json=False, + ) + + def test_json_invalid(self, apiobj): + """Test exc thrown when invalid json.""" + with pytest.raises(exceptions.JsonInvalid): + apiobj._request(path="", method="get") + + def test_json_invalid_text(self, apiobj): + """Test that str is returned when is_json=True and 
error_json_invalid=False.""" + response = apiobj._request(path="", method="get", error_json_invalid=False) + assert isinstance(response, tools.STR) + + def test_child(self, apiobj): + """Pass.""" + child = axonapi.api.mixins.Child(parent=apiobj) + assert format(apiobj) in format(child) + assert repr(apiobj) in repr(child) + + def test_child_parser(self, apiobj): + """Pass.""" + child = MockParser(parent=apiobj, raw={}) + assert format(apiobj) in format(child) + assert repr(apiobj) in repr(child) diff --git a/axonius_api_client/tests/api/test_routers.py b/axonius_api_client/tests/tests_api/test_api_routers.py similarity index 55% rename from axonius_api_client/tests/api/test_routers.py rename to axonius_api_client/tests/tests_api/test_api_routers.py index e1127f24..bb4fbd75 100644 --- a/axonius_api_client/tests/api/test_routers.py +++ b/axonius_api_client/tests/tests_api/test_api_routers.py @@ -1,18 +1,15 @@ # -*- coding: utf-8 -*- """Test suite for axonius_api_client.auth.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import absolute_import, division, print_function, unicode_literals -from axonius_api_client import api +import axonius_api_client as axonapi def test_api_v1_routers(): """Test api.routers.ApiV1.""" - api_routes = api.routers.ApiV1 + api_routes = axonapi.api.routers.ApiV1 for obj in api_routes.all_objects: - assert isinstance(obj, api.routers.Router) + assert isinstance(obj, axonapi.api.routers.Router) assert obj._object_type in format(obj) assert obj._object_type in repr(obj) for route in obj._routes: diff --git a/axonius_api_client/tests/tests_api/test_api_users_devices.py b/axonius_api_client/tests/tests_api/test_api_users_devices.py new file mode 100644 index 00000000..fdc0ca6c --- /dev/null +++ b/axonius_api_client/tests/tests_api/test_api_users_devices.py @@ -0,0 +1,1356 @@ +# -*- coding: utf-8 -*- +"""Test suite for 
axonapi.api.users_devices.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import re + +import pytest + +import axonius_api_client as axonapi +from axonius_api_client import constants, exceptions, tools + +from .. import utils + +FIELD_FORMATS = ["discrete", "image", "date-time", "table", "ip", "subnet", "version"] +SCHEMA_FIELD_FORMATS = [ + "image", + "date-time", + "table", + "logo", + "tag", + "ip", + "subnet", + "version", +] +FIELD_TYPES = ["string", "bool", "array", "integer", "number"] + +QUERY_ID = '((internal_axon_id == "{internal_axon_id}"))'.format +QUERY_EQ = '(({f} == "{v}"))'.format +QUERY_FIELD_EXISTS = '(({field} == ({{"$exists":true,"$ne": ""}})))'.format +""" +# multi +((internal_axon_id == ({"$exists":true,"$ne":""}))) + and +((specific_data.data.username == ({"$exists":true,"$ne":""}))) + and +((specific_data.data.mail == ({"$exists":true,"$ne":""}))) + +# single +((internal_axon_id == ({"$exists":true,"$ne":""}))) + +""" + +USERS_TEST_DATA = { + "adapters": [ + {"search": "generic", "exp": "generic"}, + {"search": "active_directory_adapter", "exp": "active_directory"}, + {"search": "active_directory", "exp": "active_directory"}, + ], + "single_field": { + "search": "generic:username", + "exp": "specific_data.data.username", + }, + "fields": [ + {"search": "username", "exp": ["specific_data.data.username"]}, + {"search": "generic:username", "exp": ["specific_data.data.username"]}, + {"search": "mail", "exp": ["specific_data.data.mail"]}, + {"search": "generic:mail", "exp": ["specific_data.data.mail"]}, + { + "search": "generic:mail,username", + "exp": ["specific_data.data.mail", "specific_data.data.username"], + }, + { + "search": "active_directory:username", + "exp": ["adapters_data.active_directory_adapter.username"], + }, + { + "search": "adapters_data.active_directory_adapter.username", + "exp": ["adapters_data.active_directory_adapter.username"], + }, + { + "search": "*,*,username", + "exp": 
["specific_data", "specific_data.data.username"], + }, + ], + "val_fields": [ + { + "search": ["active_directory:username", "generic:username", "mail"], + "exp": [ + "adapters_data.active_directory_adapter.username", + "specific_data.data.username", + "specific_data.data.mail", + ], + } + ], +} + +DEVICES_TEST_DATA = { + "adapters": [ + {"search": "generic", "exp": "generic"}, + {"search": "active_directory_adapter", "exp": "active_directory"}, + {"search": "active_directory", "exp": "active_directory"}, + ], + "single_field": { + "search": "generic:hostname", + "exp": "specific_data.data.hostname", + }, + "fields": [ + { + "search": "network_interfaces.ips", + "exp": ["specific_data.data.network_interfaces.ips"], + }, + { + "search": "generic:network_interfaces.ips", + "exp": ["specific_data.data.network_interfaces.ips"], + }, + {"search": "hostname", "exp": ["specific_data.data.hostname"]}, + {"search": "generic:hostname", "exp": ["specific_data.data.hostname"]}, + { + "search": "generic:hostname,network_interfaces.ips", + "exp": [ + "specific_data.data.hostname", + "specific_data.data.network_interfaces.ips", + ], + }, + { + "search": "active_directory:hostname", + "exp": ["adapters_data.active_directory_adapter.hostname"], + }, + { + "search": "adapters_data.active_directory_adapter.hostname", + "exp": ["adapters_data.active_directory_adapter.hostname"], + }, + { + "search": "*,*,hostname", + "exp": ["specific_data", "specific_data.data.hostname"], + }, + ], + "val_fields": [ + { + "search": [ + "active_directory:hostname", + "generic:hostname", + "network_interfaces.ips", + ], + "exp": [ + "adapters_data.active_directory_adapter.hostname", + "specific_data.data.hostname", + "specific_data.data.network_interfaces.ips", + ], + } + ], +} + + +class Base(object): + """Pass.""" + + @pytest.fixture(scope="class") + def apiobj(self, request, apicls): + """Pass.""" + auth = utils.get_auth(request) + + api = apicls(auth=auth) + + assert isinstance(api._default_fields, 
tools.LIST) + + utils.check_apiobj(authobj=auth, apiobj=api) + + utils.check_apiobj_children( + apiobj=api, + labels=axonapi.api.users_devices.Labels, + saved_query=axonapi.api.users_devices.SavedQuery, + fields=axonapi.api.users_devices.Fields, + reports=axonapi.api.users_devices.Reports, + ) + + utils.check_apiobj_xref(apiobj=api, adapters=axonapi.api.adapters.Adapters) + + api_type = api._router._object_type + + if api_type == "users": + api.TEST_DATA = USERS_TEST_DATA + else: + api.TEST_DATA = DEVICES_TEST_DATA + + api.ALL_FIELDS = api.fields.get() + + return api + + def get_single_asset( + self, apiobj, with_fields=None, fields=None, query=None, refetch=None + ): + """Pass.""" + if not query and refetch: + query = QUERY_ID(**refetch) + + if not query: + if not with_fields: + with_fields = apiobj._default_fields + + query_fields = tools.listify(with_fields) + query_fields = [x for x in query_fields if x not in ["labels"]] + query_lines = [QUERY_FIELD_EXISTS(field=x) for x in query_fields] + query = " and ".join(query_lines) + + if not fields: + fields = apiobj._default_fields + + data = apiobj._get(query=query, fields=fields, page_size=1) + assert isinstance(data, dict) + + assets = data["assets"] + assert len(assets) == 1 + + assert isinstance(assets, tools.LIST) + asset = assets[0] + + return asset + + def build_long_query(self, apiobj): + """Pass.""" + single_field = apiobj.TEST_DATA["single_field"]["exp"] + + qtmpl = '({} == "{}")'.format + vtmpl = "badwolf_{}".format + + long_query = [qtmpl(single_field, vtmpl(i)) for i in range(0, 1000)] + + return "not " + " or ".join(long_query) + + +@pytest.mark.parametrize("apicls", [axonapi.api.Users, axonapi.Devices], scope="class") +class TestBoth(Base): + """Pass.""" + + def test__count_post_false(self, apiobj): + """Pass.""" + data = apiobj._count(use_post=False) + assert isinstance(data, tools.INT) + + def test__count_query_len_forces_post(self, apiobj): + """Pass.""" + long_query = self.build_long_query(apiobj) 
+ + data = apiobj._count(query=long_query, use_post=False) + assert isinstance(data, tools.INT) + + response = apiobj._auth._http._LAST_RESPONSE + assert response.request.method == "POST" + + def test__get_post_false(self, apiobj): + """Pass.""" + data = apiobj._get(page_size=1, use_post=False) + assert isinstance(data, dict) + assert isinstance(data["assets"], tools.LIST) + assert len(data["assets"]) == 1 + assert isinstance(data["assets"][0], dict) + + def test__get_page_size_over_max(self, apiobj): + """Pass.""" + data = apiobj._get(page_size=3000, use_post=False) + assert isinstance(data, dict) + assert isinstance(data["assets"], tools.LIST) + + response = apiobj._auth._http._LAST_RESPONSE + assert "limit=2000" in response.request.url + + def test__get_query_len_forces_post(self, apiobj): + """Pass.""" + long_query = self.build_long_query(apiobj) + fields = [apiobj.TEST_DATA["single_field"]["exp"]] + + data = apiobj._get(query=long_query, fields=fields, page_size=1, use_post=False) + assert isinstance(data, dict) + assert isinstance(data["assets"], tools.LIST) + + total = data["page"]["totalResources"] + + # FUTURE: overcome use_post ignoring limit + if total < constants.MAX_PAGE_SIZE: + assert len(data["assets"]) == total + else: + assert len(data["assets"]) == constants.MAX_PAGE_SIZE + + response = apiobj._auth._http._LAST_RESPONSE + assert response.request.method == "POST" + + def test__get_by_id(self, apiobj): + """Pass.""" + asset = self.get_single_asset(apiobj=apiobj) + data = apiobj._get_by_id(id=asset["internal_axon_id"]) + assert isinstance(data, dict) + + def test_count(self, apiobj): + """Pass.""" + data = apiobj.count() + assert isinstance(data, tools.INT) + + def test_get(self, apiobj): + """Pass.""" + data = apiobj.get(max_rows=1) + assert isinstance(data, tools.LIST) + assert len(data) == 1 + + def test_get_maxpages(self, apiobj): + """Pass.""" + data = apiobj.get(max_rows=22, max_pages=1) + assert isinstance(data, tools.LIST) + assert len(data) 
== 22 + + def test_get_id(self, apiobj): + """Pass.""" + asset = self.get_single_asset(apiobj=apiobj, fields=apiobj._default_fields) + + data = apiobj.get_by_id(id=asset["internal_axon_id"]) + assert isinstance(data, dict) + assert data["internal_axon_id"] == asset["internal_axon_id"] + + def test_get_id_error(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.get_by_id(id="badwolf") + + def test_count_by_saved_query(self, apiobj): + """Pass.""" + sqs = apiobj.saved_query.get() + sq = sqs[0] + sq_name = sq["name"] + data = apiobj.count_by_saved_query(name=sq_name) + assert isinstance(data, tools.INT) + + def test_get_by_saved_query(self, apiobj): + """Pass.""" + sqs = apiobj.saved_query.get() + sq = sqs[0] + sq_name = sq["name"] + data = apiobj.get_by_saved_query(name=sq_name, max_rows=1) + assert isinstance(data, tools.LIST) + + def test_get_by_field_value(self, apiobj): + """Pass.""" + asset = self.get_single_asset(apiobj=apiobj, fields=apiobj._default_fields) + assert isinstance(asset, dict) + field = apiobj.TEST_DATA["single_field"]["exp"] + value = asset[field] + + assert value + found = apiobj.get_by_value( + value=value, field=field, fields=apiobj._default_fields, match_count=1 + ) + assert found[field] == value + + def test_get_by_field_value_list(self, apiobj): + """Pass.""" + asset = self.get_single_asset(apiobj=apiobj, fields=apiobj._default_fields) + assert isinstance(asset, dict) + field = apiobj.TEST_DATA["single_field"]["exp"] + value = asset[field] + value_list = tools.listify(value) + assert value + found = apiobj.get_by_value( + value=value_list, field=field, fields=apiobj._default_fields, match_count=1 + ) + found_value = found[field] + + assert tools.listify(found_value) == value_list + + def test_get_by_field_value_regex(self, apiobj): + """Pass.""" + field = apiobj.TEST_DATA["single_field"]["exp"] + value_re = "[a-zA-Z]" + found = apiobj.get_by_value( + value="RE:{}".format(value_re), field=field, 
match_count=1, max_rows=1 + ) + assert isinstance(found, dict) + assert any( + [ + re.search(value_re, f) + for f in tools.listify(obj=found[field], dictkeys=False) + ] + ) + + def test_get_by_field_value_not(self, apiobj): + """Pass.""" + count = apiobj.count() + asset = self.get_single_asset(apiobj=apiobj, fields=apiobj._default_fields) + assert isinstance(asset, dict) + field = apiobj.TEST_DATA["single_field"]["exp"] + + asset_value = asset[field] + if isinstance(asset_value, tools.LIST): + value = ["NOT:{}".format(x) for x in asset_value] + else: + value = "NOT:{}".format(asset_value) + found = apiobj.get_by_value( + value=value, field=field, fields=apiobj._default_fields, match_error=False + ) + assert isinstance(found, tools.LIST) + assert len(found) == count - 1 + + def test_get_by_field_value_match_error(self, apiobj): + """Pass.""" + field = apiobj.TEST_DATA["single_field"]["exp"] + with pytest.raises(exceptions.ValueNotFound): + apiobj.get_by_value( + value="BaDWoLf_8675309", + field=field, + match_count=1, + max_rows=2, + match_error=True, + ) + + +class Single(Base): + """Pass.""" + + def get_by_specifics(self, apiobj, specfield, specmethod): + """Pass.""" + asset = self.get_single_asset(apiobj=apiobj, fields=specfield) + asset_value = asset[specfield] + value = tools.listify(obj=asset_value)[0] + query_post = "and {}".format(QUERY_FIELD_EXISTS(field=specfield)) + found = getattr(apiobj, specmethod)( + value=value, query_post=query_post, match_count=1, fields=specfield + ) + assert isinstance(found, dict) + found_value = found[specfield] + assert tools.listify(found_value) == tools.listify(asset_value) + + +@pytest.mark.parametrize("apicls", [axonapi.api.Users], scope="class") +class TestUsers(Single): + """Pass.""" + + @pytest.mark.parametrize( + "specfield,specmethod", + [ + ["specific_data.data.username", "get_by_username"], + ["specific_data.data.mail", "get_by_mail"], + ], + scope="class", + ) + def test_get_by_specifics(self, apiobj, specfield, 
specmethod): + """Pass.""" + self.get_by_specifics(apiobj=apiobj, specfield=specfield, specmethod=specmethod) + + +@pytest.mark.parametrize("apicls", [axonapi.api.Devices], scope="class") +class TestDevices(Single): + """Pass.""" + + @pytest.mark.parametrize( + "specfield,specmethod", + [ + ["specific_data.data.hostname", "get_by_hostname"], + ["specific_data.data.network_interfaces.mac", "get_by_mac"], + ["specific_data.data.network_interfaces.ips", "get_by_ip"], + ], + scope="class", + ) + def test_get_by_specifics(self, apiobj, specfield, specmethod): + """Pass.""" + self.get_by_specifics(apiobj=apiobj, specfield=specfield, specmethod=specmethod) + + def test_get_by_subnet(self, apiobj): + """Pass.""" + specfield = "specific_data.data.network_interfaces.subnets" + findfield = "specific_data.data.network_interfaces.ips" + withfields = [findfield, specfield] + asset = self.get_single_asset( + apiobj=apiobj, with_fields=withfields, fields=withfields + ) + assert specfield in asset, list(asset) + asset_value = asset[specfield] + + value = tools.listify(obj=asset_value)[0] + + found = apiobj.get_by_subnet( + value=value, + max_rows=1, + fields=findfield, + query_post=" and {}".format(QUERY_FIELD_EXISTS(field=findfield)), + ) + + assert isinstance(found, tools.LIST) + + found_value = tools.listify(obj=found[0][findfield])[0] + assert found_value == tools.listify(obj=asset[findfield], dictkeys=False)[0] + + def test_get_by_subnet_not(self, apiobj): + """Pass.""" + specfield = "specific_data.data.network_interfaces.subnets" + findfield = "specific_data.data.network_interfaces.ips" + withfields = [findfield, specfield] + asset = self.get_single_asset( + apiobj=apiobj, with_fields=withfields, fields=withfields + ) + assert specfield in asset, list(asset) + asset_value = asset[specfield] + + value = tools.listify(obj=asset_value)[0] + + found = apiobj.get_by_subnet( + value="NOT:{}".format(value), + max_rows=1, + fields=findfield, + query_post=" and 
{}".format(QUERY_FIELD_EXISTS(field=findfield)), + ) + # could do value checking here, but we'd have to get every asset + # lets not do that... + assert isinstance(found, tools.LIST) + + +@pytest.mark.parametrize("apicls", [axonapi.api.Users, axonapi.Devices], scope="class") +class TestReports(Base): + """Pass.""" + + @pytest.fixture(scope="class") + def report_data(self, apiobj): + """Pass.""" + rows = apiobj.get(max_rows=20) + assert isinstance(rows, tools.LIST) + assert len(rows) == 20 + return rows + + def test_missing_adapters(self, apiobj, report_data): + """Pass.""" + report = apiobj.reports.missing_adapters(rows=report_data) + assert isinstance(report, tools.LIST) + for item in report: + assert isinstance(item, dict) + assert isinstance(item["missing"], tools.LIST) + assert isinstance(item["missing_nocnx"], tools.LIST) + + +@pytest.mark.parametrize("apicls", [axonapi.api.Users, axonapi.Devices], scope="class") +class TestFields(Base): + """Pass.""" + + def test__get(self, apiobj): + """Pass.""" + fields = apiobj.fields._get() + assert isinstance(fields, dict) + assert isinstance(fields["generic"], tools.LIST) + assert isinstance(fields["specific"], dict) + assert isinstance(fields["schema"], dict) + + def test_get(self, apiobj): + """Pass.""" + fields = apiobj.fields.get() + assert isinstance(fields, dict) + assert isinstance(fields["generic"], dict) + + def test_find_adapter(self, apiobj): + """Pass.""" + for info in apiobj.TEST_DATA["adapters"]: + isearch = info["search"] + iexp = info["exp"] + searches = [ + isearch, + isearch + ("_adapter" if not isearch.endswith("_adapter") else ""), + isearch.upper(), + isearch + " ", + " " + isearch + " ", + iexp, + iexp.upper(), + iexp + " ", + " " + iexp + " ", + ] + for search in searches: + name, obj_fields = apiobj.fields.find_adapter( + adapter=search, all_fields=apiobj.ALL_FIELDS + ) + assert isinstance(name, tools.STR) + assert isinstance(obj_fields, dict) + assert name == info["exp"] + + def 
test_find_adapter_error_true(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.fields.find_adapter( + adapter="badwolf", all_fields=apiobj.ALL_FIELDS, error=True + ) + + def test_find_adapter_error_false(self, apiobj): + """Pass.""" + name, fields = apiobj.fields.find_adapter( + adapter="badwolf", all_fields=apiobj.ALL_FIELDS, error=False + ) + assert name is None + assert fields == {} + + def test_find(self, apiobj): + """Pass.""" + for info in apiobj.TEST_DATA["fields"]: + isearch = info["search"] + searches = [isearch, isearch.upper(), re.sub(":", " : ", isearch)] + for search in searches: + found = apiobj.fields.find(field=search, all_fields=apiobj.ALL_FIELDS) + assert isinstance(found, tools.LIST) + for x in found: + assert isinstance(x, tools.STR) + assert found == info["exp"] + + def test_find_manual(self, apiobj): + """Pass.""" + found = apiobj.fields.find(field="MANUAL:badwolf", all_fields=apiobj.ALL_FIELDS) + assert found == "badwolf" + + def test_find_bad_adapter(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.fields.find(field="badwolf:badwolf", all_fields=apiobj.ALL_FIELDS) + + def test_find_bad_adapter_noerr(self, apiobj): + """Pass.""" + found = apiobj.fields.find( + field="badwolf:badwolf", all_fields=apiobj.ALL_FIELDS, error=False + ) + assert isinstance(found, tools.LIST) + assert not found + + def test_find_bad_field(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.fields.find("generic:badwolf", all_fields=apiobj.ALL_FIELDS) + + def test_find_bad_field_noerr(self, apiobj): + """Pass.""" + found = apiobj.fields.find( + field="generic:badwolf", all_fields=apiobj.ALL_FIELDS, error=False + ) + assert isinstance(found, tools.LIST) + assert not found + + def test_validatedefault_false(self, apiobj): + """Pass.""" + for info in apiobj.TEST_DATA["val_fields"]: + isearch = info["search"] + iexp = info["exp"] + found = apiobj.fields.validate( + 
fields=isearch, all_fields=apiobj.ALL_FIELDS, default=False + ) + assert isinstance(found, tools.LIST) + assert found == iexp + + def test_validatedefault_true(self, apiobj): + """Pass.""" + apifields = apiobj._default_fields + + for info in apiobj.TEST_DATA["val_fields"]: + isearch = info["search"] + iexp = list(info["exp"]) + found = apiobj.fields.validate( + fields=isearch, all_fields=apiobj.ALL_FIELDS, default=True + ) + assert isinstance(found, tools.LIST) + + for x in apifields: + if x not in iexp: + iexp.append(x) + + assert sorted(found) == sorted(iexp) + + def test_validate_ignores(self, apiobj): + """Pass.""" + found = apiobj.fields.validate( + fields=[None, {}, [], ""], all_fields=apiobj.ALL_FIELDS, default=False + ) + assert isinstance(found, tools.LIST) + assert not found + + def test_validate_nofields_default_false(self, apiobj): + """Pass.""" + found = apiobj.fields.validate(all_fields=apiobj.ALL_FIELDS, default=False) + assert isinstance(found, tools.LIST) + assert not found + + def test_validate_nofields_default_true(self, apiobj): + """Pass.""" + found = apiobj.fields.validate(all_fields=apiobj.ALL_FIELDS, default=True) + assert isinstance(found, tools.LIST) + assert found == apiobj._default_fields + + +@pytest.mark.parametrize("apicls", [axonapi.api.Users, axonapi.Devices], scope="class") +class TestLabels(Base): + """Pass.""" + + def test__get(self, apiobj): + """Pass.""" + fields = apiobj.labels._get() + assert isinstance(fields, tools.LIST) + for x in fields: + assert isinstance(x, tools.STR) + + def test_get(self, apiobj): + """Pass.""" + fields = apiobj.labels._get() + assert isinstance(fields, tools.LIST) + for x in fields: + assert isinstance(x, tools.STR) + + def test__add_get_remove(self, apiobj): + """Pass.""" + fields = ["labels"] + labels = ["badwolf"] + + # get a single asset to add a label to + asset_tolabel = self.get_single_asset( + apiobj=apiobj, fields=fields, query=None, refetch=None + ) + + id_tolabel = 
asset_tolabel["internal_axon_id"] + assert isinstance(id_tolabel, tools.STR) + + # add the label to the asset + result_tolabel = apiobj.labels._add(labels=labels, ids=[id_tolabel]) + assert result_tolabel == 1 + + # re-get the asset and check that it has the label + asset_haslabel = self.get_single_asset( + apiobj=apiobj, fields=fields, query=None, refetch=asset_tolabel + ) + + id_haslabel = asset_haslabel["internal_axon_id"] + assert isinstance(id_haslabel, tools.STR) + + for label in labels: + assert label in asset_haslabel["labels"] + + # check that the label is in all the labels on the system + alllabels = apiobj.labels._get() + assert isinstance(alllabels, tools.LIST) + + for label in alllabels: + assert isinstance(label, tools.STR) + + for label in labels: + assert label in alllabels + + # remove the label from an asset + result_nolabel = apiobj.labels._remove(labels=labels, ids=[id_haslabel]) + assert result_nolabel == 1 + + # re-get the asset and check that it has the label + asset_nolabel = self.get_single_asset( + apiobj=apiobj, fields=fields, query=None, refetch=asset_haslabel + ) + id_nolabel = asset_nolabel["internal_axon_id"] + assert isinstance(id_nolabel, tools.STR) + + assert id_tolabel == id_nolabel == id_haslabel + + for label in labels: + assert label not in asset_nolabel.get("labels", []), asset_nolabel + + def test_add_get_remove(self, apiobj): + """Pass.""" + fields = ["labels"] + labels = ["badwolf"] + + # get a single asset to add a label to + asset_tolabel = self.get_single_asset( + apiobj=apiobj, fields=fields, query=None, refetch=None + ) + + id_tolabel = asset_tolabel["internal_axon_id"] + assert isinstance(id_tolabel, tools.STR) + + # add the label to the asset + result_tolabel = apiobj.labels.add(labels=labels, rows=[asset_tolabel]) + assert result_tolabel == 1 + + # re-get the asset and check that it has the label + asset_haslabel = self.get_single_asset( + apiobj=apiobj, fields=fields, query=None, refetch=asset_tolabel + ) + + 
id_haslabel = asset_haslabel["internal_axon_id"] + assert isinstance(id_haslabel, tools.STR) + + for label in labels: + assert label in asset_haslabel["labels"] + + # check that the label is in all the labels on the system + alllabels = apiobj.labels.get() + assert isinstance(alllabels, tools.LIST) + + for label in alllabels: + assert isinstance(label, tools.STR) + + for label in labels: + assert label in alllabels + + # remove the label from the asset + nolabel_result = apiobj.labels.remove(labels=labels, rows=[asset_haslabel]) + assert nolabel_result == 1 + + # re-get the asset and check that it has the label + asset_nolabel = self.get_single_asset( + apiobj=apiobj, fields=fields, query=None, refetch=asset_haslabel + ) + id_nolabel = asset_nolabel["internal_axon_id"] + assert isinstance(id_nolabel, tools.STR) + + assert id_tolabel == id_nolabel == id_haslabel + + for label in labels: + assert label not in asset_nolabel.get("labels", []), asset_nolabel + + +@pytest.mark.parametrize("apicls", [axonapi.api.Users, axonapi.Devices], scope="class") +class TestSavedQuery(Base): + """Pass.""" + + def test__get(self, apiobj): + """Pass.""" + data = apiobj.saved_query._get() + assert isinstance(data, dict) + + assets = data["assets"] + assert isinstance(assets, tools.LIST) + + for asset in assets: + assert isinstance(asset, dict) + + assert asset["query_type"] in ["saved"] + + str_keys = [ + "associated_user_name", + "date_fetched", + "name", + "query_type", + "timestamp", + "user_id", + "uuid", + ] + + for str_key in str_keys: + val = asset.pop(str_key) + assert isinstance(val, tools.STR) + + predefined = asset.pop("predefined", False) + assert isinstance(predefined, bool) + + view = asset.pop("view") + assert isinstance(view, dict) + + colsizes = view.pop("coloumnSizes", []) + assert isinstance(colsizes, tools.LIST) + + colfilters = view.pop("colFilters", {}) + assert isinstance(colfilters, dict) + for k, v in colfilters.items(): + assert isinstance(k, tools.STR) + 
assert isinstance(v, tools.STR) + + for x in colsizes: + assert isinstance(x, tools.INT) + + fields = view.pop("fields") + assert isinstance(fields, tools.LIST) + + for x in fields: + assert isinstance(x, tools.STR) + + page = view.pop("page", 0) + assert isinstance(page, tools.INT) + + pagesize = view.pop("pageSize", 0) + assert isinstance(pagesize, tools.INT) + + sort = view.pop("sort") + assert isinstance(sort, dict) + + sort_desc = sort.pop("desc") + assert isinstance(sort_desc, bool) + + sort_field = sort.pop("field") + assert isinstance(sort_field, tools.STR) + + query = view.pop("query") + assert isinstance(query, dict) + + qfilter = query.pop("filter") + assert isinstance(qfilter, tools.STR) + + qexprs = query.pop("expressions", []) + assert isinstance(qexprs, tools.LIST) + + historical = view.pop("historical", None) + assert historical is None or isinstance(historical, tools.SIMPLE) + # FUTURE: what else besides None?? bool? + + for qexpr in qexprs: + assert isinstance(qexpr, dict) + + compop = qexpr.pop("compOp") + field = qexpr.pop("field") + idx = qexpr.pop("i", 0) + leftbracket = qexpr.pop("leftBracket") + rightbracket = qexpr.pop("rightBracket") + logicop = qexpr.pop("logicOp") + notflag = qexpr.pop("not") + value = qexpr.pop("value") + obj = qexpr.pop("obj", False) + nesteds = qexpr.pop("nested", []) + fieldtype = qexpr.pop("fieldType", "") + + assert isinstance(compop, tools.STR) + assert isinstance(field, tools.STR) + assert isinstance(idx, tools.INT) + assert isinstance(leftbracket, bool) + assert isinstance(rightbracket, bool) + assert isinstance(logicop, tools.STR) + assert isinstance(notflag, bool) + assert isinstance(value, tools.SIMPLE) or value is None + assert isinstance(obj, bool) + assert isinstance(nesteds, tools.LIST) + assert isinstance(fieldtype, tools.STR) + + for nested in nesteds: + assert isinstance(nested, dict) + + ncondition = nested.pop("condition") + assert isinstance(ncondition, tools.STR) + + nexpr = 
nested.pop("expression") + assert isinstance(nexpr, dict) + + nidx = nested.pop("i") + assert isinstance(nidx, tools.INT) + + assert not nested, list(nested) + + assert not qexpr, list(qexpr) + + assert not query, list(query) + assert not sort, list(sort) + assert not view, list(view) + assert not asset, list(asset) + + def test__get_query(self, apiobj): + """Pass.""" + query = 'name == regex(".*", "i")' + data = apiobj.saved_query._get(query=query) + assert isinstance(data, dict) + + assets = data["assets"] + assert isinstance(assets, tools.LIST) + assert len(data["assets"]) == data["page"]["totalResources"] + + def test_get(self, apiobj): + """Pass.""" + rows = apiobj.saved_query.get() + assert isinstance(rows, tools.LIST) + for row in rows: + assert isinstance(row, dict) + + def test_get_maxpages(self, apiobj): + """Pass.""" + data = apiobj.saved_query.get(max_pages=1, max_rows=1) + assert isinstance(data, tools.LIST) + assert len(data) == 1 + + def test_get_empty(self, apiobj): + """Pass.""" + query = 'name == "badwolf_notfound"' + data = apiobj.saved_query.get(query=query) + assert isinstance(data, tools.LIST) + assert not data + + def test_get_by_id(self, apiobj): + """Pass.""" + data = apiobj.saved_query.get() + found = apiobj.saved_query.get_by_id(value=data[0]["uuid"]) + assert isinstance(found, dict) + + def test_get_by_id_notfound_noerr(self, apiobj): + """Pass.""" + found = apiobj.saved_query.get_by_id(value="badwolf", match_error=False) + assert found is None + + def test_get_by_id_notfound(self, apiobj): + """Pass.""" + with pytest.raises(exceptions.ValueNotFound): + apiobj.saved_query.get_by_id(value="badwolf") + + def test_get_by_name(self, apiobj): + """Pass.""" + data = apiobj.saved_query.get() + found = apiobj.saved_query.get_by_name(value=data[0]["name"]) + assert isinstance(found, dict) + + def test_get_by_name_regex(self, apiobj): + """Pass.""" + found = apiobj.saved_query.get_by_name(value="RE:.*") + assert isinstance(found, tools.LIST) + 
assert len(found) >= 1 + + def test_get_by_name_notflag(self, apiobj): + """Pass.""" + data = apiobj.saved_query.get() + found = apiobj.saved_query.get_by_name(value="NOT:{}".format(data[0]["name"])) + assert isinstance(found, tools.LIST) + assert len(found) == len(data) - 1 + + def test__add_get_delete(self, apiobj): + """Pass.""" + name = "badwolf_test__add_get_delete" + + asset = self.get_single_asset(apiobj=apiobj, query=None, refetch=None) + + query = QUERY_ID(**asset) + + fields = [apiobj.TEST_DATA["single_field"]["exp"]] + + added = apiobj.saved_query._add(name=name, query=query, fields=fields) + assert isinstance(added, tools.STR) + + got = apiobj.saved_query._get(query='name == "{}"'.format(name)) + assert isinstance(got, dict) + assert isinstance(got["assets"], tools.LIST) + assert len(got["assets"]) == 1 + assert isinstance(got["assets"][0], dict) + + deleted = apiobj.saved_query._delete(ids=[added]) + assert not deleted + + re_got = apiobj.saved_query._get(query='name == "{}"'.format(name)) + assert isinstance(re_got, dict) + assert isinstance(re_got["assets"], tools.LIST) + assert len(re_got["assets"]) == 0 + + def test_add_delete(self, apiobj): + """Pass.""" + name = "badwolf_test_add_delete" + + asset = self.get_single_asset(apiobj=apiobj, query=None, refetch=None) + + query = QUERY_ID(**asset) + + added = apiobj.saved_query.add(name=name, query=query) + assert isinstance(added, dict) + assert added["name"] == name + assert added["view"]["query"]["filter"] == query + + deleted = apiobj.saved_query.delete(rows=added) + assert not deleted + + with pytest.raises(exceptions.ValueNotFound): + apiobj.saved_query.get_by_name(name) + + def test_add_delete_sort(self, apiobj): + """Pass.""" + name = "badwolf_test_add_delete_sort" + single_field = apiobj.TEST_DATA["single_field"] + asset = self.get_single_asset(apiobj=apiobj, query=None, refetch=None) + + query = QUERY_ID(**asset) + + added = apiobj.saved_query.add( + name=name, query=query, 
sort=single_field["search"] + ) + assert isinstance(added, dict) + assert added["name"] == name + assert added["view"]["query"]["filter"] == query + assert added["view"]["sort"]["field"] == single_field["exp"] + + deleted = apiobj.saved_query.delete(rows=added) + assert not deleted + + with pytest.raises(exceptions.ValueNotFound): + apiobj.saved_query.get_by_name(name) + + def test_add_delete_colfilter(self, apiobj): + """Pass.""" + name = "badwolf_test_add_delete_colfilter" + single_field = apiobj.TEST_DATA["single_field"] + asset = self.get_single_asset(apiobj=apiobj, query=None, refetch=None) + + column_filters = {single_field["search"]: "a"} + exp_column_filters = {single_field["exp"]: "a"} + + query = QUERY_ID(**asset) + + added = apiobj.saved_query.add( + name=name, query=query, column_filters=column_filters + ) + assert isinstance(added, dict) + assert added["name"] == name + assert added["view"]["query"]["filter"] == query + assert added["view"]["colFilters"] == exp_column_filters + + deleted = apiobj.saved_query.delete(rows=added) + assert not deleted + + with pytest.raises(exceptions.ValueNotFound): + apiobj.saved_query.get_by_name(name) + + +@pytest.mark.parametrize( + "apicls", [(axonapi.api.Users), (axonapi.Devices)], scope="class" +) +class TestParsedFields(Base): + """Pass.""" + + def test_fields(self, apiobj): + """Pass.""" + raw = apiobj.fields._get() + parser = axonapi.api.users_devices.ParserFields(raw=raw, parent=apiobj) + fields = parser.parse() + + with pytest.raises(exceptions.ApiError): + parser._exists("generic", fields, "dumb test") + + assert isinstance(fields, dict) + + for aname, afields in fields.items(): + assert not aname.endswith("_adapter") + assert isinstance(afields, dict) + + if aname == "generic": + gall_data = afields.pop("all_data") + assert isinstance(gall_data, dict) + + gall_data_name = gall_data.pop("name") + gall_data_type = gall_data.pop("type") + gall_data_prefix = gall_data.pop("adapter_prefix") + gall_data_title = 
gall_data.pop("title") + + assert not gall_data, list(gall_data) + assert gall_data_name == "specific_data.data" + assert gall_data_type == "array" + assert gall_data_prefix == "specific_data" + assert gall_data_title == "All data subsets for generic adapter" + + gall = afields.pop("all") + assert isinstance(gall, dict) + + gall_name = gall.pop("name") + gall_type = gall.pop("type") + gall_prefix = gall.pop("adapter_prefix") + gall_title = gall.pop("title") + + assert not gall, list(gall) + assert gall_name == "specific_data" + assert gall_type == "array" + assert gall_prefix == "specific_data" + assert gall_title == "All data for generic adapter" + + else: + graw = afields.pop("raw") + assert isinstance(graw, dict) + assert graw["name"].endswith(".raw") + + gall = afields.pop("all") + assert isinstance(gall, dict) + assert gall["name"] == "adapters_data.{}_adapter".format(aname) + + for fname, finfo in afields.items(): + self.val_field(fname, finfo, aname) + + def val_field(self, fname, finfo, aname): + """Pass.""" + assert isinstance(finfo, dict) + + # common + name = finfo.pop("name") + type = finfo.pop("type") + prefix = finfo.pop("adapter_prefix") + title = finfo.pop("title") + + assert isinstance(name, tools.STR) and name + assert isinstance(title, tools.STR) and title + assert isinstance(prefix, tools.STR) and prefix + assert isinstance(type, tools.STR) and type + + # uncommon + items = finfo.pop("items", {}) + sort = finfo.pop("sort", False) + unique = finfo.pop("unique", False) + branched = finfo.pop("branched", False) + enums = finfo.pop("enum", []) + description = finfo.pop("description", "") + dynamic = finfo.pop("dynamic", False) + format = finfo.pop("format", "") + + assert isinstance(items, dict) + assert isinstance(sort, bool) + assert isinstance(unique, bool) + assert isinstance(branched, bool) + assert isinstance(enums, tools.LIST) + assert isinstance(description, tools.STR) + assert isinstance(dynamic, bool) + assert isinstance(format, tools.STR) 
+ + assert not finfo, list(finfo) + + assert type in FIELD_TYPES, type + + if name not in ["labels", "adapters", "internal_axon_id"]: + if aname == "generic": + assert name.startswith("specific_data") + else: + assert name.startswith(prefix) + + for enum in enums: + assert isinstance(enum, tools.STR) or tools.is_int(enum) + + if format: + assert format in FIELD_FORMATS, format + + val_items(aname=aname, items=items) + + +@pytest.mark.parametrize( + "apicls", [(axonapi.api.Users), (axonapi.Devices)], scope="class" +) +class TestRawFields(Base): + """Pass.""" + + def test_fields(self, apiobj): + """Pass.""" + raw = apiobj.fields._get() + assert isinstance(raw, dict) + + schema = raw.pop("schema") + generic = raw.pop("generic") + specific = raw.pop("specific") + + assert not raw, list(raw) + assert isinstance(schema, dict) + assert isinstance(generic, tools.LIST) + assert isinstance(specific, dict) + + generic_schema = schema.pop("generic") + specific_schema = schema.pop("specific") + + assert isinstance(generic_schema, dict) + assert isinstance(specific_schema, dict) + assert not schema, list(schema) + + self.val_schema(aname="generic", schema=generic_schema) + + self.val_fields(aname="generic", afields=generic) + + for aname, afields in specific.items(): + self.val_fields(aname=aname, afields=afields) + + aschema = specific_schema.pop(aname) + + assert isinstance(aschema, dict) + + self.val_schema(aname=aname, schema=aschema) + + assert not specific_schema, list(specific_schema) + + def val_schema(self, aname, schema): + """Pass.""" + assert isinstance(schema, dict) + + items = schema.pop("items") + required = schema.pop("required") + type = schema.pop("type") + + assert not schema, list(schema) + + assert isinstance(items, tools.LIST) + assert isinstance(required, tools.LIST) + assert type == "array" + + for req in required: + assert isinstance(req, tools.STR) + + for item in items: + assert item + val_items(aname=aname, items=item) + + def val_fields(self, aname, 
afields): + """Pass.""" + assert isinstance(afields, tools.LIST) + + for field in afields: + # common + name = field.pop("name") + title = field.pop("title") + type = field.pop("type") + + assert isinstance(name, tools.STR) and name + assert isinstance(title, tools.STR) and title + assert isinstance(type, tools.STR) and type + + assert type in ["array", "string", "integer", "number", "bool"] + + # uncommon + branched = field.pop("branched", False) + description = field.pop("description", "") + enums = field.pop("enum", []) + format = field.pop("format", "") + items = field.pop("items", {}) + sort = field.pop("sort", False) + unique = field.pop("unique", False) + dynamic = field.pop("dynamic", False) + + assert isinstance(branched, bool) + assert isinstance(description, tools.STR) + assert isinstance(enums, tools.LIST) + assert isinstance(format, tools.STR) + assert isinstance(items, dict) + assert isinstance(sort, bool) + assert isinstance(unique, bool) + assert isinstance(dynamic, bool) + + assert not field, list(field) + + for enum in enums: + assert isinstance(enum, tools.STR) or tools.is_int(enum) + + val_items(aname=aname, items=items) + + +def val_items(aname, items): + """Pass.""" + assert isinstance(items, dict) + + if items: + # common + type = items.pop("type") + + assert isinstance(type, tools.STR) and type + assert type in FIELD_TYPES, type + + # uncommon + enums = items.pop("enum", []) + format = items.pop("format", "") + iitems = items.pop("items", []) + name = items.pop("name", "") + title = items.pop("title", "") + description = items.pop("description", "") + branched = items.pop("branched", False) + dynamic = items.pop("dynamic", False) + + if format: + assert format in SCHEMA_FIELD_FORMATS, format + + assert isinstance(enums, tools.LIST) + assert isinstance(iitems, tools.LIST) or isinstance(iitems, dict) + assert isinstance(format, tools.STR) + assert isinstance(name, tools.STR) + assert isinstance(title, tools.STR) + assert 
isinstance(description, tools.STR) + assert isinstance(branched, bool) + assert isinstance(dynamic, bool) + + assert not items, list(items) + + for enum in enums: + assert isinstance(enum, tools.STR) or tools.is_int(enum) + + if isinstance(iitems, dict): + val_items(aname=aname, items=iitems) + else: + for iitem in iitems: + val_items(aname=aname, items=iitem) diff --git a/axonius_api_client/tests/tests_cli/__init__.py b/axonius_api_client/tests/tests_cli/__init__.py new file mode 100644 index 00000000..10e5a108 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- +"""Test suite.""" +from __future__ import absolute_import, division, print_function, unicode_literals diff --git a/axonius_api_client/tests/tests_cli/test_cli_cmd_shell.py b/axonius_api_client/tests/tests_cli/test_cli_cmd_shell.py new file mode 100644 index 00000000..1f8cdc75 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_cmd_shell.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import os + +import pytest +from click.testing import CliRunner + +from axonius_api_client import cli, exceptions, tools + +from .. 
import utils + + +class TestCmdShell(object): + """Pass.""" + + def test_prompt(self, request, monkeypatch): + """Pass.""" + runner = CliRunner(mix_stderr=False) + + url = request.config.getoption("--ax-url") + key = request.config.getoption("--ax-key") + secret = request.config.getoption("--ax-secret") + + monkeypatch.delenv("AX_URL", raising=False) + monkeypatch.delenv("AX_KEY", raising=False) + monkeypatch.delenv("AX_SECRET", raising=False) + monkeypatch.setattr(cli.context, "load_dotenv", utils.mock_load_dotenv) + prompt_input = "\n".join([url, key, secret, "exit()"]) + + with runner.isolated_filesystem(): + histpath = tools.pathlib.Path(os.getcwd()) + histfile = histpath / cli.context.HISTFILE + monkeypatch.setattr(cli.context, "HISTPATH", format(histpath)) + + with pytest.warns(exceptions.BetaWarning): + result1 = runner.invoke(cli=cli.cli, args=["shell"], input=prompt_input) + + assert histfile.is_file(), list(histpath.iterdir()) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + outlines1 = stdout1.splitlines() + + assert outlines1[0] == "URL of Axonius instance: {}".format(url) + assert outlines1[1] == "API Key of user in Axonius instance: " + assert outlines1[2] == "API Secret of user in Axonius instance: " + + utils.check_stderr_lines(result1) + + def test_no_prompt(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + prompt_input = "\n".join(["exit()"]) + + with runner.isolated_filesystem(): + histpath = tools.pathlib.Path(os.getcwd()) + histfile = histpath / cli.context.HISTFILE + monkeypatch.setattr(cli.context, "HISTPATH", format(histpath)) + + with pytest.warns(exceptions.BetaWarning): + result1 = runner.invoke(cli=cli.cli, args=["shell"], input=prompt_input) + + assert histfile.is_file(), list(histpath.iterdir()) + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + 
+ assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + assert exit_code1 == 0 + + utils.check_stderr_lines(result1) diff --git a/axonius_api_client/tests/tests_cli/test_cli_context.py b/axonius_api_client/tests/tests_cli/test_cli_context.py new file mode 100644 index 00000000..96530cc2 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_context.py @@ -0,0 +1,565 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import os + +import pytest +from click.testing import CliRunner + +from axonius_api_client import cli, connect, tools + +from .. import utils + + +def badwolf_cb(x, **kwargs): + """Pass.""" + return ["a", "b"] + + +def to_json(ctx, raw_data, **kwargs): + """Pass.""" + return tools.json_dump(obj=raw_data, **kwargs) + + +class TestJoin(object): + """Pass.""" + + def test_kv(self): + """Pass.""" + x = cli.context.join_kv({"a": "b", "c": "d"}) + assert x == "\n a: b\n c: d" + + def test_tv(self): + """Pass.""" + x = cli.context.join_tv( + {"a": {"title": "a", "value": 1}, "b": {"title": "b", "value": 2}} + ) + assert x == "a: 1\nb: 2" + + def test_cr(self): + """Pass.""" + x = cli.context.join_cr(["a", "b"]) + assert x == "a\nb" + + +class TestToJson(object): + """Pass.""" + + def test_default(self): + """Pass.""" + x = cli.context.to_json(ctx=None, raw_data=[]) + assert x == "[]" + + +class TestCheckEmpty(object): + """Pass.""" + + @pytest.mark.parametrize("value", tools.EMPTY, scope="class") + def test_empty_value(self, value): + """Pass.""" + cli.context.check_empty( + ctx=None, + this_data=[], + prev_data=[], + value_type="badwolf", + value=value, + objtype="wolves", + known_cb=None, + known_cb_key="bad", + ) + + def test_empty_data(self, capsys): + """Pass.""" + ctx = cli.context.Context() + + with pytest.raises(SystemExit): + cli.context.check_empty( + ctx=ctx, + this_data=[], + prev_data=[{"a": "1", "b": "2"}], + 
value_type="badwolf", + value=["d", "e"], + objtype="wolves", + known_cb=badwolf_cb, + known_cb_key="x", + ) + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 5 + + stdout = captured.out.splitlines() + assert not stdout + + assert stderr[0] == "** ERROR: Valid wolves:" + assert stderr[1] == " a" + assert stderr[2] == " b" + assert stderr[3] == "" + assert stderr[4] == "** ERROR: No wolves found when searching by badwolf: d, e" + + def test_not_empty(self, capsys): + """Pass.""" + ctx = cli.context.Context() + + cli.context.check_empty( + ctx=ctx, + this_data=[{"a": "1"}], + prev_data=[{"a": "1", "b": "2"}], + value_type="badwolf", + value=["a"], + objtype="wolves", + known_cb=badwolf_cb, + known_cb_key="x", + ) + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + assert stderr[0] == "** Found 1 wolves by badwolf: a" + + +class TestCliJsonFromStream(object): + """Pass.""" + + def test_stdin_empty(self, monkeypatch, capsys): + """Pass.""" + ctx = cli.context.Context() + stream = tools.six.StringIO() + stream.name = "" + monkeypatch.setattr(stream, "isatty", lambda: True) + with pytest.raises(SystemExit): + cli.context.json_from_stream(ctx=ctx, stream=stream, src="--badwolf") + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + exp0 = "** ERROR: No input provided on for --badwolf" + assert stderr[0] == exp0 + + def test_file_empty(self, monkeypatch, capsys): + """Pass.""" + ctx = cli.context.Context() + stream = tools.six.StringIO() + stream.name = "/bad/wolf" + monkeypatch.setattr(stream, "isatty", lambda: False) + content = "" + stream.write(content) + stream.seek(0) + with pytest.raises(SystemExit): + cli.context.json_from_stream(ctx=ctx, stream=stream, src="--badwolf") + + captured = 
capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 2 + + stdout = captured.out.splitlines() + assert not stdout + + exp0 = "** Read {} bytes from /bad/wolf for --badwolf".format(len(content)) + exp1 = "** ERROR: Empty content supplied in /bad/wolf for --badwolf" + assert stderr[0] == exp0 + assert stderr[1] == exp1 + + def test_json_error(self, monkeypatch, capsys): + """Pass.""" + ctx = cli.context.Context() + stream = tools.six.StringIO() + stream.name = "/bad/wolf" + monkeypatch.setattr(stream, "isatty", lambda: False) + content = "{{{}}}}" + stream.write(content) + stream.seek(0) + with pytest.raises(SystemExit): + cli.context.json_from_stream(ctx=ctx, stream=stream, src="--badwolf") + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 3 + + stdout = captured.out.splitlines() + assert not stdout + + exp0 = "** Read {} bytes from /bad/wolf for --badwolf".format(len(content)) + exp1 = "** ERROR: WRAPPED EXCEPTION: json.decoder.JSONDecodeError" + exp2 = "Expecting property name enclosed in double quotes: line 1 column 2 (char 1)" # noqa + assert stderr[0] == exp0 + assert stderr[1] == exp1 + assert stderr[2] == exp2 + + def test_json_success(self, monkeypatch, capsys): + """Pass.""" + ctx = cli.context.Context() + stream = tools.six.StringIO() + stream.name = "/bad/wolf" + monkeypatch.setattr(stream, "isatty", lambda: False) + content = '[{"x": "v"}]' + stream.write(content) + stream.seek(0) + cli.context.json_from_stream(ctx=ctx, stream=stream, src="--badwolf") + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 2 + + stdout = captured.out.splitlines() + assert not stdout + + exp0 = "** Read {} bytes from /bad/wolf for --badwolf".format(len(content)) + assert stderr[0] == exp0 + assert stderr[1].startswith("** Loaded JSON content from") + + +class TestCliDictwriter(object): + """Pass.""" + + def test_default(self): + """Pass.""" + rows = 
[{"x": "1"}] + data = cli.context.dictwriter(rows=rows) + assert data in ['"x"\r\n"1"\r\n', '"x"\n"1"\n'] + + +class TestCliWriteHistFile(object): + """Pass.""" + + def test_default(self, monkeypatch): + """Pass.""" + runner = CliRunner(mix_stderr=False) + + with runner.isolated_filesystem(): + histpath = tools.pathlib.Path(os.getcwd()) + histfile = histpath / cli.context.HISTFILE + monkeypatch.setattr(cli.context, "HISTPATH", format(histpath)) + cli.context.write_hist_file() + assert histfile.is_file(), list(histpath.iterdir()) + + +class TestCliExcWrap(object): + """Pass.""" + + def test_exc_wrap_true(self, capsys): + """Pass.""" + with pytest.raises(SystemExit): + with cli.context.exc_wrap(wraperror=True): + raise utils.MockError("badwolf") + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 2 + + stdout = captured.out.splitlines() + assert not stdout + + exp0 = "** ERROR: WRAPPED EXCEPTION: {c.__module__}.{c.__name__}" + exp0 = exp0.format(c=utils.MockError) + assert stderr[0] == exp0 + assert stderr[1] == "badwolf" + + def test_exc_wrap_false(self, capsys): + """Pass.""" + with pytest.raises(utils.MockError): + with cli.context.exc_wrap(wraperror=False): + raise utils.MockError("badwolf") + + +class TestCliSpawnShell(object): + """Pass.""" + + def test_default(self, monkeypatch): + """Pass.""" + runner = CliRunner(mix_stderr=False) + + monkeypatch.setattr("sys.stdin", tools.six.StringIO("exit()")) + + with runner.isolated_filesystem(): + histpath = tools.pathlib.Path(os.getcwd()) + monkeypatch.setattr(cli.context, "HISTPATH", format(histpath)) + + with pytest.raises(SystemExit): + cli.context.spawn_shell() + + +class TestCliRegisterReadline(object): + """Pass.""" + + def test_default(self, monkeypatch): + """Pass.""" + runner = CliRunner(mix_stderr=False) + + with runner.isolated_filesystem(): + histpath = tools.pathlib.Path(os.getcwd()) + histfile = histpath / cli.context.HISTFILE + 
monkeypatch.setattr(cli.context, "HISTPATH", format(histpath)) + + cli.context.register_readline() + assert histfile.is_file(), list(histpath.iterdir()) + + def test_exc(self, monkeypatch, capsys): + """Pass.""" + monkeypatch.setattr(cli.context, "readline", utils.MockError) + runner = CliRunner(mix_stderr=False) + + with runner.isolated_filesystem(): + histpath = tools.pathlib.Path(os.getcwd()) + histfile = histpath / cli.context.HISTFILE + monkeypatch.setattr(cli.context, "HISTPATH", format(histpath)) + + cli.context.register_readline() + + assert histfile.is_file(), list(histpath.iterdir()) + + captured = capsys.readouterr() + assert ( + captured.err.splitlines()[0] + == "** ERROR: Unable to register history and autocomplete:" + ) + + +class TestCliLoadEnv(object): + """Pass.""" + + def test_axenv(self, monkeypatch): + """Pass.""" + runner = CliRunner(mix_stderr=False) + with runner.isolated_filesystem(): + with open("test.env", "w") as f: + f.write("AX_TEST=badwolf1\n") + monkeypatch.delenv("AX_TEST", raising=False) + monkeypatch.setenv("AX_ENV", "test.env") + cli.context.load_dotenv() + assert os.environ["AX_TEST"] == "badwolf1" + + def test_default(self, monkeypatch): + """Pass.""" + runner = CliRunner(mix_stderr=False) + with runner.isolated_filesystem(): + with open(".env", "w") as f: + f.write("AX_TEST=badwolf2\n") + monkeypatch.delenv("AX_TEST", raising=False) + monkeypatch.delenv("AX_ENV", raising=False) + cli.context.load_dotenv() + assert os.environ["AX_TEST"] == "badwolf2" + + +class TestCliContext(object): + """Pass.""" + + def test_init(self): + """Pass.""" + obj = cli.context.Context() + assert format(obj) + assert repr(obj) + assert obj.wraperror is True + + def test_export_stdout(self, capsys): + """Pass.""" + obj = cli.context.Context() + obj.export(data="badwolf") + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert not stderr + + stdout = captured.out.splitlines() + assert len(stdout) == 1 + + assert stdout[0] == 
"badwolf" + + def test_export_file(self, capsys): + """Pass.""" + runner = CliRunner(mix_stderr=False) + obj = cli.context.Context() + + with runner.isolated_filesystem(): + obj.export(data="badwolf", export_file="badwolf.test") + assert os.path.isfile("badwolf.test") + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + assert stderr[0].startswith("** Exported file") + assert "created" in stderr[0] + + def test_export_file_exists(self, capsys): + """Pass.""" + runner = CliRunner(mix_stderr=False) + obj = cli.context.Context() + + with runner.isolated_filesystem(): + with open("badwolf.test", "w") as f: + f.write("badwolf") + + with pytest.raises(SystemExit): + obj.export(data="badwolf", export_file="badwolf.test") + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + assert stderr[0].startswith("** ERROR: Export file") + + def test_export_file_overwrite(self, capsys): + """Pass.""" + runner = CliRunner(mix_stderr=False) + obj = cli.context.Context() + + with runner.isolated_filesystem(): + with open("badwolf.test", "w") as f: + f.write("badwolf1") + + obj.export( + data="badwolf2", export_file="badwolf.test", export_overwrite=True + ) + assert os.path.isfile("badwolf.test") + with open("badwolf.test", "r") as f: + assert f.read() == "badwolf2" + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + assert stderr[0].startswith("** Exported file") + assert "overwritten" in stderr[0] + + def test_echo_ok(self, capsys): + """Pass.""" + cli.context.Context.echo_ok("badwolf") + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + assert 
stderr[0] == "** badwolf" + + def test_echo_warn(self, capsys): + """Pass.""" + cli.context.Context.echo_warn("badwolf") + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + assert stderr[0] == "** WARNING: badwolf" + + def test_echo_error_abort(self, capsys): + """Pass.""" + with pytest.raises(SystemExit): + cli.context.Context.echo_error("badwolf", abort=True) + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + assert stderr[0] == "** ERROR: badwolf" + + def test_echo_error_noabort(self, capsys): + """Pass.""" + cli.context.Context.echo_error("badwolf", abort=False) + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + assert stderr[0] == "** ERROR: badwolf" + + def test_start_client(self, request): + """Pass.""" + obj = cli.context.Context() + url = request.config.getoption("--ax-url") + key = request.config.getoption("--ax-key") + secret = request.config.getoption("--ax-secret") + assert obj.obj is None + client = obj.start_client(url=url, key=key, secret=secret) + assert isinstance(client, connect.Connect) + assert client == obj.obj + + def test_handle_export(self, capsys): + """Pass.""" + obj = cli.context.Context() + obj.handle_export( + raw_data={}, + formatters={"json": to_json}, + export_format="json", + export_file=None, + export_path=None, + export_overwrite=False, + ) + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert not stderr + + stdout = captured.out.splitlines() + assert len(stdout) == 1 + + assert stdout[0] == "{}" + + def test_handle_export_invalid(self, capsys): + """Pass.""" + obj = cli.context.Context() + with pytest.raises(SystemExit): + obj.handle_export( + raw_data={}, + 
formatters={"json": to_json}, + export_format="jsox", + export_file=None, + export_path=None, + export_overwrite=False, + ) + + captured = capsys.readouterr() + + stderr = captured.err.splitlines() + assert len(stderr) == 1 + + stdout = captured.out.splitlines() + assert not stdout + + exp0 = "** ERROR: Export format {!r} is unsupported".format("jsox") + assert stderr[0].startswith(exp0) diff --git a/axonius_api_client/tests/tests_cli/test_cli_grp_adapters.py b/axonius_api_client/tests/tests_cli/test_cli_grp_adapters.py new file mode 100644 index 00000000..8e94ef34 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_grp_adapters.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +from axonius_api_client import cli, tools + +from .. import utils + + +class TestCmdGet(object): + """Pass.""" + + def test_json(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = ["adapters", "get", "--export-format", "json"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + def test_csv(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = ["adapters", "get", "--export-format", "csv"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + csv_cols1 = [ + "name", + "node_name", + "node_id", + "status_raw", + "cnx_count", + "cnx_count_ok", + "cnx_count_bad", + ] + utils.check_csv_cols(stdout1, csv_cols1) + + def test_csv_settings(self, request, monkeypatch): + 
"""Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = ["adapters", "get", "--include-settings", "--export-format", "csv"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + csv_cols1 = [ + "name", + "node_name", + "node_id", + "status_raw", + "cnx_count", + "cnx_count_ok", + "cnx_count_bad", + "adapter_settings", + "advanced_settings", + ] + utils.check_csv_cols(stdout1, csv_cols1) + + def test_find_fail(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = ["adapters", "get", "--name", "badwolf", "--export-format", "json"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert not stdout1 + assert stderr1 + assert exit_code1 != 0 + + errlines1 = stderr1.splitlines() + + assert errlines1[-1].startswith("** ERROR: No adapters found when searching by") diff --git a/axonius_api_client/tests/tests_cli/test_cli_grp_cnx.py b/axonius_api_client/tests/tests_cli/test_cli_grp_cnx.py new file mode 100644 index 00000000..8f84b637 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_grp_cnx.py @@ -0,0 +1,414 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +from axonius_api_client import cli, tools # , exceptions + +from .. 
import utils + +# import pytest + + +class TestGrpCnx(object): + """Pass.""" + + def test_get_json(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = ["adapters", "get"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + args2 = ["adapters", "cnx", "get", "--rows", "-", "--export-format", "json"] + result2 = runner.invoke(cli=cli.cli, args=args2, input=stdout1) + + stderr2 = result2.stderr + stdout2 = result2.stdout + exit_code2 = result2.exit_code + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.LIST) + + def test_get_csv(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = ["adapters", "get"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + args2 = ["adapters", "cnx", "get", "--rows", "-", "--export-format", "csv"] + result2 = runner.invoke(cli=cli.cli, args=args2, input=stdout1) + + stderr2 = result2.stderr + stdout2 = result2.stdout + exit_code2 = result2.exit_code + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + csv_cols2 = ["adapter_name", "node_name", "id", "uuid", "status_raw", "error"] + utils.check_csv_cols(stdout2, csv_cols2) + + def test_get_csv_settings(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = ["adapters", "get"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + 
exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + args2 = [ + "adapters", + "cnx", + "get", + "--rows", + "-", + "--export-format", + "csv", + "--include-settings", + ] + result2 = runner.invoke(cli=cli.cli, args=args2, input=result1.stdout) + + stderr2 = result2.stderr + stdout2 = result2.stdout + exit_code2 = result2.exit_code + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + csv_cols2 = [ + "adapter_name", + "node_name", + "id", + "uuid", + "status_raw", + "error", + "settings", + ] + utils.check_csv_cols(stdout2, csv_cols2) + + def test_non_adapters_json(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + content = [{"x": "a"}] + + args1 = ["adapters", "cnx", "get", "--rows", "-"] + result1 = runner.invoke(cli=cli.cli, args=args1, input=tools.json_dump(content)) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert not stdout1 + assert stderr1 + assert exit_code1 != 0 + + errlines1 = stderr1.splitlines() + + exp = "** ERROR: No 'cnx' key found in adapter with keys: {}".format( + list(content[0]) + ) + assert errlines1[-1] == exp + + def test_add_check_discover_delete_csv(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + csv_file = "badwolf.csv" + csv_contents = "id,hostname\nbadwolf9131,badwolf\n" + + with runner.isolated_filesystem(): + with open(csv_file, "w") as f: + f.write(csv_contents) + + args1 = [ + "adapters", + "cnx", + "add", + "--adapter", + "csv", + "--config", + "user_id={}".format(csv_file), + "--config", + "csv={}".format(csv_file), + "--no-prompt-opt", + ] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = 
tools.json_load(stdout1) + assert isinstance(json1, dict) + + args2 = ["adapters", "cnx", "check", "--rows", "-", "--no-error"] + result2 = runner.invoke(cli=cli.cli, args=args2, input=stdout1) + + stderr2 = result2.stderr + stdout2 = result2.stdout + exit_code2 = result2.exit_code + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.LIST) + + args3 = ["adapters", "cnx", "discover", "--rows", "-", "--no-error"] + result3 = runner.invoke(cli=cli.cli, args=args3, input=stdout2) + + stderr3 = result3.stderr + stdout3 = result3.stdout + exit_code3 = result3.exit_code + + assert stdout3 + assert stderr3 + assert exit_code3 == 0 + + json3 = tools.json_load(stdout3) + assert isinstance(json3, tools.LIST) + + args4 = ["adapters", "cnx", "delete", "--rows", "-", "--force", "--wait", "0"] + result4 = runner.invoke(cli=cli.cli, args=args4, input=stdout3) + + stderr4 = result4.stderr + stdout4 = result4.stdout + exit_code4 = result4.exit_code + + assert stdout4 + assert stderr4 + assert exit_code4 == 0 + + json4 = tools.json_load(stdout4) + assert isinstance(json4, tools.LIST) + + def test_add_delete_ad_config_args(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + csv_file = "badwolf.csv" + csv_contents = "id,hostname\nbadwolf9131,badwolf\n" + + # + configs = { + "dc_name": "badwolf", + "user": "badwolf", + "password": "badwolf", + "dns_server_address": "badwolf", + "alternative_dns_suffix": "badwolf", + "use_ssl": "Unencrypted", + "ca_file": csv_file, + "cert_file": csv_file, + "private_key": csv_file, + "fetch_disabled_devices": "y", + "fetch_disabled_users": "y", + "is_ad_gc": "y", + "ldap_ou_whitelist": "badwolf1,badwolf2", + } + + with runner.isolated_filesystem(): + with open(csv_file, "w") as f: + f.write(csv_contents) + + args1 = [ + "adapters", + "cnx", + "add", + "--adapter", + "active_directory", + "--no-error", + ] + for k, v in 
configs.items(): + args1.append("--config") + args1.append("{}={}".format(k, v)) + + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, dict) + + args2 = ["adapters", "cnx", "delete", "--rows", "-", "--force", "--wait", "0"] + result2 = runner.invoke(cli=cli.cli, args=args2, input=stdout1) + + stderr2 = result2.stderr + stdout2 = result2.stdout + exit_code2 = result2.exit_code + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.LIST) + + def test_add_delete_ad_config_prompt_skips(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + csv_file = "badwolf.csv" + csv_contents = "id,hostname\nbadwolf9131,badwolf\n" + + # + skips = ["ca_file", "cert_file", "private_key"] + + configs = [ + "badwolf", # dc_name + "badwolf", # user + "badwolf", # password + "badwolf", # dns_server_address + "badwolf", # alternative_dns_suffix + "Unencrypted", # use_ssl + # csv_file, # ca_file + # csv_file, # cert_file + # csv_file, # private_key + "y", # fetch_disabled_devices + "y", # fetch_disabled_users + "y", # is_ad_gc + "badwolf1,badwolf2", # ldap_ou_whitelist + ] + + with runner.isolated_filesystem(): + with open(csv_file, "w") as f: + f.write(csv_contents) + + args1 = [ + "adapters", + "cnx", + "add", + "--adapter", + "active_directory", + "--no-error", + ] + for s in skips: + args1 += ["--skip", s] + + result1 = runner.invoke(cli=cli.cli, args=args1, input="\n".join(configs)) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json_start_idx = stdout1.index("{") + stdout1_stripped = stdout1[json_start_idx:] + + json1 = 
tools.json_load(stdout1_stripped) + assert isinstance(json1, dict) + + args2 = ["adapters", "cnx", "delete", "--rows", "-", "--force", "--wait", "0"] + result2 = runner.invoke(cli=cli.cli, args=args2, input=stdout1_stripped) + + stderr2 = result2.stderr + stdout2 = result2.stdout + exit_code2 = result2.exit_code + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.LIST) + + def test_check_csv(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = ["adapters", "get", "-n", "active_directory"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + args2 = ["adapters", "cnx", "get", "-r", "-", "-xt", "json"] + result2 = runner.invoke(cli=cli.cli, args=args2, input=stdout1) + + stderr2 = result2.stderr + stdout2 = result2.stdout + exit_code2 = result2.exit_code + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.LIST) + + runner = utils.load_clirunner(request, monkeypatch) + + args3 = ["adapters", "cnx", "check", "-r", "-", "-ne", "-xt", "csv"] + result3 = runner.invoke(cli=cli.cli, args=args3, input=stdout2) + + stderr3 = result3.stderr + stdout3 = result3.stdout + exit_code3 = result3.exit_code + + assert stdout3 + assert stderr3 + assert exit_code3 == 0 + + csv_cols3 = ["adapter_name", "node_name", "id", "uuid", "status_raw", "error"] + utils.check_csv_cols(stdout3, csv_cols3) diff --git a/axonius_api_client/tests/tests_cli/test_cli_grp_labels.py b/axonius_api_client/tests/tests_cli/test_cli_grp_labels.py new file mode 100644 index 00000000..c129e1a0 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_grp_labels.py @@ -0,0 
+1,127 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import pytest + +from axonius_api_client import cli, tools + +from .. import utils + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdGet(object): + """Pass.""" + + def test_json(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "labels", "get", "--export-format", "json"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + def test_csv(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "labels", "get", "--export-format", "csv"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + stderr1 = result1.stderr + stdout1 = result1.stdout + exit_code1 = result1.exit_code + + assert not stdout1 + assert stderr1 + assert exit_code1 != 0 + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdAddRemove(object): + """Pass.""" + + def test_json(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + cmd, + "get", + "--query", + "(adapters > size(0))", + "--export-format", + "json", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + args2 = [ + cmd, + "labels", + "add", + "--label", + "badwolf1", + "--label", + "badwolf2", + "--export-format", + "json", + ] + result2 = runner.invoke(cli=cli.cli, args=args2, input=result1.stdout) + + stderr2 = result2.stderr + stdout2 = 
result2.stdout + exit_code2 = result2.exit_code + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.INT) + assert json2 == 1 + + args3 = [ + cmd, + "labels", + "remove", + "--label", + "badwolf1", + "--label", + "badwolf2", + "--export-format", + "json", + ] + result3 = runner.invoke(cli=cli.cli, args=args3, input=result1.stdout) + + stderr3 = result3.stderr + stdout3 = result3.stdout + exit_code3 = result3.exit_code + + assert stdout3 + assert stderr3 + assert exit_code3 == 0 + + json3 = tools.json_load(stdout3) + assert isinstance(json3, tools.INT) + assert json3 == 1 diff --git a/axonius_api_client/tests/tests_cli/test_cli_grp_objects.py b/axonius_api_client/tests/tests_cli/test_cli_grp_objects.py new file mode 100644 index 00000000..8e40a942 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_grp_objects.py @@ -0,0 +1,538 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import pytest + +from axonius_api_client import api, cli, tools + +from .. 
import utils + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdCount(object): + """Pass.""" + + def test_json(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + cmd, + "count", + "--query", + "(adapters > size(0))", + "--export-format", + "json", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(result1.stdout) + assert isinstance(json1, tools.INT) + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdCountBySQ(object): + """Pass.""" + + def test_json(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "saved-query", "get"] + result1 = runner.invoke(cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + name = json1[0]["name"] + + args2 = [cmd, "count-by-saved-query", "--name", name, "--export-format", "json"] + + result2 = runner.invoke(cli=cli.cli, args=args2) + + exit_code2 = result2.exit_code + stdout2 = result2.stdout + stderr2 = result2.stderr + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.INT) + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdGetBySQ(object): + """Pass.""" + + def test_json(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "saved-query", "get"] + result1 = runner.invoke(cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + 
assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + + assert isinstance(json1, tools.LIST) + name = json1[0]["name"] + + args2 = [cmd, "get-by-saved-query", "--name", name, "--max-rows", "1"] + + result2 = runner.invoke(cli=cli.cli, args=args2) + + exit_code2 = result2.exit_code + stdout2 = result2.stdout + stderr2 = result2.stderr + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.LIST) + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdFields(object): + """Pass.""" + + def test_json(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "fields", "--export-format", "json"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, dict) + + def test_csv(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "fields", "--export-format", "csv"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + utils.check_csv_cols(stdout1, ["generic"]) + + def test_get_exc_wrap(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + monkeypatch.setattr(api.users_devices.Fields, "get", utils.mock_failure) + + args1 = [cmd, "fields"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert not stdout1 + assert stderr1 + assert exit_code1 != 0 + + errlines1 = stderr1.splitlines() + assert len(errlines1) == 4 + assert ( + errlines1[-2] + == "** ERROR: 
WRAPPED EXCEPTION: axonius_api_client.tests.utils.MockError" + ) + assert errlines1[-1] == "badwolf" + + def test_get_exc_nowrap(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + monkeypatch.setattr(api.users_devices.Fields, "get", utils.mock_failure) + args1 = ["--no-wraperror", cmd, "fields"] + with pytest.raises(utils.MockError): + runner.invoke(cli=cli.cli, args=args1, catch_exceptions=False) + + def test_adapter_re(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "fields", "--adapter-re", "generic"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, dict) + assert list(json1) == ["generic"] + + def test_adapter_fields_re(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "fields", "--adapter-re", "generic", "--field-re", "name"] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, dict) + for k, v in json1.items(): + assert k == "generic" + for i in v: + assert "name" in i + + def test_adapter_fields_re_err(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "fields", "--adapter-re", "generic", "--field-re", "badwolf"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert not stdout1 + assert stderr1 + assert exit_code1 != 0 + + errlines1 = stderr1.splitlines() + assert 
errlines1[-1].startswith("** ERROR: No fields found matching ") + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdGet(object): + """Pass.""" + + def test_json(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + cmd, + "get", + "--query", + "(adapters > size(0))", + "--export-format", + "json", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + def test_csv(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + if cmd == "devices": + args1 = [ + cmd, + "get", + "--query", + '((specific_data.data.installed_software == ({"$exists":true,"$ne":""})))', # noqa + "--field", + "installed_software", + "--export-format", + "csv", + "--max-rows", + "1", + ] + else: + args1 = [ + cmd, + "get", + "--query", + "(adapters > size(0))", + "--export-format", + "csv", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + csv_cols1 = ["internal_axon_id"] + utils.check_csv_cols(stdout1, csv_cols1) + + def test_csv_complex(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + args1 = [ + cmd, + "get", + "--query", + "(adapters > size(0))", + "--field", + "all", + "--export-format", + "csv", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + csv_cols1 = ["internal_axon_id"] + 
utils.check_csv_cols(stdout1, csv_cols1) + + +class TestCmdGetBySubnet(object): + """Pass.""" + + def test_json(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + "devices", + "get-by-subnet", + "--value", + "10.0.0.0/8", + "--export-format", + "json", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + def test_csv(self, request, monkeypatch): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + "devices", + "get-by-subnet", + "--value", + "10.0.0.0/8", + "--export-format", + "csv", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + csv_cols1 = ["internal_axon_id"] + utils.check_csv_cols(stdout1, csv_cols1) + + +@pytest.mark.parametrize("get_by", ["get-by-hostname", "get-by-ip", "get-by-mac"]) +class TestCmdGetByDevices(object): + """Pass.""" + + def test_json(self, request, monkeypatch, get_by): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + "devices", + get_by, + "--value", + "RE:.*", + "--export-format", + "json", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + def test_csv(self, request, monkeypatch, get_by): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + "devices", + get_by, + "--value", + "RE:.*", + 
"--export-format", + "csv", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + csv_cols1 = ["internal_axon_id"] + utils.check_csv_cols(stdout1, csv_cols1) + + +@pytest.mark.parametrize("get_by", ["get-by-mail", "get-by-username"]) +class TestCmdGetByUsers(object): + """Pass.""" + + def test_json(self, request, monkeypatch, get_by): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + "users", + get_by, + "--value", + "RE:.*", + "--export-format", + "json", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + def test_csv(self, request, monkeypatch, get_by): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + "users", + get_by, + "--value", + "RE:.*", + "--export-format", + "csv", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + csv_cols1 = ["internal_axon_id"] + utils.check_csv_cols(stdout1, csv_cols1) diff --git a/axonius_api_client/tests/tests_cli/test_cli_grp_reports.py b/axonius_api_client/tests/tests_cli/test_cli_grp_reports.py new file mode 100644 index 00000000..41c79a89 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_grp_reports.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import pytest + +from axonius_api_client import cli, tools + 
+from .. import utils + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdMissingAdapters(object): + """Pass.""" + + def test_json(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + cmd, + "get", + "--query", + "(adapters > size(0))", + "--export-format", + "json", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + args2 = [ + cmd, + "reports", + "missing-adapters", + "--rows", + "-", + "--export-format", + "json", + ] + + result2 = runner.invoke(cli=cli.cli, args=args2, input=result1.stdout) + + exit_code2 = result2.exit_code + stdout2 = result2.stdout + stderr2 = result2.stderr + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, tools.LIST) + + def test_csv(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [ + cmd, + "get", + "--query", + "(adapters > size(0))", + "--export-format", + "json", + "--max-rows", + "1", + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + args2 = [ + cmd, + "reports", + "missing-adapters", + "--rows", + "-", + "--export-format", + "csv", + ] + result2 = runner.invoke(cli=cli.cli, args=args2, input=result1.stdout) + + exit_code2 = result2.exit_code + stdout2 = result2.stdout + stderr2 = result2.stderr + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + csv_cols2 = ["missing", "missing_nocnx", "adapters"] + utils.check_csv_cols(stdout2, csv_cols2) diff --git a/axonius_api_client/tests/tests_cli/test_cli_grp_saved_query.py 
b/axonius_api_client/tests/tests_cli/test_cli_grp_saved_query.py new file mode 100644 index 00000000..574a1545 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_grp_saved_query.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import pytest + +from axonius_api_client import cli, tools + +from .. import utils + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdAddGetDelete(object): + """Pass.""" + + def test_json_cf(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + name = "badwolf" + query = "(adapters > size(0))" + field = "labels" + cfilter = "generic:{}=a".format(field) + + args1 = [ + cmd, + "saved-query", + "add", + "--name", + name, + "--query", + query, + "--sort-field", + field, + "--column-filter", + cfilter, + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, dict) + assert json1["name"] == name + + args2 = [cmd, "saved-query", "get-by-name", "--name", name] + + result2 = runner.invoke(cli=cli.cli, args=args2) + + exit_code2 = result2.exit_code + stdout2 = result2.stdout + stderr2 = result2.stderr + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, dict) + assert json2["name"] == name + + args3 = [cmd, "saved-query", "delete", "--wait", "0"] + + result3 = runner.invoke(cli=cli.cli, args=args3, input=result2.stdout) + + exit_code3 = result3.exit_code + stdout3 = result3.stdout + stderr3 = result3.stderr + assert not stdout3.strip() + assert stderr3 + assert exit_code3 == 0 + + assert not result3.stdout + + def test_json_no_cf(self, request, monkeypatch, 
cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + name = "badwolf" + query = "(adapters > size(0))" + field = "labels" + + args1 = [ + cmd, + "saved-query", + "add", + "--name", + name, + "--query", + query, + "--sort-field", + field, + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, dict) + assert json1["name"] == name + + args2 = [cmd, "saved-query", "get-by-name", "--name", name] + + result2 = runner.invoke(cli=cli.cli, args=args2) + + exit_code2 = result2.exit_code + stdout2 = result2.stdout + stderr2 = result2.stderr + + assert stdout2 + assert stderr2 + assert exit_code2 == 0 + + json2 = tools.json_load(stdout2) + assert isinstance(json2, dict) + assert json2["name"] == name + + args3 = [cmd, "saved-query", "delete", "--wait", "0"] + + result3 = runner.invoke(cli=cli.cli, args=args3, input=result2.stdout) + + exit_code3 = result3.exit_code + stdout3 = result3.stdout + stderr3 = result3.stderr + assert not stdout3.strip() + assert stderr3 + assert exit_code3 == 0 + + assert not result3.stdout + + def test_bad_cf(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + name = "badwolf" + query = "(adapters > size(0))" + field = "labels" + cfilter = "generic:{}".format(field) + + args1 = [ + cmd, + "saved-query", + "add", + "--name", + name, + "--query", + query, + "--sort-field", + field, + "--column-filter", + cfilter, + ] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = not result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 != 0 + + +@pytest.mark.parametrize("cmd", ["devices", "users"]) +class TestCmdGet(object): + """Pass.""" + + def test_json(self, request, monkeypatch, 
cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "saved-query", "get", "--export-format", "json"] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + json1 = tools.json_load(stdout1) + assert isinstance(json1, tools.LIST) + + def test_csv(self, request, monkeypatch, cmd): + """Pass.""" + runner = utils.load_clirunner(request, monkeypatch) + + args1 = [cmd, "saved-query", "get", "--max-rows", "1", "--export-format", "csv"] + + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + stderr1 = result1.stderr + + assert stdout1 + assert stderr1 + assert exit_code1 == 0 + + csv_cols1 = ["name", "date_fetched", "timestamp"] + utils.check_csv_cols(stdout1, csv_cols1) diff --git a/axonius_api_client/tests/tests_cli/test_cli_help.py b/axonius_api_client/tests/tests_cli/test_cli_help.py new file mode 100644 index 00000000..ca41e7b1 --- /dev/null +++ b/axonius_api_client/tests/tests_cli/test_cli_help.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import pytest +from click.testing import CliRunner + +from axonius_api_client import cli + + +class TestCliHelp(object): + """Pass.""" + + @pytest.mark.parametrize( + "cmd", + [ + [], + ["devices"], + ["devices", "count"], + ["devices", "count-by-saved-query"], + ["devices", "reports"], + ["devices", "saved-query"], + ["devices", "saved-query", "get"], + ["devices", "saved-query", "add"], + ["devices", "saved-query", "delete"], + ["devices", "labels"], + ["devices", "labels", "get"], + ["devices", "labels", "add"], + ["devices", "labels", "remove"], + ["devices", "reports", "missing-adapters"], + ["devices", "fields"], + ["devices", "get"], + 
["devices", "get-by-saved-query"], + ["devices", "get-by-ip"], + ["devices", "get-by-hostname"], + ["devices", "get-by-mac"], + ["devices", "get-by-subnet"], + ["users"], + ["users", "count"], + ["users", "count-by-saved-query"], + ["users", "reports"], + ["users", "reports", "missing-adapters"], + ["users", "saved-query"], + ["users", "saved-query", "get"], + ["users", "saved-query", "add"], + ["users", "saved-query", "delete"], + ["users", "labels"], + ["users", "labels", "get"], + ["users", "labels", "add"], + ["users", "labels", "remove"], + ["users", "fields"], + ["users", "get"], + ["users", "get-by-saved-query"], + ["users", "get-by-username"], + ["users", "get-by-mail"], + ["adapters"], + ["adapters", "get"], + ["adapters", "cnx"], + ["adapters", "cnx", "get"], + ["adapters", "cnx", "add"], + ["adapters", "cnx", "delete"], + ["adapters", "cnx", "check"], + ["adapters", "cnx", "discover"], + ["shell"], + ], + ) + def test_cli_help(self, cmd): + """Pass.""" + runner = CliRunner(mix_stderr=False) + + args1 = cmd + ["--help"] + result1 = runner.invoke(cli=cli.cli, args=args1) + + exit_code1 = result1.exit_code + stdout1 = result1.stdout + # stderr1 = result1.stderr + + assert stdout1 + # assert stderr1 + assert exit_code1 == 0 diff --git a/axonius_api_client/tests/tests_pkg/__init__.py b/axonius_api_client/tests/tests_pkg/__init__.py new file mode 100644 index 00000000..10e5a108 --- /dev/null +++ b/axonius_api_client/tests/tests_pkg/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- +"""Test suite.""" +from __future__ import absolute_import, division, print_function, unicode_literals diff --git a/axonius_api_client/tests/tests_pkg/test_auth.py b/axonius_api_client/tests/tests_pkg/test_auth.py new file mode 100644 index 00000000..e495a0fb --- /dev/null +++ b/axonius_api_client/tests/tests_pkg/test_auth.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, 
unicode_literals + +import pytest + +import axonius_api_client as axonapi +from axonius_api_client import exceptions + +from .. import utils + + +class TestApiKey(object): + """Test axonius_api_client.auth.""" + + def test_valid_creds(self, request): + """Test str/repr has URL.""" + http = axonapi.Http(url=utils.get_url(request), certwarn=False) + + auth = axonapi.ApiKey(http=http, **utils.get_key_creds(request)) + + auth.login() + + assert auth.is_logged_in + assert "url" in format(auth) + assert "url" in repr(auth) + + def test_logout(self, request): + """Test no exc when logout() after login().""" + http = axonapi.Http(url=utils.get_url(request), certwarn=False) + + auth = axonapi.ApiKey(http=http, **utils.get_key_creds(request)) + + auth.login() + + assert auth.is_logged_in + + auth.logout() + + assert not auth.is_logged_in + + def test_login_already_logged_in(self, request): + """Test exc thrown when login() and login() already called.""" + http = axonapi.Http(url=utils.get_url(request), certwarn=False) + + auth = axonapi.ApiKey(http=http, **utils.get_key_creds(request)) + + auth.login() + + with pytest.raises(exceptions.AlreadyLoggedIn): + auth.login() + + def test_logout_not_logged_in(self, request): + """Test exc thrown when logout() but login() not called.""" + http = axonapi.Http(url=utils.get_url(request), certwarn=False) + + auth = axonapi.ApiKey(http=http, **utils.get_key_creds(request)) + + with pytest.raises(exceptions.NotLoggedIn): + auth.logout() + + def test_invalid_creds(self, request): + """Test str/repr has URL.""" + http = axonapi.Http(url=utils.get_url(request), certwarn=False) + + bad = "badwolf" + + auth = axonapi.ApiKey(http=http, key=bad, secret=bad) + + with pytest.raises(exceptions.InvalidCredentials): + auth.login() + + def test_http_lock_fail(self, request): + """Test using an http client from another authmethod throws exc.""" + http = axonapi.Http(url=utils.get_url(request), certwarn=False) + + auth = axonapi.ApiKey(http=http, 
**utils.get_key_creds(request)) + + assert auth.http._auth_lock + + with pytest.raises(exceptions.AuthError): + auth = axonapi.ApiKey(http=http, **utils.get_key_creds(request)) diff --git a/axonius_api_client/tests/tests_pkg/test_connect.py b/axonius_api_client/tests/tests_pkg/test_connect.py new file mode 100644 index 00000000..3423f72c --- /dev/null +++ b/axonius_api_client/tests/tests_pkg/test_connect.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import logging + +import pytest + +import axonius_api_client as axonapi +from axonius_api_client import connect, exceptions + +from .. import utils + +BAD_CRED = "tardis" + + +class TestConnect(object): + """Pass.""" + + def test_no_start(self, request): + """Pass.""" + ax_url = utils.get_url(request) + + c = connect.Connect(url=ax_url, key=BAD_CRED, secret=BAD_CRED) + + assert "Not connected" in format(c) + assert "Not connected" in repr(c) + assert c._handler_file is None + assert c._handler_con is None + + def test_no_start_logs(self, request): + """Pass.""" + ax_url = utils.get_url(request) + + c = connect.Connect( + url=ax_url, key=BAD_CRED, secret=BAD_CRED, log_console=True, log_file=True + ) + + assert "Not connected" in format(c) + assert "Not connected" in repr(c) + assert isinstance(c._handler_file, logging.Handler) + assert isinstance(c._handler_con, logging.Handler) + + def test_start(self, request): + """Pass.""" + ax_url = utils.get_url(request) + + c = connect.Connect(url=ax_url, certwarn=False, **utils.get_key_creds(request)) + + c.start() + + assert "Connected" in format(c) + assert "Connected" in repr(c) + with pytest.warns(exceptions.BetaWarning): + format(c.enforcements) + format(c.users) + format(c.devices) + format(c.adapters) + + def test_invalid_creds(self, request): + """Pass.""" + ax_url = utils.get_url(request) + + c = connect.Connect(url=ax_url, key=BAD_CRED, 
secret=BAD_CRED, certwarn=False) + + c._http._CONNECT_TIMEOUT = 1 + + with pytest.raises(exceptions.ConnectError) as exc: + c.start() + + assert isinstance(exc.value.exc, exceptions.InvalidCredentials) + + def test_connect_timeout(self): + """Pass.""" + c = connect.Connect( + url="127.0.0.99", key=BAD_CRED, secret=BAD_CRED, certwarn=False + ) + + c._http._CONNECT_TIMEOUT = 1 + + with pytest.raises(exceptions.ConnectError) as exc: + c.start() + + assert isinstance( + exc.value.exc, axonapi.http.requests.exceptions.ConnectTimeout + ) + + def test_connect_error(self): + """Pass.""" + c = connect.Connect( + url="https://127.0.0.1:3919", key=BAD_CRED, secret=BAD_CRED, certwarn=False + ) + + c._http._CONNECT_TIMEOUT = 1 + + with pytest.raises(exceptions.ConnectError) as exc: + c.start() + assert isinstance( + exc.value.exc, axonapi.http.requests.exceptions.ConnectionError + ) + + def test_invalid_creds_nowrap(self, request): + """Pass.""" + ax_url = utils.get_url(request) + + c = connect.Connect( + url=ax_url, key=BAD_CRED, secret=BAD_CRED, certwarn=False, wraperror=False + ) + + c._http._CONNECT_TIMEOUT = 1 + + with pytest.raises(exceptions.InvalidCredentials): + c.start() + + def test_other_exc(self, request): + """Pass.""" + c = connect.Connect( + url="127.0.0.1", key=BAD_CRED, secret=BAD_CRED, certwarn=False + ) + + c._http._CONNECT_TIMEOUT = 1 + c._auth._creds = None + + with pytest.raises(exceptions.ConnectError): + c.start() + + def test_reason(self): + """Pass.""" + exc = Exception("badwolf") + + reason = connect.Connect._get_exc_reason(exc) + + assert format(reason) == "badwolf" diff --git a/axonius_api_client/tests/tests_pkg/test_exceptions.py b/axonius_api_client/tests/tests_pkg/test_exceptions.py new file mode 100644 index 00000000..07d4db2a --- /dev/null +++ b/axonius_api_client/tests/tests_pkg/test_exceptions.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, 
print_function, unicode_literals + +from axonius_api_client import exceptions, tools + +# FUTURE: Test subclassing/strings/etc + + +class TestKnownCb(object): + """Test axonius_api_client.tools.join_url.""" + + def known_cb_error(self, **kwargs): + """Pass.""" + raise Exception() + + def known_cb(self, **kwargs): + """Pass.""" + return list(kwargs) + + def test_known_cb_error(self): + """Pass.""" + x = exceptions.known_cb(known=self.known_cb_error) + assert isinstance(x, tools.LIST) + assert len(x) == 1 + assert "failed with exception" in x[0] + assert format(self.known_cb_error) in x[0] + + def test_known_cb(self): + """Pass.""" + x = exceptions.known_cb(known=[1, 2, 3]) + assert x == [1, 2, 3] diff --git a/axonius_api_client/tests/tests_pkg/test_http.py b/axonius_api_client/tests/tests_pkg/test_http.py new file mode 100644 index 00000000..312e17ed --- /dev/null +++ b/axonius_api_client/tests/tests_pkg/test_http.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.http.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import logging +import sys + +import pytest +import requests + +import axonius_api_client as axonapi +from axonius_api_client import exceptions + +from .. 
import utils + +InsecureRequestWarning = requests.urllib3.exceptions.InsecureRequestWarning + + +class TestParserUrl(object): + """Test axonapi.http.ParserUrl.""" + + def test_schemehostport443(self): + """Test a proper URL gets parsed the same.""" + u = axonapi.http.ParserUrl("https://host:443/blah") + assert u.hostname == "host" + assert u.port == 443 + assert u.scheme == "https" + assert u.parsed.path == "/blah" + assert u.url_full == "https://host:443/blah" + assert u.url == "https://host:443" + + def test_str_repr(self): + """Test str/repr has URL path.""" + u = axonapi.http.ParserUrl("https://host:443/blah") + assert u.parsed.path in format(u) + assert u.parsed.path in repr(u) + + def test_schemehost_noport443(self): + """Test port gets added for https scheme.""" + u = axonapi.http.ParserUrl("https://host") + assert u.hostname == "host" + assert u.port == 443 + assert u.scheme == "https" + + def test_host_noschemeport(self): + """Test exc when no port or scheme in URL.""" + exc = exceptions.HttpError + match = "no.*'port'" + with pytest.raises(exc, match=match): + axonapi.http.ParserUrl("host", default_scheme="") + + def test_unknownschemehost_noport(self): + """Test exc when no port and non http/https scheme.""" + exc = exceptions.HttpError + match = "no.*'port'" + with pytest.raises(exc, match=match): + axonapi.http.ParserUrl("httpx://host") + + def test_hostport443_withslash(self): + """Test scheme added with port 443 and no scheme in URL.""" + u = axonapi.http.ParserUrl("host:443/") + assert u.hostname == "host" + assert u.port == 443 + assert u.scheme == "https" + + def test_hostport443_noscheme(self): + """Test scheme added with port 443 and no scheme in URL.""" + u = axonapi.http.ParserUrl("host:443", default_scheme="") + assert u.hostname == "host" + assert u.port == 443 + assert u.scheme == "https" + + def test_hostport80_noscheme(self): + """Test scheme added with port 80 and no scheme in URL.""" + u = axonapi.http.ParserUrl("host:80", 
default_scheme="") + assert u.hostname == "host" + assert u.port == 80 + assert u.scheme == "http" + + def test_schemehost_noport80(self): + """Test port added with no port and http scheme in URL.""" + u = axonapi.http.ParserUrl("http://host") + assert u.hostname == "host" + assert u.port == 80 + assert u.scheme == "http" + + +class TestHttp(object): + """Test axonapi.http.Http.""" + + def test_str_repr(self, request): + """Test str/repr has URL.""" + ax_url = utils.get_url(request) + + http = axonapi.Http(url=ax_url) + + assert ax_url in format(http) + assert ax_url in repr(http) + + def test_parsed_url(self, request): + """Test url=ParserUrl() works.""" + ax_url = utils.get_url(request) + + parsed_url = axonapi.http.ParserUrl(url=ax_url, default_scheme="https") + + http = axonapi.Http(url=parsed_url) + + assert ax_url in format(http) + assert ax_url in repr(http) + + def test_user_agent(self, request): + """Test user_agent has version in it.""" + ax_url = utils.get_url(request) + + http = axonapi.Http(url=ax_url) + assert axonapi.version.__version__ in http.user_agent + + def test_certwarn_true(self, request, httpbin_secure): + """Test quiet_urllib=False shows warning from urllib3.""" + url = httpbin_secure.url + http = axonapi.Http(url=url, certwarn=True, save_history=True) + + with pytest.warns(InsecureRequestWarning): + http() + + def test_certwarn_false(self, request, httpbin_secure): + """Test quiet_urllib=False shows warning from urllib3.""" + url = httpbin_secure.url + http = axonapi.Http(url=url, certwarn=False) + + http() + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="requires python3.6 or higher" + ) + def test_verify_ca_bundle(self, request, httpbin_secure, httpbin_ca_bundle): + """Test quiet_urllib=False no warning from urllib3 when using ca bundle.""" + url = httpbin_secure.url + http = axonapi.Http(url=url, certwarn=False) + response = http() + assert response.status_code == 200 + + def test_save_last_true(self, request): + """Test 
last req/resp with save_last=True.""" + ax_url = utils.get_url(request) + + http = axonapi.Http(url=ax_url, save_last=True, certwarn=False) + response = http() + assert response == http._LAST_RESPONSE + assert response.request == http._LAST_REQUEST + + def test_save_last_false(self, request): + """Test last req/resp with save_last=False.""" + ax_url = utils.get_url(request) + + http = axonapi.Http(url=ax_url, save_last=False, certwarn=False) + + http() + + assert not http._LAST_RESPONSE + assert not http._LAST_REQUEST + + def test_save_history(self, request): + """Test last resp added to history with save_history=True.""" + ax_url = utils.get_url(request) + + http = axonapi.Http(url=ax_url, save_history=True, certwarn=False) + + response = http() + + assert response in http._HISTORY + + def test_log_req_attrs_true(self, request, caplog): + """Test verbose logging of request attrs when log_request_attrs=True.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_request_attrs=True, certwarn=False, log_level="debug" + ) + + http() + + assert len(caplog.records) == 1 + + entries = ["request.*{}.*headers".format(http.url)] + utils.log_check(caplog, entries) + + def test_log_req_attrs_false(self, request, caplog): + """Test brief logging of request attrs when log_request_attrs=False.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_request_attrs=False, certwarn=False, log_level="debug" + ) + + http() + + assert len(caplog.records) == 1 + + entries = ["request.*{}".format(http.url)] + utils.log_check(caplog, entries) + + def test_log_req_attrs_none(self, request, caplog): + """Test no logging of request attrs when log_request_attrs=None.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_request_attrs=None, certwarn=False, log_level="debug" + ) + + http() + + assert not 
caplog.records + + def test_log_resp_attrs_true(self, request, caplog): + """Test verbose logging of response attrs when log_response_attrs=True.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_response_attrs=True, certwarn=False, log_level="debug" + ) + + http() + + assert len(caplog.records) == 1 + + entries = ["response.*{}.*headers".format(http.url)] + utils.log_check(caplog, entries) + + def test_log_resp_attrs_false(self, request, caplog): + """Test brief logging of response attrs when log_response_attrs=False.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_response_attrs=False, certwarn=False, log_level="debug" + ) + http() + + assert len(caplog.records) == 1 + + entries = ["response.*{}".format(http.url)] + utils.log_check(caplog, entries) + + def test_log_response_attrs_none(self, request, caplog): + """Test no logging of response attrs when log_response_attrs=None.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_response_attrs=None, certwarn=False, log_level="debug" + ) + + http() + + assert not caplog.records + + def test_log_resp_body_true(self, request, caplog): + """Test logging of response body when log_response_body=True.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_response_body=True, certwarn=False, log_level="debug" + ) + + http() + + assert len(caplog.records) == 1 + + entries = ["response body:.*"] + utils.log_check(caplog, entries) + + def test_log_resp_body_false(self, request, caplog): + """Test no logging of response body when log_response_body=False.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_response_body=False, certwarn=False, log_level="debug" + ) + + http() + + assert not caplog.records + + 
def test_log_req_body_true(self, request, caplog): + """Test logging of request body when log_request_body=True.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_request_body=True, certwarn=False, log_level="debug" + ) + + http() + + assert len(caplog.records) == 1 + + entries = ["request body:.*"] + utils.log_check(caplog, entries) + + def test_log_req_body_false(self, request, caplog): + """Test no logging of request body when log_request_body=False.""" + caplog.set_level(logging.DEBUG) + + ax_url = utils.get_url(request) + + http = axonapi.Http( + url=ax_url, log_request_body=False, certwarn=False, log_level="debug" + ) + + http() + + assert not caplog.records diff --git a/axonius_api_client/tests/tests_pkg/test_logs.py b/axonius_api_client/tests/tests_pkg/test_logs.py new file mode 100644 index 00000000..5eaf6da8 --- /dev/null +++ b/axonius_api_client/tests/tests_pkg/test_logs.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.logs.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import logging +import time + +import pytest + +import axonius_api_client as axonapi +from axonius_api_client import exceptions, logs, tools + + +class TestLogs(object): + """Test logs.""" + + def test_gmtime(self): + """Pass.""" + logs.gmtime() + assert logging.Formatter.converter == time.gmtime + + def test_localtime(self): + """Pass.""" + logs.localtime() + assert logging.Formatter.converter == time.localtime + + def test_get_obj_log(self): + """Pass.""" + log = logs.get_obj_log(obj=self, level="warning") + assert log.name == "axonius_api_client.tests.tests_pkg.test_logs.TestLogs" + assert log.level == logging.WARNING + + def test_str_level_int(self): + """Pass.""" + assert logs.str_level(level=10) == "DEBUG" + + def test_str_level_str_int(self): + """Pass.""" + assert logs.str_level(level="10") == "DEBUG" + + def test_str_level_str(self): 
"""Pass.""" + assert logs.str_level(level="debug") == "DEBUG" + + def test_str_level_fail(self): + """Pass.""" + with pytest.raises(exceptions.ToolsError): + logs.str_level(level="xx") + + def test_add_del_stderr(self): + """Pass.""" + h = logs.add_stderr(obj=logs.LOG) + assert h.name == axonapi.constants.LOG_NAME_STDERR + assert ( + logs.str_level(level=h.level).lower() == axonapi.constants.LOG_LEVEL_CONSOLE + ) + assert isinstance(h, logging.StreamHandler) + assert h in logs.LOG.handlers + + dh = logs.del_stderr(obj=logs.LOG) + assert isinstance(dh, dict) + assert logs.LOG.name in dh + assert isinstance(dh[logs.LOG.name], tools.LIST) + assert h in dh[logs.LOG.name] + assert h not in logs.LOG.handlers + + def test_add_del_stdout(self): + """Pass.""" + h = logs.add_stdout(obj=logs.LOG) + assert h.name == axonapi.constants.LOG_NAME_STDOUT + assert ( + logs.str_level(level=h.level).lower() == axonapi.constants.LOG_LEVEL_CONSOLE + ) + assert isinstance(h, logging.StreamHandler) + assert h in logs.LOG.handlers + + dh = logs.del_stdout(obj=logs.LOG) + assert isinstance(dh, dict) + assert logs.LOG.name in dh + assert isinstance(dh[logs.LOG.name], tools.LIST) + assert h in dh[logs.LOG.name] + assert h not in logs.LOG.handlers + + def test_add_del_null(self): + """Pass.""" + logs.del_null(obj=logs.LOG) + h = logs.add_null(obj=logs.LOG) + assert h.name == "NULL" + assert isinstance(h, logging.NullHandler) + assert h in logs.LOG.handlers + + fh = logs.add_null(obj=logs.LOG) + assert fh is None + + dh = logs.del_null(obj=logs.LOG) + + assert isinstance(dh, dict) + assert isinstance(dh[logs.LOG.name], tools.LIST) + + assert logs.LOG.name in dh + f = dh.pop(logs.LOG.name) + + assert h in f + assert h not in logs.LOG.handlers + assert not dh + + def test_add_del_file(self): + """Pass.""" + h = logs.add_file(obj=logs.LOG) + assert h.name == axonapi.constants.LOG_NAME_FILE + assert logs.str_level(level=h.level).lower() == axonapi.constants.LOG_LEVEL_FILE + assert isinstance(h, 
logs.logging.handlers.RotatingFileHandler) + assert h in logs.LOG.handlers + assert getattr(h, "PATH", None) + assert isinstance(h.PATH, tools.pathlib.Path) + + dh = logs.del_file(logs.LOG) + assert isinstance(dh, dict) + assert logs.LOG.name in dh + assert isinstance(dh[logs.LOG.name], tools.LIST) + assert h in dh[logs.LOG.name] + assert h not in logs.LOG.handlers diff --git a/axonius_api_client/tests/tests_pkg/test_tools.py b/axonius_api_client/tests/tests_pkg/test_tools.py new file mode 100644 index 00000000..0300dbfa --- /dev/null +++ b/axonius_api_client/tests/tests_pkg/test_tools.py @@ -0,0 +1,840 @@ +# -*- coding: utf-8 -*- +"""Test suite for axonius_api_client.tools.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import tempfile + +import pytest + +from axonius_api_client import exceptions, tools + + +class TestCoerce(object): + """Test axonius_api_client.tools.join_url.""" + + def test_int(self): + """Pass.""" + with pytest.raises(exceptions.ToolsError): + tools.coerce_int("badwolf") + + assert tools.coerce_int("456") == 456 + + def test_bool(self): + """Pass.""" + with pytest.raises(exceptions.ToolsError): + tools.coerce_bool("badwolf") + + assert tools.coerce_bool("y") is True + assert tools.coerce_bool("yes") is True + assert tools.coerce_bool("true") is True + assert tools.coerce_bool("1") is True + assert tools.coerce_bool(1) is True + assert tools.coerce_bool("t") is True + assert tools.coerce_bool(True) is True + assert tools.coerce_bool("n") is False + assert tools.coerce_bool("no") is False + assert tools.coerce_bool("false") is False + assert tools.coerce_bool("0") is False + assert tools.coerce_bool(0) is False + assert tools.coerce_bool("f") is False + assert tools.coerce_bool(False) is False + + +class TestJoinUrl(object): + """Test axonius_api_client.tools.join_url.""" + + def test_url(self): + """Test url gets joined properly no matter the slashes.""" + r = tools.join_url("https://test.com") + 
assert r == "https://test.com/" + r = tools.join_url("https://test.com/") + assert r == "https://test.com/" + r = tools.join_url("https://test.com////") + assert r == "https://test.com/" + r = tools.join_url("https://test.com", "") + assert r == "https://test.com/" + r = tools.join_url("https://test.com", "", "") + assert r == "https://test.com/" + r = tools.join_url("https://test.com", "/", "") + assert r == "https://test.com/" + r = tools.join_url("https://test.com", "/", "/") + assert r == "https://test.com/" + + def test_url_path(self): + """Test url, path gets joined properly no matter the slashes.""" + r = tools.join_url("https://test.com", "a") + assert r == "https://test.com/a" + r = tools.join_url("https://test.com", "/a") + assert r == "https://test.com/a" + r = tools.join_url("https://test.com", "//a") + assert r == "https://test.com/a" + r = tools.join_url("https://test.com", "a/") + assert r == "https://test.com/a/" + r = tools.join_url("https://test.com", "a/b") + assert r == "https://test.com/a/b" + r = tools.join_url("https://test.com", "a/b", "") + assert r == "https://test.com/a/b" + r = tools.join_url("https://test.com", "a/b/", "") + assert r == "https://test.com/a/b/" + r = tools.join_url("https://test.com", "a/b", "/") + assert r == "https://test.com/a/b/" + r = tools.join_url("https://test.com", "a/b", "/////") + assert r == "https://test.com/a/b/" + + def test_url_path_route(self): + """Test url, path, route gets joined properly no matter the slashes.""" + r = tools.join_url("https://test.com", "a", "b") + assert r == "https://test.com/a/b" + r = tools.join_url("https://test.com", "/a", "b") + assert r == "https://test.com/a/b" + r = tools.join_url("https://test.com", "//a", "b") + assert r == "https://test.com/a/b" + r = tools.join_url("https://test.com", "a", "b/c/d") + assert r == "https://test.com/a/b/c/d" + + +class TestJoinDot(object): + """Test axonius_api_client.tools.join_dot.""" + + def test_multi(self): + """Test dot join 
multi.""" + r = tools.join_dot(obj=["x", "a", "c"]) + assert r == "x.a.c" + + def test_multi_with_empty_false(self): + """Test dot join multi with empty=False.""" + r = tools.join_dot(obj=["x", "", "a", None, "c", []], empty=False) + assert r == "x.a.c" + + def test_multi_with_empty_true(self): + """Test dot join multi with empty=True.""" + r = tools.join_dot(obj=["x", "", "a", None, "c"], empty=True) + assert r == "x..a.None.c" + + def test_single(self): + """Test dot join single.""" + r = tools.join_dot(obj=["x"]) + assert r == "x" + + def test_non_list(self): + """Test dot join non list.""" + r = tools.join_dot(obj="x") + assert r == "x" + + def test_empty_list(self): + """Test dot join empty list.""" + r = tools.join_dot(obj=[]) + assert r == "" + + def test_single_none(self): + """Test dot join with None.""" + r = tools.join_dot(obj=None) + assert r == "" + + +class TestJoinComma(object): + """Test axonius_api_client.tools.join_comma.""" + + def test_multi(self): + """Test comma join multi.""" + r = tools.join_comma(obj=["x", "a", "c"]) + assert r == "x, a, c" + + def test_multi_no_indent(self): + """Test comma join multi with indent=False.""" + r = tools.join_comma(obj=["x", "a", "c"], indent=False) + assert r == "x,a,c" + + def test_multi_with_empty_false(self): + """Test comma join multi with empty=False.""" + r = tools.join_comma(obj=["x", "", "a", None, "c", []], empty=False) + assert r == "x, a, c" + + def test_multi_with_empty_true(self): + """Test comma join list with multi items with empty=True.""" + r = tools.join_comma(obj=["x", "", "a", None, "c"], empty=True) + assert r == "x, , a, None, c" + + def test_single(self): + """Test comma join list with single item.""" + r = tools.join_comma(obj=["x"]) + assert r == "x" + + def test_non_list(self): + """Test comma non list.""" + r = tools.join_comma(obj="x") + assert r == "x" + + def test_empty_list(self): + """Test comma empty list.""" + r = tools.join_comma(obj=[]) + assert r == "" + + def 
test_single_none(self): + """Test comma join with None.""" + r = tools.join_comma(obj=None) + assert r == "" + + +class TestJoinCr(object): + """Test axonius_api_client.tools.join_cr.""" + + def test_multi(self): + """Test cr join multi.""" + r = tools.join_cr(obj=["x", "a", "c"]) + assert r == "\n x\n a\n c" + + def test_single(self): + """Test cr join multi.""" + r = tools.join_cr(obj=["x"]) + assert r == "\n x" + + def test_single_non_list(self): + """Test cr join list w/ single entry.""" + r = tools.join_cr(obj="x") + assert r == "\n x" + + def test_single_empty_list(self): + """Test cr join empty list.""" + r = tools.join_cr(obj=[]) + assert r == "" + + def test_single_none(self): + """Test cr join with None.""" + r = tools.join_cr(obj=None) + assert r == "" + + def test_post_and_pre(self): + """Test cr join with post and pre = True.""" + r = tools.join_cr(obj=["x", "a", "c"], pre=True, post=True) + assert r == "\n x\n a\n c\n " + + +class TestPath(object): + """Test axonius_api_client.tools.path.""" + + def test_str(self): + """Test resolve with a string.""" + r = tools.path(obj="/../badwolf") + assert isinstance(r, tools.pathlib.Path) + assert format(r) == format("/badwolf") + + def test_pathlib(self): + """Test resolve with a pathlib.Path.""" + r = tools.path(obj=tools.pathlib.Path(".")) + assert isinstance(r, tools.pathlib.Path) + + def test_user(self): + """Test resolve with ~.""" + r = tools.path(obj="~") + assert isinstance(r, tools.pathlib.Path) + assert format(r) == format(tools.pathlib.Path.home()) + + +class TestPathWrite(object): + """Test axonius_api_client.tools.path_write.""" + + def test_simple_pathlib(self, tmp_path): + """Test simple write with pathlib object.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + ret_path, ret_write = tools.path_write(obj=path, data=data) + assert ret_path.read_text() == data + assert format(ret_path) == format(path) + assert ret_write == len(data) + # FUTURE: 
unsure if these are same on windows + assert ret_path.stat().st_mode == 33152 + assert ret_path.parent.stat().st_mode == 16832 + + def test_simple_str(self, tmp_path): + """Test simple write with path as str.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + ret_path, ret_write = tools.path_write(obj=format(path), data=data) + assert ret_path.read_text() == data + assert format(ret_path) == format(path) + assert ret_write == len(data) + + def test_parent_fail(self, tmp_path): + """Test simple write with pathlib object.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + with pytest.raises(exceptions.ToolsError): + tools.path_write(obj=path, data=data, make_parent=False) + + def test_noperm_parent(self): + """Pass.""" + tmpdir = tools.pathlib.Path(tempfile.gettempdir()) + path = tmpdir / "file.txt" + data = "abc\n123\n" + ret_path, ret_write = tools.path_write(obj=path, data=data, overwrite=True) + assert ret_path.read_text() == data + + def test_overwrite_false(self, tmp_path): + """Test overwrite=False.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + tools.path_write(obj=path, data=data) + with pytest.raises(exceptions.ToolsError): + tools.path_write(obj=path, data=data, overwrite=False) + + def test_overwrite_true(self, tmp_path): + """Test overwrite=True.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + tools.path_write(obj=path, data=data) + tools.path_write(obj=path, data=data, overwrite=True) + assert path.is_file() + + def test_binary_true_nonbinary(self, tmp_path): + """Test binary=True with nonbinary data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + ret_path, ret_write = tools.path_write(obj=path, data=data, binary=True) + assert ret_path.read_text() == data + assert 
ret_path.read_bytes() == data.encode() + + def test_binary_true_binary(self, tmp_path): + """Test binary=True with binary data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = b"abc\n123\n" + ret_path, ret_write = tools.path_write(obj=path, data=data, binary=True) + assert ret_path.read_text() == data.decode() + assert ret_path.read_bytes() == data + + def test_binary_false_nonbinary(self, tmp_path): + """Test binary=False with nonbinary data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + ret_path, ret_write = tools.path_write(obj=path, data=data, binary=False) + assert ret_path.read_text() == data + assert ret_path.read_bytes() == data.encode() + + def test_binary_false_binary(self, tmp_path): + """Test binary=False with binary data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = b"abc\n123\n" + ret_path, ret_write = tools.path_write(obj=path, data=data, binary=False) + assert ret_path.read_text() == data.decode() + assert ret_path.read_bytes() == data + + def test_is_json_false_dotjson_nonjson(self, tmp_path): + """Test is_json=False with .json in filename and invalid json data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.json" + data = b"" + ret_path, ret_write = tools.path_write(obj=path, data=data, is_json=False) + assert ret_path.read_text() == data.decode() + + def test_is_json_false_dotjson_json(self, tmp_path): + """Test is_json=False with .json in filename and valid json data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.json" + data = {"x": 2} + ret_path, ret_write = tools.path_write(obj=path, data=data, is_json=False) + assert ret_path.read_text() == '{\n "x": 2\n}' + + def test_is_json_true_nonjson(self, tmp_path): + """Test is_json=True with .json not in filename and invalid json data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / 
"file.text" + data = pytest + with pytest.raises(Exception): + tools.path_write(obj=path, data=data, is_json=True) + + def test_is_json_true_json(self, tmp_path): + """Test is_json=True with .json not in filename and valid json data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.text" + data = {"x": 2} + ret_path, ret_write = tools.path_write(obj=path, data=data, is_json=True) + assert ret_path.read_text() == '{\n "x": 2\n}' + + +class TestPathRead(object): + """Test axonius_api_client.tools.path_read.""" + + def test_pathlib(self, tmp_path): + """Test simple write with pathlib object.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + wret_path, ret_write = tools.path_write(obj=path, data=data) + rret_path, ret_read = tools.path_read(obj=path) + assert wret_path == rret_path + assert ret_read == data + + def test_str(self, tmp_path): + """Test simple write with str.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + wret_path, ret_write = tools.path_write(obj=path, data=data) + rret_path, ret_read = tools.path_read(obj=format(path)) + assert wret_path == rret_path + assert ret_read == data + + def test_binary_true(self, tmp_path): + """Test binary=True.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + wret_path, ret_write = tools.path_write(obj=path, data=data) + rret_path, ret_read = tools.path_read(obj=format(path), binary=True) + assert wret_path == rret_path + assert ret_read == data.encode() + + def test_binary_false(self, tmp_path): + """Test binary=False.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.txt" + data = "abc\n123\n" + wret_path, ret_write = tools.path_write(obj=path, data=data) + rret_path, ret_read = tools.path_read(obj=format(path), binary=False) + assert wret_path == rret_path + assert ret_read == data + + def 
test_is_json_true_nonjson(self, tmp_path): + """Test is_json=True with .json not in filename and invalid json data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.text" + data = "abc\n123\n" + wret_path, ret_write = tools.path_write(obj=path, data=data) + with pytest.raises(Exception): + tools.path_read(obj=path, is_json=True) + + def test_is_json_false_dotjson_nonjson(self, tmp_path): + """Test is_json=False with .json in filename and invalid json data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.json" + data = "abc\n123\n" + wret_path, ret_write = tools.path_write(obj=path, data=data) + rret_path, ret_read = tools.path_read(obj=path, is_json=False) + assert wret_path == rret_path + assert ret_read == data + + def test_is_json_false_dotjson_json(self, tmp_path): + """Test is_json=False with .json in filename and valid json data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.json" + data = {"x": 2} + wret_path, ret_write = tools.path_write(obj=path, data=data) + rret_path, ret_read = tools.path_read(obj=path, is_json=False) + assert wret_path == rret_path + assert ret_read == data + + def test_not_found(self, tmp_path): + """Test is_json=False with .json in filename and valid json data.""" + sub1 = tmp_path / "sub1" + sub2 = sub1 / "sub2" + path = sub2 / "file.json" + with pytest.raises(exceptions.ToolsError): + tools.path_read(obj=path) + + +class TestGrouper(object): + """Test tools.grouper.""" + + def test_iter(self): + """Simple test.""" + x = list(tools.grouper([1, 2, 3, 4, 5, 6], 2)) + assert x == [(1, 2), (3, 4), (5, 6)] + + def test_iter_off1(self): + """Simple test.""" + x = list(tools.grouper([1, 2, 3, 4, 5, 6, 7], 2)) + assert x == [(1, 2), (3, 4), (5, 6), (7, None)] + + def test_iter_off1_strfill(self): + """Simple test.""" + x = list(tools.grouper([1, 2, 3, 4, 5, 6, 7], 2, "x")) + assert x == [(1, 2), (3, 4), (5, 6), (7, "x")] + + +class TestNestDepth(object): + 
"""Test tools.listify.""" + + def test_dict1(self): + """Simple test.""" + x = tools.nest_depth(obj={"x": 1, "y": 1}) + assert x == 1 + + def test_dict2(self): + """Simple test.""" + x = tools.nest_depth(obj={"x": 1, "y": {"z": 1}}) + assert x == 2 + + def test_tuple(self): + """Simple test.""" + x = tools.nest_depth(obj=(1, 2)) + assert x == 1 + + def test_list1(self): + """Simple test.""" + x = tools.nest_depth(obj=[1, 2]) + assert x == 1 + + def test_list2(self): + """Simple test.""" + x = tools.nest_depth(obj=[[1], [2]]) + assert x == 2 + + @pytest.mark.parametrize("val", [1, "1", None], scope="class") + def test_not_complex(self, val): + """Simple test.""" + x = tools.nest_depth(obj=val) + assert x == 0 + + def test_mix(self): + """Simple test.""" + x = tools.nest_depth(obj=["1", ["a", {}]]) + assert x == 3 + + +class TestValuesMatch(object): + """Test tools.values_match.""" + + def test_no_case_no_regex(self): + """Simple test.""" + x = tools.values_match(checks="x", values="x", ignore_case=False) + assert x + x = tools.values_match(checks="x", values="X", ignore_case=False) + assert not x + x = tools.values_match(checks="x", values=["X"], ignore_case=False) + assert not x + x = tools.values_match(checks="x", values=["X", "x"], ignore_case=False) + assert x + x = tools.values_match(checks="x", values=["X", "y"], ignore_case=False) + assert not x + + def test_case_no_regex(self): + """Simple test.""" + x = tools.values_match(checks="x", values="X", ignore_case=True) + assert x + x = tools.values_match(checks="x", values="x", ignore_case=True) + assert x + x = tools.values_match(checks="x", values="xxxxx", ignore_case=True) + assert not x + x = tools.values_match(checks="x", values=["X"], ignore_case=True) + assert x + x = tools.values_match(checks="x", values=["X", "x"], ignore_case=True) + assert x + x = tools.values_match(checks="x", values=["A", "a"], ignore_case=True) + assert not x + + def test_case_regex(self): + """Simple test.""" + x = 
tools.values_match(checks="RE:x", values="X", ignore_case=True) + assert x + x = tools.values_match(checks="RE:.*", values="X", ignore_case=True) + assert x + x = tools.values_match(checks="RE:x", values="x", ignore_case=True) + assert x + x = tools.values_match(checks="RE:x", values="xxxxx", ignore_case=True) + assert x + x = tools.values_match(checks="RE:x", values=["X"], ignore_case=True) + assert x + x = tools.values_match(checks="RE:x", values=["X", "x"], ignore_case=True) + assert x + x = tools.values_match(checks="RE:x", values=["A", "a"], ignore_case=True) + assert not x + + def test_no_case_regex(self): + """Simple test.""" + x = tools.values_match(checks="RE:x", values="X", ignore_case=False) + assert not x + x = tools.values_match(checks="RE:.*", values="X", ignore_case=False) + assert x + x = tools.values_match(checks="RE:x", values="x", ignore_case=False) + assert x + x = tools.values_match(checks="RE:x", values="xxxxx", ignore_case=False) + assert x + x = tools.values_match(checks="RE:x", values=["X"], ignore_case=False) + assert not x + x = tools.values_match(checks="RE:x", values=["X", "x"], ignore_case=False) + assert x + x = tools.values_match(checks="RE:x", values=["A", "a"], ignore_case=False) + assert not x + + +class TestListify(object): + """Test tools.listify.""" + + def test_dict_keys(self): + """Simple test.""" + x = tools.listify(obj={"x": 1, "y": 1}, dictkeys=True) + assert x == ["x", "y"] + + def test_dict_notkeys(self): + """Simple test.""" + x = tools.listify(obj={"x": 1, "y": 1}, dictkeys=False) + assert x == [{"x": 1, "y": 1}] + + def test_tuple(self): + """Simple test.""" + x = tools.listify(obj=(1, 2)) + assert x == [1, 2] + + def test_list(self): + """Simple test.""" + x = tools.listify(obj=[1, 2]) + assert x == [1, 2] + + def test_int(self): + """Simple test.""" + x = tools.listify(obj=1) + assert x == [1] + + def test_str(self): + """Simple test.""" + x = tools.listify(obj="1") + assert x == ["1"] + + def test_none(self): + 
"""Simple test.""" + x = tools.listify(obj=None) + assert x == [] + + +class TestIsInt(object): + """Test tools.is_*.""" + + @pytest.mark.parametrize("ok", [0, 4], scope="class") + @pytest.mark.parametrize("bad", ["1", False, True, b"1"], scope="class") + def test_int_digit_false(self, ok, bad): + """Simple test.""" + assert tools.is_int(obj=ok, digit=False) + assert not tools.is_int(obj=bad, digit=False) + + @pytest.mark.parametrize("ok", [0, 4, "1", b"1"], scope="class") + @pytest.mark.parametrize("bad", [False, True, {}, "x", b"x"], scope="class") + def test_int_digit_true(self, ok, bad): + """Simple test.""" + assert tools.is_int(obj=ok, digit=True) + assert not tools.is_int(obj=bad, digit=True) + + +class TestStripLeft(object): + """Test tools.strip_left.""" + + def test_left_str(self): + """Simple test.""" + x = "badwolfbadwolf" + y = tools.strip_left(obj=x, fix="badwolf") + assert y == "badwolf" + + def test_left_list(self): + """Simple test.""" + x = ["badwolfbadwolf", "badwolf", "abadwolfbadwolf"] + y = tools.strip_left(obj=x, fix="badwolf") + assert y == ["badwolf", "", "abadwolfbadwolf"] + + def test_left_str_nomatch(self): + """Simple test.""" + x = "abadwolfbadwolf" + y = tools.strip_left(obj=x, fix="badwolf") + assert y == "abadwolfbadwolf" + + +class TestStripRight(object): + """Test tools.strip_right.""" + + def test_right_str(self): + """Simple test.""" + x = "badwolfbadwolf" + y = tools.strip_right(obj=x, fix="badwolf") + assert y == "badwolf" + + def test_right_str_nomatch(self): + """Simple test.""" + x = "badwolfbadwolfa" + y = tools.strip_right(obj=x, fix="badwolf") + assert y == "badwolfbadwolfa" + + def test_right_list(self): + """Simple test.""" + x = ["badwolfbadwolf", "badwolf", "badwolfbadwolfa"] + y = tools.strip_right(obj=x, fix="badwolf") + assert y == ["badwolf", "", "badwolfbadwolfa"] + + +class TestJsonLoad(object): + """Test tools.json_load.""" + + def test_load(self): + """Simple test.""" + x = "{}" + y = tools.json_load(obj=x) + 
assert y == {} + + def test_load_error_false(self): + """Simple test.""" + x = "xxx" + y = tools.json_load(obj=x, error=False) + assert y == "xxx" + + def test_load_error_true(self): + """Simple test.""" + x = "xxx" + with pytest.raises(Exception): + tools.json_load(obj=x, error=True) + + +class TestJsonDump(object): + """Test tools.json_dump.""" + + def test_dump(self): + """Simple test.""" + x = {"x": 2} + y = tools.json_dump(obj=x) + assert y == '{\n "x": 2\n}' + + def test_dump_error_false(self): + """Simple test.""" + x = pytest + y = tools.json_dump(obj=x, error=False) + assert y == pytest + + def test_dump_error_true(self): + """Simple test.""" + x = pytest + with pytest.raises(Exception): + tools.json_dump(obj=x, error=True) + + +class TestJsonReload(object): + """Test tools.json_dump.""" + + def test_re_load(self): + """Simple test.""" + x = '{"x": 2}' + y = tools.json_reload(obj=x) + assert y == '{\n "x": 2\n}' + + def test_re_load_error_false(self): + """Simple test.""" + x = "{" + y = tools.json_reload(obj=x, error=False) + assert y == x + + def test_re_load_error_true(self): + """Simple test.""" + x = "{" + with pytest.raises(Exception): + tools.json_reload(obj=x, error=True) + + +class TestDtMinAgo(object): + """Test tools.dt_*.""" + + def test_min_ago_utc_str(self): + """Simple test.""" + then = format(tools.dt_now() - tools.timedelta(minutes=1)) + assert tools.dt_min_ago(obj=then) == 1 + + def test_min_ago_utc_dt(self): + """Simple test.""" + then = tools.dt_now() - tools.timedelta(minutes=1) + assert tools.dt_min_ago(obj=then) == 1 + + def test_min_ago_utc_dt_naive(self): + """Simple test.""" + then = tools.dt_now(None) - tools.timedelta(minutes=1) + assert tools.dt_min_ago(obj=then) == 1 + + def test_min_ago_utc_dtdelta(self): + """Simple test.""" + then = tools.timedelta(minutes=3) + assert tools.dt_min_ago(obj=then) == 3 + + def test_min_ago_naive(self): + """Simple test.""" + then = tools.datetime.now() - tools.timedelta(minutes=1) + assert 
tools.dt_min_ago(obj=format(then)) == 1 + + +class TestDtNow(object): + """Test tools.dt_*.""" + + def test_now(self): + """Pass.""" + now = tools.dt_now() + assert now.tzinfo + + def test_now_notz(self): + """Pass.""" + now = tools.dt_now(tz=None) + assert not now.tzinfo + + def test_now_delta(self): + """Pass.""" + then = tools.dt_now(delta=tools.timedelta(minutes=5)) + assert tools.dt_min_ago(then) == 5 + + +class TestDtParse(object): + """Test tools.dt_*.""" + + @pytest.mark.parametrize( + "val", + [format(tools.dt_now()), tools.dt_now(), tools.timedelta(minutes=1)], + scope="class", + ) + def test_val(self, val): + """Pass.""" + now = tools.dt_parse(obj=val) + assert isinstance(now, tools.datetime) + + def test_list(self): + """Pass.""" + now = [format(tools.dt_now())] + now = tools.dt_parse(obj=now) + assert isinstance(now, tools.LIST) + assert [isinstance(x, tools.datetime) for x in now] + + +class TestDtWithinMin(object): + """Test tools.dt_*.""" + + @pytest.mark.parametrize( + "val", [None, "x", False, True, {}, [], 6, "8", b"9"], scope="class" + ) + def test_bad(self, val): + """Pass.""" + then = tools.dt_now(delta=tools.timedelta(minutes=5)) + assert tools.dt_within_min(obj=then, n=val) is False + + @pytest.mark.parametrize("val", [0, 4, "1", b"2"], scope="class") + def test_ok(self, val): + """Pass.""" + then = tools.dt_now(delta=tools.timedelta(minutes=5)) + assert tools.dt_within_min(obj=then, n=val) is True diff --git a/axonius_api_client/tests/utils.py b/axonius_api_client/tests/utils.py new file mode 100644 index 00000000..0ca758f8 --- /dev/null +++ b/axonius_api_client/tests/utils.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +"""Test suite.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import csv +import re + +from click.testing import CliRunner + +import axonius_api_client as axonapi +from axonius_api_client import cli, tools + + +def log_check(caplog, entries): + """Check if entries match caplog.""" + 
msgs = [rec.message for rec in caplog.records] + for entry in entries: + if not any(re.search(entry, m) for m in msgs): + error = "Did not find entry in log: {!r}\nAll entries:\n{}" + error = error.format(entry, "\n".join(msgs)) + raise Exception(error) + + +def get_url(request): + """Pass.""" + return request.config.getoption("--ax-url") + + +def get_key_creds(request): + """Pass.""" + key = request.config.getoption("--ax-key") + secret = request.config.getoption("--ax-secret") + return {"key": key, "secret": secret} + + +def get_auth(request): + """Pass.""" + http = axonapi.Http(url=get_url(request), certwarn=False) + + auth = axonapi.ApiKey(http=http, **get_key_creds(request)) + auth.login() + return auth + + +def check_apiobj(authobj, apiobj): + """Pass.""" + url = authobj._http.url + authclsname = format(authobj.__class__.__name__) + + assert authclsname in format(apiobj) + assert authclsname in repr(apiobj) + assert url in format(apiobj) + assert url in repr(apiobj) + + assert isinstance(apiobj._router, axonapi.api.routers.Router) + + +def check_apiobj_children(apiobj, **kwargs): + """Pass.""" + for k, v in kwargs.items(): + attr = getattr(apiobj, k) + attrclsname = format(attr.__class__.__name__) + + assert isinstance(attr, axonapi.api.mixins.Child) + assert isinstance(attr, v) + assert attrclsname in format(attr) + assert attrclsname in repr(attr) + + +def check_apiobj_xref(apiobj, **kwargs): + """Pass.""" + for k, v in kwargs.items(): + attr = getattr(apiobj, k) + + assert isinstance(attr, axonapi.api.mixins.Model) + assert isinstance(attr, v) + + +def load_clirunner(request, monkeypatch): + """Pass.""" + runner = CliRunner(mix_stderr=False) + + url = request.config.getoption("--ax-url") + key = request.config.getoption("--ax-key") + secret = request.config.getoption("--ax-secret") + + monkeypatch.setenv("AX_URL", url) + monkeypatch.setenv("AX_KEY", key) + monkeypatch.setenv("AX_SECRET", secret) + monkeypatch.setattr(cli.context, "load_dotenv", 
mock_load_dotenv) + return runner + + +def check_stderr_lines(result): + """Pass.""" + stderr = result.stderr.splitlines() + + assert stderr[0] == ( + "** WARNING: Unverified HTTPS request! Set AX_CERT environment variable " + "or --cert option to the path of a CA bundle!" + ), stderr + assert stderr[1].startswith("** Connected to "), stderr + + +def mock_load_dotenv(): + """Pass.""" + pass + + +class MockError(Exception): + """Pass.""" + + +def mock_failure(*args, **kwargs): + """Pass.""" + raise MockError("badwolf") + + +def check_csv_cols(content, cols): + """Pass.""" + QUOTING = csv.QUOTE_NONNUMERIC + fh = tools.six.StringIO() + fh.write(content) + fh.seek(0) + reader = csv.DictReader(fh, quoting=QUOTING) + rows = [] + for row in reader: + rows.append(row) + for x in cols: + assert x in row, "column {!r} not in {}".format(x, list(row)) + return rows diff --git a/axonius_api_client/tools.py b/axonius_api_client/tools.py index 7188d481..fab88613 100644 --- a/axonius_api_client/tools.py +++ b/axonius_api_client/tools.py @@ -1,16 +1,146 @@ # -*- coding: utf-8 -*- -"""Axonius API Client package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +"""Axonius API Client utility tools module.""" +from __future__ import absolute_import, division, print_function, unicode_literals import json +import re +from datetime import datetime, timedelta +import dateutil.parser +import dateutil.relativedelta +import dateutil.tz import six +from . 
import exceptions -def urljoin(url, *parts): +if six.PY2: + import pathlib2 as pathlib # pragma: no cover +else: + import pathlib + + +COMPLEX = (dict, list, tuple) +EMPTY = [None, "", [], {}, ()] +LIST = (tuple, list) +STR = six.string_types +INT = six.integer_types +BYTES = six.binary_type +SIMPLE = tuple(list(STR) + [int, bool, float]) +SIMPLE_NONE = tuple(list(SIMPLE) + [None]) +YES = [True, 1, "1", "true", "t", "yes", "y"] +NO = [False, 0, "0", "false", "f", "no", "n"] + + +def listify(obj, dictkeys=False): + """Pass.""" + if isinstance(obj, tuple): + return list(obj) + + if obj is None: + return [] + + if isinstance(obj, SIMPLE): + return [obj] + + if isinstance(obj, dict): + if dictkeys: + return list(obj) + + return [obj] + + return obj + + +def grouper(iterable, n, fillvalue=None): + """Chunk up iterables.""" + return six.moves.zip_longest(*([iter(iterable)] * n), fillvalue=fillvalue) + + +def nest_depth(obj): + """Pass.""" + if isinstance(obj, dict): + obj = list(obj.values()) + + if isinstance(obj, LIST): + calcs = [nest_depth(obj=x) for x in obj if isinstance(obj, COMPLEX)] + if calcs: + return 1 + max(calcs) + return 1 + return 0 + + +def values_match(checks, values, ignore_case=True): + """Pass.""" + if ignore_case: + re_flags = re.I + else: + re_flags = 0 + + checks = listify(obj=checks, dictkeys=False) + if checks in EMPTY: + return True + + for check in checks: + check = format(check) + if check.startswith("RE:"): + re_text = strip_left(obj=check, fix="RE:").strip() + re_pattern = re.compile(re_text, re_flags) + re_method = re_pattern.search + else: + re_text = "^{}$".format(check) + re_pattern = re.compile(re_text, re_flags) + re_method = re_pattern.match + + for value in listify(obj=values, dictkeys=False): + if re_method(value): + return True + + return False + + +# TODO: test +def coerce_int(obj): + """Pass.""" + try: + return int(obj) + except Exception: + msg = "Supplied value {o!r} is not an integer." 
+ msg = msg.format(o=obj) + raise exceptions.ToolsError(msg) + + +# TODO: test +def coerce_bool(obj): + """Pass.""" + coerce_obj = obj + + if isinstance(obj, STR): + coerce_obj.lower().strip() + + if coerce_obj in YES: + return True + + if coerce_obj in NO: + return False + + msg = "Supplied value {o!r} is not one of {y} for true or {n} for false." + msg = msg.format(o=coerce_obj, y=YES, n=NO) + raise exceptions.ToolsError(msg) + + +def is_int(obj, digit=False): + """Pass.""" + if digit: + if isinstance(obj, STR) and obj.isdigit(): + return True + + if isinstance(obj, BYTES) and obj.isdigit(): + return True + + return not isinstance(obj, bool) and isinstance(obj, INT) + + +def join_url(url, *parts): """Join a URL to any number of parts. Args: @@ -32,37 +162,224 @@ def urljoin(url, *parts): return url -def grouper(iterable, n, fillvalue=None): - """Chunk up iterables.""" - args = [iter(iterable)] * n - return six.moves.zip_longest(*args, fillvalue=fillvalue) +def join_dot(obj, empty=False, joiner="."): + """Pass.""" + obj = listify(obj=obj, dictkeys=True) + + if not empty: + obj = [x for x in obj if x not in EMPTY and format(x)] + + return joiner.join([format(x) for x in obj]) + + +def join_cr(obj, pre=True, post=False, indent=" ", joiner="\n"): + """Pass.""" + obj = listify(obj=obj, dictkeys=True) + + if indent: + joiner = "{}{}".format(joiner, indent) + + joined = joiner.join([format(x) for x in obj]) + + if joined: + if pre: + joined = joiner + joined + if post: + joined = joined + joiner + + return joined + + +def join_comma(obj, empty=False, indent=" ", joiner=","): + """Pass.""" + obj = listify(obj=obj, dictkeys=True) + if not empty: + obj = [x for x in obj if x not in EMPTY and format(x)] -def rstrip(obj, postfix): + if indent: + joiner = "{}{}".format(joiner, indent) + + return joiner.join([format(x) for x in obj]) + + +def strip_right(obj, fix): """Pass.""" - if isinstance(obj, (list, tuple)): - obj = [rstrip(x, postfix) for x in obj] - elif isinstance(obj, 
six.string_types): - plen = len(postfix) - obj = obj[:-plen] if obj.endswith(postfix) else obj + if isinstance(obj, LIST) and all([isinstance(x, STR) for x in obj]): + return [strip_right(obj=x, fix=fix) for x in obj] + + if isinstance(obj, STR): + plen = len(fix) + + if obj.endswith(fix): + return obj[:-plen] + return obj -def lstrip(obj, prefix): +def strip_left(obj, fix): """Pass.""" - if isinstance(obj, (list, tuple)): - obj = [lstrip(obj=x, prefix=prefix) for x in obj] - elif isinstance(obj, six.string_types): - plen = len(prefix) - obj = obj[plen:] if obj.startswith(prefix) else obj + if isinstance(obj, LIST) and all([isinstance(x, STR) for x in obj]): + return [strip_left(obj=x, fix=fix) for x in obj] + + if isinstance(obj, STR): + plen = len(fix) + + if obj.startswith(fix): + return obj[plen:] + return obj -def json_pretty(text): +def json_dump(obj, indent=2, sort_keys=False, error=True, **kwargs): """Pass.""" try: - text = json.dumps(json.loads(text), indent=2) + return json.dumps(obj, indent=indent, sort_keys=sort_keys, **kwargs) except Exception: - text = text or "" - text = (text or "").strip() - return text + if error: + raise + return obj + + +def json_load(obj, error=True, **kwargs): + """Pass.""" + try: + return json.loads(obj, **kwargs) + except Exception: + if error: + raise + return obj + + +def json_reload(obj, error=False, **kwargs): + """Pass.""" + obj = json_load(obj=obj, error=error) + if not isinstance(obj, STR): + obj = json_dump(obj=obj, error=error, **kwargs) + obj = obj or "" + if isinstance(obj, STR): + obj = obj.strip() + return obj + + +def dt_parse(obj): + """Pass.""" + if isinstance(obj, LIST) and all([isinstance(x, STR) for x in obj]): + return [dt_parse(obj=x) for x in obj] + + if isinstance(obj, datetime): + obj = format(obj) + + if isinstance(obj, timedelta): + obj = format(dt_now() - obj) + + return dateutil.parser.parse(obj) + + +def dt_now(delta=None, tz=dateutil.tz.tzutc()): + """Pass.""" + if isinstance(delta, timedelta): 
+ return dt_parse(obj=delta) + + return datetime.now(tz) + + +def dt_sec_ago(obj): + """Pass.""" + obj = dt_parse(obj=obj) + now = dt_now(tz=obj.tzinfo) + return round((now - obj).total_seconds()) + + +def dt_min_ago(obj): + """Pass.""" + return round(dt_sec_ago(obj=obj) / 60) + + +def dt_within_min(obj, n=None): + """Pass.""" + if not is_int(obj=n, digit=True): + return False + + return dt_min_ago(obj=obj) >= int(n) + + +def path(obj): + """Pass.""" + return pathlib.Path(obj).expanduser().resolve(strict=False) + + +def path_read(obj, binary=False, is_json=False, **kwargs): + """Pass.""" + robj = path(obj=obj) + + if not robj.is_file(): + msg = "Supplied path='{o}' (resolved='{ro}') does not exist!" + msg = msg.format(o=obj, ro=robj) + raise exceptions.ToolsError(msg) + + if binary: + data = robj.read_bytes() + else: + data = robj.read_text() + + if is_json: + data = json_load(obj=data, **kwargs) + + if robj.suffix == ".json" and isinstance(data, STR): + kwargs.setdefault("error", False) + data = json_load(obj=data, **kwargs) + + return robj, data + + +def path_write( + obj, + data, + overwrite=False, + binary=False, + binary_encoding="utf-8", + is_json=False, + make_parent=True, + protect_file=0o600, + protect_parent=0o700, + **kwargs +): + """Pass.""" + obj = path(obj=obj) + + if is_json: + data = json_dump(obj=data, **kwargs) + + if obj.suffix == ".json" and not isinstance(data, STR): + kwargs.setdefault("error", False) + data = json_dump(obj=data, **kwargs) + + if binary: + if not isinstance(data, BYTES): + data = data.encode(binary_encoding) + method = obj.write_bytes + else: + if isinstance(data, BYTES): + data = data.decode(binary_encoding) + method = obj.write_text + + if obj.is_file() and overwrite is False: + error = "File '{path}' already exists and overwrite is False" + error = error.format(path=format(obj)) + raise exceptions.ToolsError(error) + + if not obj.parent.is_dir(): + if make_parent: + obj.parent.mkdir(mode=protect_parent, parents=True, 
exist_ok=True) + else: + error = "Directory '{path}' does not exist and make_parent is False" + error = error.format(path=format(obj.parent)) + raise exceptions.ToolsError(error) + + obj.touch() + + if protect_file: + obj.chmod(protect_file) + + return obj, method(data) diff --git a/axonius_api_client/version.py b/axonius_api_client/version.py index 618409bf..a15ae0fa 100644 --- a/axonius_api_client/version.py +++ b/axonius_api_client/version.py @@ -1,33 +1,43 @@ # -*- coding: utf-8 -*- """Version information for this package.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import absolute_import, division, print_function, unicode_literals -__version__ = "1.0.3" +__version__ = "2.0.0" +VERSION = __version__ """:obj:`str`: Version of package.""" __url__ = "https://github.com/Axonius/axonius_api_client" +URL = __url__ """:obj:`str`: URL of package.""" __author__ = "Axonius" +AUTHOR = __author__ """:obj:`str`: Auth of package.""" __title__ = "axonius_api_client" +TITLE = __title__ """:obj:`str`: Title of package.""" __project__ = "axonius_api_client" +PROJECT = __project__ """:obj:`str`: Name of package.""" __author_email__ = "apiclient@axonius.com" +AUTHOR_EMAIL = __author_email__ """:obj:`str`: Author email of package.""" __description__ = "Axonius API client for Python" +DESCRIPTION = __description__ """:obj:`str`: Description of package.""" +__docs__ = "https://axonius-api-client.readthedocs.io/en/latest/?" 
+DOCS = __docs__ +""":obj:`str`: Link to the documentation for this package.""" + __license__ = "MIT" +LICENSE = __license__ """:obj:`str`: License of package.""" __copyright__ = "Copyright Axonius 2019" +COPYRIGHT = __copyright__ """:obj:`str`: Copyright of package.""" diff --git a/axonshell_cli.py b/axonshell_cli.py new file mode 100755 index 00000000..3a0768e1 --- /dev/null +++ b/axonshell_cli.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Utilities for this package.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +if __name__ == "__main__": + import axonius_api_client as axonapi + + try: + ctx = axonapi.cli.cli() + except SystemExit: + pass diff --git a/axonshell_manual.py b/axonshell_manual.py new file mode 100755 index 00000000..3b38ab4c --- /dev/null +++ b/axonshell_manual.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python -i +# -*- coding: utf-8 -*- +"""Utilities for this package.""" +from __future__ import absolute_import, division, print_function, unicode_literals + +if __name__ == "__main__": + import os + + import axonius_api_client as axonapi + + tools = axonapi.tools + axonapi.cli.context.load_dotenv() + + AX_URL = os.environ["AX_URL"] + AX_KEY = os.environ["AX_KEY"] + AX_SECRET = os.environ["AX_SECRET"] + + def jdump(obj, **kwargs): + """JSON dump utility.""" + print(axonapi.tools.json_reload(obj, **kwargs)) + + ctx = axonapi.Connect( + url=AX_URL, + key=AX_KEY, + secret=AX_SECRET, + certwarn=False, + log_level_console="debug", + log_level_api="debug", + log_console=True, + ) + + with axonapi.cli.context.exc_wrap(wraperror=True): + ctx.start() diff --git a/docs/_templates/module.rst_t b/docs/_templates/module.rst_t new file mode 100644 index 00000000..f7417772 --- /dev/null +++ b/docs/_templates/module.rst_t @@ -0,0 +1,9 @@ +{%- if show_headings %} +{{- [basename, "module"] | join(' ') | e | heading }} + +{% endif -%} +.. 
automodule:: {{ qualname }} + :members: + :special-members: + :private-members: + :member-order: bysource diff --git a/docs/_templates/package.rst_t b/docs/_templates/package.rst_t new file mode 100644 index 00000000..ac17dfea --- /dev/null +++ b/docs/_templates/package.rst_t @@ -0,0 +1,45 @@ +{%- macro automodule(modname, options) -%} +.. automodule:: {{ modname }} + :no-members: +{%- endmacro %} + +{%- macro toctree(docnames) -%} +.. toctree:: +{% for docname in docnames %} + {{ docname }} +{%- endfor %} +{%- endmacro %} + +{%- if is_namespace %} +{{- [pkgname, "namespace"] | join(" ") | e | heading }} +{% else %} +{{- [pkgname, "package"] | join(" ") | e | heading }} +{% endif %} + +{%- if modulefirst and not is_namespace %} +{{ automodule(pkgname, automodule_options) }} +{% endif %} + +{%- if subpackages %} +{{ toctree(subpackages) }} +{% endif %} + +{%- if submodules %} +{% if separatemodules %} +{{ toctree(submodules) }} +{%- else %} +{%- for submodule in submodules %} +{% if show_headings %} +{{- [submodule, "module"] | join(" ") | e | heading(2) }} +{% endif %} +{{ automodule(submodule, automodule_options) }} +{% endfor %} +{%- endif %} +{% endif %} + +{%- if not modulefirst and not is_namespace %} +Module contents +--------------- + +{{ automodule(pkgname, automodule_options) }} +{% endif %} diff --git a/docs/_templates/toc.rst_t b/docs/_templates/toc.rst_t new file mode 100644 index 00000000..f0877eeb --- /dev/null +++ b/docs/_templates/toc.rst_t @@ -0,0 +1,8 @@ +{{ header | heading }} + +.. toctree:: + :maxdepth: {{ maxdepth }} +{% for docname in docnames %} + {{ docname }} +{%- endfor %} + diff --git a/docs/api_ref/axonius_api_client.api.adapters.rst b/docs/api_ref/axonius_api_client.api.adapters.rst new file mode 100644 index 00000000..1098e34b --- /dev/null +++ b/docs/api_ref/axonius_api_client.api.adapters.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.api.adapters module +======================================== + +.. 
automodule:: axonius_api_client.api.adapters + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.api.enforcements.rst b/docs/api_ref/axonius_api_client.api.enforcements.rst new file mode 100644 index 00000000..83944a60 --- /dev/null +++ b/docs/api_ref/axonius_api_client.api.enforcements.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.api.enforcements module +============================================ + +.. automodule:: axonius_api_client.api.enforcements + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.api.mixins.rst b/docs/api_ref/axonius_api_client.api.mixins.rst new file mode 100644 index 00000000..856d30b1 --- /dev/null +++ b/docs/api_ref/axonius_api_client.api.mixins.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.api.mixins module +====================================== + +.. automodule:: axonius_api_client.api.mixins + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.api.routers.rst b/docs/api_ref/axonius_api_client.api.routers.rst new file mode 100644 index 00000000..2ad72fdb --- /dev/null +++ b/docs/api_ref/axonius_api_client.api.routers.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.api.routers module +======================================= + +.. automodule:: axonius_api_client.api.routers + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.api.rst b/docs/api_ref/axonius_api_client.api.rst new file mode 100644 index 00000000..aa403ac7 --- /dev/null +++ b/docs/api_ref/axonius_api_client.api.rst @@ -0,0 +1,14 @@ +axonius\_api\_client.api package +================================ + +.. automodule:: axonius_api_client.api + :no-members: + + +.. 
toctree:: + + axonius_api_client.api.adapters + axonius_api_client.api.enforcements + axonius_api_client.api.mixins + axonius_api_client.api.routers + axonius_api_client.api.users_devices diff --git a/docs/api_ref/axonius_api_client.api.users_devices.rst b/docs/api_ref/axonius_api_client.api.users_devices.rst new file mode 100644 index 00000000..bd4e3688 --- /dev/null +++ b/docs/api_ref/axonius_api_client.api.users_devices.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.api.users\_devices module +============================================== + +.. automodule:: axonius_api_client.api.users_devices + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.auth.rst b/docs/api_ref/axonius_api_client.auth.rst new file mode 100644 index 00000000..83f60380 --- /dev/null +++ b/docs/api_ref/axonius_api_client.auth.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.auth module +================================ + +.. automodule:: axonius_api_client.auth + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.connect.rst b/docs/api_ref/axonius_api_client.connect.rst new file mode 100644 index 00000000..aaaed388 --- /dev/null +++ b/docs/api_ref/axonius_api_client.connect.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.connect module +=================================== + +.. automodule:: axonius_api_client.connect + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.constants.rst b/docs/api_ref/axonius_api_client.constants.rst new file mode 100644 index 00000000..512f85f9 --- /dev/null +++ b/docs/api_ref/axonius_api_client.constants.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.constants module +===================================== + +.. 
automodule:: axonius_api_client.constants + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.exceptions.rst b/docs/api_ref/axonius_api_client.exceptions.rst new file mode 100644 index 00000000..5fc847a1 --- /dev/null +++ b/docs/api_ref/axonius_api_client.exceptions.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.exceptions module +====================================== + +.. automodule:: axonius_api_client.exceptions + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.http.rst b/docs/api_ref/axonius_api_client.http.rst new file mode 100644 index 00000000..83405c84 --- /dev/null +++ b/docs/api_ref/axonius_api_client.http.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.http module +================================ + +.. automodule:: axonius_api_client.http + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.logs.rst b/docs/api_ref/axonius_api_client.logs.rst new file mode 100644 index 00000000..beaf657c --- /dev/null +++ b/docs/api_ref/axonius_api_client.logs.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.logs module +================================ + +.. automodule:: axonius_api_client.logs + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.rst b/docs/api_ref/axonius_api_client.rst new file mode 100644 index 00000000..95972c65 --- /dev/null +++ b/docs/api_ref/axonius_api_client.rst @@ -0,0 +1,21 @@ +axonius\_api\_client package +============================ + +.. automodule:: axonius_api_client + :no-members: + +.. toctree:: + + axonius_api_client.api + + +.. 
toctree:: + + axonius_api_client.auth + axonius_api_client.connect + axonius_api_client.constants + axonius_api_client.exceptions + axonius_api_client.http + axonius_api_client.logs + axonius_api_client.tools + axonius_api_client.version diff --git a/docs/api_ref/axonius_api_client.tools.rst b/docs/api_ref/axonius_api_client.tools.rst new file mode 100644 index 00000000..23f35834 --- /dev/null +++ b/docs/api_ref/axonius_api_client.tools.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.tools module +================================= + +.. automodule:: axonius_api_client.tools + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/axonius_api_client.version.rst b/docs/api_ref/axonius_api_client.version.rst new file mode 100644 index 00000000..64282da4 --- /dev/null +++ b/docs/api_ref/axonius_api_client.version.rst @@ -0,0 +1,8 @@ +axonius\_api\_client.version module +=================================== + +.. automodule:: axonius_api_client.version + :members: + :special-members: + :private-members: + :member-order: bysource \ No newline at end of file diff --git a/docs/api_ref/modules.rst b/docs/api_ref/modules.rst new file mode 100644 index 00000000..81a0699b --- /dev/null +++ b/docs/api_ref/modules.rst @@ -0,0 +1,7 @@ +axonius_api_client +================== + +.. toctree:: + :maxdepth: 4 + + axonius_api_client diff --git a/docs/conf.py b/docs/conf.py index bd50a5fd..96b30937 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -44,6 +44,7 @@ def strip(t): # noqa pkg_project = pkg.version.__project__ pkg_title = pkg.version.__title__ +keep_warnings = True # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. @@ -304,7 +305,10 @@ def strip(t): # noqa # by member type (value 'groupwise') # by source order (value 'bysource') # The default is alphabetical. 
- "member-order": "bysource" + "member-order": "bysource", + "special-members": "__init__, __call__", + "private-members": None, + "members": None, } # -- Options for intersphinx extension --------------------------------------- @@ -312,8 +316,7 @@ def strip(t): # noqa # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("http://docs.python.org/3", None), - # FUTURE: requests doc site down for now, re-enable later - # "requests": ("http://docs.python-requests.org/en/master/", None), + "requests": ("https://2.python-requests.org//en/master/", None), "urllib3": ("https://urllib3.readthedocs.io/en/latest/", None), } diff --git a/docs/index.rst b/docs/index.rst index 30e87b3a..8010a17c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -13,12 +13,18 @@ Installation This library is actively maintained on GitHub, where the code is `always available `_. -Supported & Tested Python versions +Supported Python versions ========================================================== -* 3.7.3, 3.6.8, 2.7.16 +* 3.7.4, 2.7.16 * Windows, OSX, Linux +Test Python versions +========================================================== + +* 3.7.4 +* OSX + Install into the system site-packages using pip ============================================================ @@ -61,7 +67,7 @@ Table of Contents root/release_strategy.rst root/changelog.rst root/todo.rst - root/api_ref/toc.rst + api_ref/modules.rst Indices and tables ############################################### diff --git a/docs/root/api_ref/api/exceptions.rst b/docs/root/api_ref/api/exceptions.rst deleted file mode 100644 index 5bfa84a9..00000000 --- a/docs/root/api_ref/api/exceptions.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.api.exceptions -=========================================================== - -.. 
automodule:: axonius_api_client.api.exceptions - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/api/init.rst b/docs/root/api_ref/api/init.rst deleted file mode 100644 index 6547e8eb..00000000 --- a/docs/root/api_ref/api/init.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.api -=========================================================== - -.. automodule:: axonius_api_client.api - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/api/models.rst b/docs/root/api_ref/api/models.rst deleted file mode 100644 index 2f514ac5..00000000 --- a/docs/root/api_ref/api/models.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.api.models -=========================================================== - -.. automodule:: axonius_api_client.api.models - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/api/routers.rst b/docs/root/api_ref/api/routers.rst deleted file mode 100644 index 68c3120c..00000000 --- a/docs/root/api_ref/api/routers.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.api.routers -=========================================================== - -.. automodule:: axonius_api_client.api.routers - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/api/toc.rst b/docs/root/api_ref/api/toc.rst deleted file mode 100644 index f70a7f37..00000000 --- a/docs/root/api_ref/api/toc.rst +++ /dev/null @@ -1,10 +0,0 @@ -axonius_api_client.api package -=========================================================== - -.. 
toctree:: - - init.rst - exceptions.rst - models.rst - routers.rst - utils.rst diff --git a/docs/root/api_ref/api/utils.rst b/docs/root/api_ref/api/utils.rst deleted file mode 100644 index 1f276727..00000000 --- a/docs/root/api_ref/api/utils.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.api.utils -=========================================================== - -.. automodule:: axonius_api_client.api.utils - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/auth/exceptions.rst b/docs/root/api_ref/auth/exceptions.rst deleted file mode 100644 index 05328689..00000000 --- a/docs/root/api_ref/auth/exceptions.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.auth.exceptions -=========================================================== - -.. automodule:: axonius_api_client.auth.exceptions - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/auth/init.rst b/docs/root/api_ref/auth/init.rst deleted file mode 100644 index e8edc85e..00000000 --- a/docs/root/api_ref/auth/init.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.auth -=========================================================== - -.. automodule:: axonius_api_client.auth - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/auth/models.rst b/docs/root/api_ref/auth/models.rst deleted file mode 100644 index 765e907a..00000000 --- a/docs/root/api_ref/auth/models.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.auth.models -=========================================================== - -.. 
automodule:: axonius_api_client.auth.models - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/auth/toc.rst b/docs/root/api_ref/auth/toc.rst deleted file mode 100644 index 29f5cf0e..00000000 --- a/docs/root/api_ref/auth/toc.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.auth package -=========================================================== - -.. toctree:: - - init.rst - exceptions.rst - models.rst diff --git a/docs/root/api_ref/http/exceptions.rst b/docs/root/api_ref/http/exceptions.rst deleted file mode 100644 index 7d49f024..00000000 --- a/docs/root/api_ref/http/exceptions.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.http.exceptions -=========================================================== - -.. automodule:: axonius_api_client.http.exceptions - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/http/init.rst b/docs/root/api_ref/http/init.rst deleted file mode 100644 index cb7ef572..00000000 --- a/docs/root/api_ref/http/init.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.http -=========================================================== - -.. automodule:: axonius_api_client.http - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/http/toc.rst b/docs/root/api_ref/http/toc.rst deleted file mode 100644 index ca2a19dd..00000000 --- a/docs/root/api_ref/http/toc.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.http package -=========================================================== - -.. toctree:: - - init.rst - exceptions.rst - urlparser.rst diff --git a/docs/root/api_ref/http/urlparser.rst b/docs/root/api_ref/http/urlparser.rst deleted file mode 100644 index 5c341d99..00000000 --- a/docs/root/api_ref/http/urlparser.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.http.urlparser -=========================================================== - -.. 
automodule:: axonius_api_client.http.urlparser - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/pkg/constants.rst b/docs/root/api_ref/pkg/constants.rst deleted file mode 100644 index 250f9717..00000000 --- a/docs/root/api_ref/pkg/constants.rst +++ /dev/null @@ -1,7 +0,0 @@ -axonius_api_client.constants -=========================================================== - -.. automodule:: axonius_api_client.constants - :members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/pkg/exceptions.rst b/docs/root/api_ref/pkg/exceptions.rst deleted file mode 100644 index 68becb93..00000000 --- a/docs/root/api_ref/pkg/exceptions.rst +++ /dev/null @@ -1,7 +0,0 @@ -axonius_api_client.exceptions -=========================================================== - -.. automodule:: axonius_api_client.exceptions - :members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/pkg/init.rst b/docs/root/api_ref/pkg/init.rst deleted file mode 100644 index 1615fa23..00000000 --- a/docs/root/api_ref/pkg/init.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client -=========================================================== - -.. automodule:: axonius_api_client - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/pkg/toc.rst b/docs/root/api_ref/pkg/toc.rst deleted file mode 100644 index 4d548245..00000000 --- a/docs/root/api_ref/pkg/toc.rst +++ /dev/null @@ -1,10 +0,0 @@ -axonius_api_client package -=========================================================== - -.. toctree:: - - init.rst - exceptions.rst - constants.rst - version.rst - tools.rst diff --git a/docs/root/api_ref/pkg/tools.rst b/docs/root/api_ref/pkg/tools.rst deleted file mode 100644 index 6daa8057..00000000 --- a/docs/root/api_ref/pkg/tools.rst +++ /dev/null @@ -1,8 +0,0 @@ -axonius_api_client.tools -=========================================================== - -.. 
automodule:: axonius_api_client.tools - :members: - :special-members: - :private-members: - :member-order: bysource diff --git a/docs/root/api_ref/pkg/version.rst b/docs/root/api_ref/pkg/version.rst deleted file mode 100644 index 49fd7318..00000000 --- a/docs/root/api_ref/pkg/version.rst +++ /dev/null @@ -1,18 +0,0 @@ -axonius_api_client.version -=========================================================== - -.. automodule:: axonius_api_client.version - :members: - :special-members: - :private-members: - :member-order: bysource - -.. autodata:: axonius_api_client.version.__version__ -.. autodata:: axonius_api_client.version.__url__ -.. autodata:: axonius_api_client.version.__author__ -.. autodata:: axonius_api_client.version.__title__ -.. autodata:: axonius_api_client.version.__project__ -.. autodata:: axonius_api_client.version.__author_email__ -.. autodata:: axonius_api_client.version.__description__ -.. autodata:: axonius_api_client.version.__license__ -.. autodata:: axonius_api_client.version.__copyright__ diff --git a/docs/root/api_ref/toc.rst b/docs/root/api_ref/toc.rst deleted file mode 100644 index 47fb84f1..00000000 --- a/docs/root/api_ref/toc.rst +++ /dev/null @@ -1,9 +0,0 @@ -API Reference -############################################### - -.. toctree:: - - pkg/toc.rst - api/toc.rst - auth/toc.rst - http/toc.rst diff --git a/docs/root/todo.rst b/docs/root/todo.rst index fc20f2c4..46b965b2 100644 --- a/docs/root/todo.rst +++ b/docs/root/todo.rst @@ -2,33 +2,14 @@ Todo Items ############################################### -Sprint schedule is every 3 weeks. Look for FUTURE for todo items. +Sprint schedule is every 6 weeks. Look for FUTURE for todo items. -1.0.1 +2.0.1 ============================================= - -* :teal:`release`: Initial release -* :green:`docs`: Add tags to version reference links - -1.0.2 -============================================= -* :blue:`api`: api.Adapters flush out. -* :green:`docs`: Add API examples. 
-* :blue:`api`: skip fields on error arg -* :blue:`api`: better fields cache impl - -1.0.3 -============================================= -* :blue:`shell`: Add argparser and shell scripts. * :green:`docs`: Add shell examples. +* :green:`docs`: Add API examples. -1.0.4 +2.1.0 ============================================= * :blue:`api`: api.Enforcements flush out. * :blue:`rest_api`: Flush out enforcement routes in REST API server. -* :green:`docs`: Document REST API workflows. - -1.0.5 -============================================= -* :blue:`api`: api.Actions flush out. -* :blue:`api`: api.Alerts flush out. diff --git a/examples/generate_fields.py b/examples/generate_fields.py deleted file mode 100755 index d1d66741..00000000 --- a/examples/generate_fields.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -"""Output all of the fields for all of the adapters in Axonius.""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import json -import logging -import os -import sys - -import axonius_api_client - -if "AX_URL" not in os.environ: - print("You must set the AX_URL environment variable!") - sys.exit(1) - -if "AX_KEY" not in os.environ: - print("You must set the AX_KEY environment variable!") - sys.exit(1) - -if "AX_SECRET" not in os.environ: - print("You must set the AX_SECRET environment variable!") - sys.exit(1) - -AX_URL = os.environ["AX_URL"] -AX_KEY = os.environ["AX_KEY"] -AX_SECRET = os.environ["AX_SECRET"] - - -def jdump(obj): - """JSON dump utility.""" - print(json.dumps(obj, indent=2)) - - -def gen_fields(fields): - """Generate a dict of all of the generic and adapter specific fields.""" - ret = { - adapter: [field["name"] for field in fields] - for adapter, fields in fields["specific"].items() - } - ret.update({"generic": [field["name"] for field in fields["generic"]]}) - return ret - - -logclient = False -loglevel = 
"warning" -logfile = None - -level_client = logging.DEBUG if logclient else logging.WARNING - -if loglevel == "debug": - logfmt = "%(levelname)-8s [%(name)s:%(funcName)s()] %(message)s" -else: - logfmt = "%(levelname)-8s %(message)s" - -level = getattr(logging, loglevel.upper()) -logging.basicConfig(format=logfmt, level=level, filename=logfile) - -http = axonius_api_client.http.HttpClient(url=AX_URL) -http._log.setLevel(level_client) - -auth = axonius_api_client.auth.AuthKey(http_client=http, key=AX_KEY, secret=AX_SECRET) - -auth.login() - -users = axonius_api_client.api.Users(auth=auth) -devices = axonius_api_client.api.Devices(auth=auth) -# enforcements = axonius_api_client.api.Enforcements(auth=auth) -# adapters = axonius_api_client.api.Adapters(auth=auth) -# actions = axonius_api_client.api.Actions(auth=auth) - -all_fields = { - "users": gen_fields(fields=users.get_fields()), - "devices": gen_fields(fields=devices.get_fields()), -} - -jdump(all_fields) diff --git a/requirements-lint.txt b/requirements-lint.txt index 69e03ef0..299dd50f 100644 --- a/requirements-lint.txt +++ b/requirements-lint.txt @@ -1,3 +1,5 @@ black; python_version > '3.6' flake8 bandit +pydocstyle +isort diff --git a/requirements.txt b/requirements.txt index 1d6130b9..aea4f8d8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,4 @@ requests[security,socks] +python-dotenv +python-dateutil +click diff --git a/setup.py b/setup.py index 4648883e..655a9c44 100644 --- a/setup.py +++ b/setup.py @@ -1,16 +1,15 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- """Package setup.""" -import os - import codecs +import os -from setuptools import setup -from setuptools import find_packages - +from setuptools import find_packages, setup +PROJECT = "axonius_api_client" +SHELL_CMD = "axonshell" HERE = os.path.abspath(os.path.dirname(__file__)) -VERSION_PATH = os.path.join(HERE, "axonius_api_client", "version.py") +VERSION_PATH = os.path.join(HERE, PROJECT, "version.py") ABOUT = {} @@ -36,7 +35,12 
@@ package_data={"": ["LICENSE"]}, include_package_data=True, python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - install_requires=["requests[security,socks]"], + install_requires=[ + "requests[security,socks]", + "click", + "python-dotenv", + "python-dateutil", + ], keywords=["Axonius", "API Library"], tests_require=["pytest", "pytest-cov", "pytest-httpbin", "coverage"], license=ABOUT["__license__"], @@ -53,4 +57,5 @@ "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", ], + entry_points={"console_scripts": ["{}={}.cli:cli".format(SHELL_CMD, PROJECT)]}, )