From d45eee42d6ae81c88b51a79ddb4945fc05db679d Mon Sep 17 00:00:00 2001 From: Artem Rys Date: Thu, 16 Feb 2023 16:14:27 +0100 Subject: [PATCH 1/2] feat: migrate from helmut to splunksplwrapper (#759) * feat: migrate from helmut to splunksplwrapper BREAKING CHANGE: migrate from helmut to splunksplwrapper * ci: some minor changes for CI * ci: update agreements workflow --- .github/workflows/agreements.yaml | 2 +- .github/workflows/build-test-release.yml | 12 +- .gitignore | 2 +- poetry.lock | 25 +- pyproject.toml | 2 +- pytest_splunk_addon/helmut/__init__.py | 23 - pytest_splunk_addon/helmut/app/__init__.py | 214 --- .../helmut/connector/__init__.py | 27 - pytest_splunk_addon/helmut/connector/base.py | 155 --- .../helmut/connector/httplib2_handler.py | 82 -- pytest_splunk_addon/helmut/connector/rest.py | 471 ------- pytest_splunk_addon/helmut/connector/sdk.py | 271 ---- .../helmut/exceptions/__init__.py | 90 -- .../helmut/exceptions/command_execution.py | 64 - .../helmut/exceptions/confs.py | 52 - .../helmut/exceptions/search.py | 28 - pytest_splunk_addon/helmut/exceptions/wait.py | 52 - pytest_splunk_addon/helmut/log/__init__.py | 98 -- pytest_splunk_addon/helmut/log/logging.conf | 57 - .../helmut/manager/__init__.py | 36 - .../helmut/manager/confs/__init__.py | 111 -- .../helmut/manager/confs/conf.py | 93 -- .../helmut/manager/confs/rest/__init__.py | 136 -- .../helmut/manager/confs/rest/conf.py | 193 --- .../helmut/manager/confs/rest/stanza.py | 88 -- .../helmut/manager/confs/sdk/__init__.py | 81 -- .../helmut/manager/confs/sdk/conf.py | 88 -- .../helmut/manager/confs/sdk/stanza.py | 89 -- .../helmut/manager/confs/stanza.py | 101 -- .../helmut/manager/indexes/__init__.py | 114 -- .../helmut/manager/indexes/index.py | 52 - .../helmut/manager/indexes/rest/__init__.py | 254 ---- .../helmut/manager/indexes/rest/index.py | 146 -- .../helmut/manager/indexes/sdk/__init__.py | 65 - .../helmut/manager/indexes/sdk/index.py | 143 -- .../helmut/manager/jobs/__init__.py | 83 -- 
.../helmut/manager/jobs/job.py | 108 -- .../helmut/manager/jobs/rest/__init__.py | 235 ---- .../helmut/manager/jobs/rest/job.py | 322 ----- .../helmut/manager/jobs/results.py | 269 ---- .../helmut/manager/jobs/sdk/__init__.py | 54 - .../helmut/manager/jobs/sdk/job.py | 272 ---- pytest_splunk_addon/helmut/manager/object.py | 35 - .../helmut/manager/roles/__init__.py | 119 -- .../helmut/manager/roles/role.py | 38 - .../helmut/manager/roles/sdk/__init__.py | 90 -- .../helmut/manager/roles/sdk/role.py | 64 - .../helmut/manager/saved_searches/__init__.py | 91 -- .../manager/saved_searches/saved_search.py | 74 - .../manager/saved_searches/sdk/__init__.py | 101 -- .../saved_searches/sdk/saved_search.py | 105 -- .../helmut/manager/users/__init__.py | 111 -- .../helmut/manager/users/sdk/__init__.py | 91 -- .../helmut/manager/users/sdk/user.py | 60 - .../helmut/manager/users/user.py | 54 - pytest_splunk_addon/helmut/misc/__init__.py | 20 - pytest_splunk_addon/helmut/misc/collection.py | 62 - .../helmut/misc/manager_utils.py | 33 - pytest_splunk_addon/helmut/splunk/__init__.py | 24 - pytest_splunk_addon/helmut/splunk/base.py | 672 --------- pytest_splunk_addon/helmut/splunk/cloud.py | 302 ---- pytest_splunk_addon/helmut/util/Constants.py | 196 --- pytest_splunk_addon/helmut/util/__init__.py | 22 - .../helmut/util/action_writer.py | 127 -- .../helmut/util/alerthelper.py | 503 ------- pytest_splunk_addon/helmut/util/attrdict.py | 21 - .../helmut/util/basefileutils.py | 69 - pytest_splunk_addon/helmut/util/rest_uris.py | 125 -- pytest_splunk_addon/helmut/util/restutils.py | 110 -- pytest_splunk_addon/helmut/util/rip.py | 579 -------- .../helmut/util/searchhelpers.py | 952 ------------- .../helmut/util/string_unicode_convert.py | 31 - pytest_splunk_addon/helmut/util/ymlparser.py | 102 -- pytest_splunk_addon/helmut_lib/SearchUtil.py | 1219 ----------------- pytest_splunk_addon/helmut_lib/__init__.py | 16 - pytest_splunk_addon/splunk.py | 15 +- .../fields_tests/test_templates.py | 
4 +- pytest_splunk_addon/tools/cim_field_report.py | 16 +- 78 files changed, 39 insertions(+), 11074 deletions(-) delete mode 100644 pytest_splunk_addon/helmut/__init__.py delete mode 100644 pytest_splunk_addon/helmut/app/__init__.py delete mode 100644 pytest_splunk_addon/helmut/connector/__init__.py delete mode 100644 pytest_splunk_addon/helmut/connector/base.py delete mode 100644 pytest_splunk_addon/helmut/connector/httplib2_handler.py delete mode 100644 pytest_splunk_addon/helmut/connector/rest.py delete mode 100644 pytest_splunk_addon/helmut/connector/sdk.py delete mode 100644 pytest_splunk_addon/helmut/exceptions/__init__.py delete mode 100644 pytest_splunk_addon/helmut/exceptions/command_execution.py delete mode 100644 pytest_splunk_addon/helmut/exceptions/confs.py delete mode 100644 pytest_splunk_addon/helmut/exceptions/search.py delete mode 100644 pytest_splunk_addon/helmut/exceptions/wait.py delete mode 100644 pytest_splunk_addon/helmut/log/__init__.py delete mode 100644 pytest_splunk_addon/helmut/log/logging.conf delete mode 100644 pytest_splunk_addon/helmut/manager/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/conf.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/rest/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/rest/conf.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/rest/stanza.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/sdk/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/sdk/conf.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/sdk/stanza.py delete mode 100644 pytest_splunk_addon/helmut/manager/confs/stanza.py delete mode 100644 pytest_splunk_addon/helmut/manager/indexes/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/indexes/index.py delete mode 100644 pytest_splunk_addon/helmut/manager/indexes/rest/__init__.py delete mode 
100644 pytest_splunk_addon/helmut/manager/indexes/rest/index.py delete mode 100644 pytest_splunk_addon/helmut/manager/indexes/sdk/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/indexes/sdk/index.py delete mode 100644 pytest_splunk_addon/helmut/manager/jobs/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/jobs/job.py delete mode 100644 pytest_splunk_addon/helmut/manager/jobs/rest/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/jobs/rest/job.py delete mode 100644 pytest_splunk_addon/helmut/manager/jobs/results.py delete mode 100644 pytest_splunk_addon/helmut/manager/jobs/sdk/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/jobs/sdk/job.py delete mode 100644 pytest_splunk_addon/helmut/manager/object.py delete mode 100644 pytest_splunk_addon/helmut/manager/roles/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/roles/role.py delete mode 100644 pytest_splunk_addon/helmut/manager/roles/sdk/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/roles/sdk/role.py delete mode 100644 pytest_splunk_addon/helmut/manager/saved_searches/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/saved_searches/saved_search.py delete mode 100644 pytest_splunk_addon/helmut/manager/saved_searches/sdk/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/saved_searches/sdk/saved_search.py delete mode 100644 pytest_splunk_addon/helmut/manager/users/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/users/sdk/__init__.py delete mode 100644 pytest_splunk_addon/helmut/manager/users/sdk/user.py delete mode 100644 pytest_splunk_addon/helmut/manager/users/user.py delete mode 100644 pytest_splunk_addon/helmut/misc/__init__.py delete mode 100644 pytest_splunk_addon/helmut/misc/collection.py delete mode 100644 pytest_splunk_addon/helmut/misc/manager_utils.py delete mode 100644 pytest_splunk_addon/helmut/splunk/__init__.py delete mode 100644 
pytest_splunk_addon/helmut/splunk/base.py delete mode 100644 pytest_splunk_addon/helmut/splunk/cloud.py delete mode 100644 pytest_splunk_addon/helmut/util/Constants.py delete mode 100644 pytest_splunk_addon/helmut/util/__init__.py delete mode 100644 pytest_splunk_addon/helmut/util/action_writer.py delete mode 100644 pytest_splunk_addon/helmut/util/alerthelper.py delete mode 100644 pytest_splunk_addon/helmut/util/attrdict.py delete mode 100644 pytest_splunk_addon/helmut/util/basefileutils.py delete mode 100644 pytest_splunk_addon/helmut/util/rest_uris.py delete mode 100644 pytest_splunk_addon/helmut/util/restutils.py delete mode 100644 pytest_splunk_addon/helmut/util/rip.py delete mode 100644 pytest_splunk_addon/helmut/util/searchhelpers.py delete mode 100644 pytest_splunk_addon/helmut/util/string_unicode_convert.py delete mode 100644 pytest_splunk_addon/helmut/util/ymlparser.py delete mode 100644 pytest_splunk_addon/helmut_lib/SearchUtil.py delete mode 100644 pytest_splunk_addon/helmut_lib/__init__.py diff --git a/.github/workflows/agreements.yaml b/.github/workflows/agreements.yaml index 572b02d86..4e10b4d00 100644 --- a/.github/workflows/agreements.yaml +++ b/.github/workflows/agreements.yaml @@ -7,7 +7,7 @@ on: jobs: call-workflow-agreements: - uses: splunk/addonfactory-github-workflows/.github/workflows/reusable-agreements.yaml@v1.2.1 + uses: splunk/addonfactory-github-workflows/.github/workflows/reusable-agreements.yaml@v1.3 secrets: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} PERSONAL_ACCESS_TOKEN: ${{ secrets.PAT_CLATOOL }} diff --git a/.github/workflows/build-test-release.yml b/.github/workflows/build-test-release.yml index 3af444d01..655314064 100644 --- a/.github/workflows/build-test-release.yml +++ b/.github/workflows/build-test-release.yml @@ -82,15 +82,6 @@ jobs: uses: edplato/trufflehog-actions-scan@v0.9j-beta with: scanArguments: "--max_dept 50 -x .github/workflows/exclude-patterns.txt" - - review-dog-misspell: - runs-on: ubuntu-latest - steps: - - uses: 
actions/checkout@v3 - - name: Run misspell with reviewdog - uses: reviewdog/action-misspell@v1.12 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} test-splunk-unit: name: Unit tests @@ -141,11 +132,10 @@ jobs: - test-splunk-doc - test-splunk-unit - review_secrets - - review-dog-misspell strategy: fail-fast: false matrix: - splunk-version: ["8.1", "9.0"] + splunk-version: ["8.1", "8.2", "9.0"] steps: - uses: actions/checkout@v3 with: diff --git a/.gitignore b/.gitignore index 2f9477f33..5b5acd244 100644 --- a/.gitignore +++ b/.gitignore @@ -23,7 +23,7 @@ coverage.xml .hypothesis/ .pytest_cache pytest_splunk_addon.log -helmut.log +splunksplwrapper.log !tests/e2e/addons/*/.log docs/_build/ /site/ diff --git a/poetry.lock b/poetry.lock index 3fad87ba3..cbe54abb8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -291,14 +291,14 @@ files = [ [[package]] name = "httplib2" -version = "0.20.4" +version = "0.21.0" description = "A comprehensive HTTP client library." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "httplib2-0.20.4-py3-none-any.whl", hash = "sha256:8b6a905cb1c79eefd03f8669fd993c36dc341f7c558f056cb5a33b5c2f458543"}, - {file = "httplib2-0.20.4.tar.gz", hash = "sha256:58a98e45b4b1a48273073f905d2961666ecf0fbac4250ea5b47aef259eb5c585"}, + {file = "httplib2-0.21.0-py3-none-any.whl", hash = "sha256:987c8bb3eb82d3fa60c68699510a692aa2ad9c4bd4f123e51dfb1488c14cdd01"}, + {file = "httplib2-0.21.0.tar.gz", hash = "sha256:fc144f091c7286b82bec71bdbd9b27323ba709cc612568d3000893bfd9cb4b34"}, ] [package.dependencies] @@ -977,6 +977,23 @@ files = [ {file = "splunk-sdk-1.7.2.tar.gz", hash = "sha256:36d27f83e7cc8309be6e3b92e46c547caea566b0c006238025073843e586e5a8"}, ] +[[package]] +name = "splunksplwrapper" +version = "1.0.1" +description = "Package to interact with Splunk" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "splunksplwrapper-1.0.1-py3-none-any.whl", hash = 
"sha256:3ba28266a6a093d2c39ccdf56edbd6528a84abebd0b7ff23cf24244a43a8f14d"}, + {file = "splunksplwrapper-1.0.1.tar.gz", hash = "sha256:d6102ab03fea642ed445adf38f11a151668e07a1a2dd5f2c1ff62d8c5626414e"}, +] + +[package.dependencies] +defusedxml = ">=0.7.1,<0.8.0" +httplib2 = ">=0.21.0,<0.22.0" +splunk-sdk = ">=1.6.18,<2.0.0" + [[package]] name = "tomli" version = "2.0.1" @@ -1072,4 +1089,4 @@ docker = ["lovely-pytest-docker"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "06e038cc695f7b3da4ff5b691deea02590f8f7d4cc20804254f4e5308276cda5" +content-hash = "8036aff2674aa29d7cbc8da9abc8930cf9d1e68afe8e8c457db62510b1535208" diff --git a/pyproject.toml b/pyproject.toml index b517943e8..d54b58392 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,6 @@ classifiers = [ include = ["pytest_splunk_addon/**/*.json", "pytest_splunk_addon/**/*.txt"] [tool.poetry.dependencies] -httplib2 = "<=0.20.4" python = "^3.7" pytest = ">5.4.0,<7.3" splunk-sdk = "^1.6" @@ -48,6 +47,7 @@ defusedxml = "^0.7.1" Faker = "^13.12.0" xmltodict = "^0.13.0" xmlschema = "^1.11.3" +splunksplwrapper = "^1.0.1" [tool.poetry.extras] docker = ['lovely-pytest-docker'] diff --git a/pytest_splunk_addon/helmut/__init__.py b/pytest_splunk_addon/helmut/__init__.py deleted file mode 100644 index 7823cd273..000000000 --- a/pytest_splunk_addon/helmut/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from . import log - -log.setup_logger(debug=True) diff --git a/pytest_splunk_addon/helmut/app/__init__.py b/pytest_splunk_addon/helmut/app/__init__.py deleted file mode 100644 index 7674be32b..000000000 --- a/pytest_splunk_addon/helmut/app/__init__.py +++ /dev/null @@ -1,214 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -Summary -======= -A module which deals with a Splunk app -""" - -import logging -import os -from builtins import object - -from pytest_splunk_addon.helmut.manager.confs import Confs - - -class App(object): - """ - A representation of a Splunk Application - - @ivar __logger: The logger we use - @ivar _required_indexes: Sub classes can indexes to this list and they will - be added as part of the setup. The indexes must be - know by the filegetter.py, the format of each - entry is a dictionary with the same parameters as - the filegetter.get_file method. 
- @ivar _required_configs: A list of paths to config files that should be - copied to the app's local config directory as part - of the setup - @ivar _required_lookups: A list of paths to lookup files that should be - copied to the app's lookup directory as part of - the setup - @ivar _name: The name of this app - @ivar _splunk: The splunk instance this app belongs to - @ivar _config_manager: The config manager for this app - @ivar _shared_service: The shared service for this app. - @ivar package: The package this app will be installed from or None if not - package exists. - """ - - DEFAULT_NAMESPACE = "nobody:{app_name}" - - def __init__(self, name, splunk, package=None): - """ - Constructs this app - - @param name: The name of the app, should be all lower case. - @type name: str - - @param splunk: The splunk instance this app belongs to. - @type splunk: Splunk - - @param package: An optional path to any package this app can be - installed from - @type package: str - """ - super(App, self).__init__() - - self.__logger = logging.getLogger("App-{0}".format(name)) - - self._name = name - self._splunk = splunk - self._confs = None - - self.package = package - - @property - def confs(self): - """ - The confs manager that is used for this app. - - The manager will have a namespace that has the app portion set to this - app. - - @rtype: L{Confs} - """ - if self._confs is None: - self._confs = self._create_confs_manager() - return self._confs - - def _create_confs_manager(self): - """ - Creates a confs manager for this app. - - It uses the same connector factory as the Splunk instance. - - @return: The newly created confs manager. - @rtype: L{Confs} - """ - return Confs(self.splunk.default_connector) - - @property - def namespace(self): - """ - The namespace for this app. - - @rtype: str - """ - return self.DEFAULT_NAMESPACE.format(app_name=self._name) - - @confs.setter - def confs(self, value): - """ - Updates the confs manager for this app. 
- - @param value: The new manager. - @type value: L{Confs} - """ - self._confs = value - - @property - def installed(self): - """ - Checks too see whether this app is already installed or not. - - It does this by checking if the directory exists which means that there - is no guarantee that it was installed this session. - - @rtype: bool - """ - return self.splunk.has_app(self.name) - - @property - def name(self): - """ - The name for this app - - @rtype: str - """ - return self._name - - @property - def splunk(self): - """ - The Splunk instance this app belongs to - - @rtype: L{Splunk} - """ - return self._splunk - - @property - def apps_dir(self): - """ - The path to the directory that splunk stores it's apps - - @rtype: str - """ - return self.splunk.apps_dir - - @property - def install_path(self): - """ - The path to the directory where this app will be/is installed - - @rtype: str - """ - return os.path.join(self.apps_dir, self.name) - - def can_install(self): - """ - Checks if this app can be installed meaning if a package has been - supplied. - - @rtype: bool - @return: True if this app can be installed - """ - return self.package is not None - - def install(self): - """ - Installs this app. - - @rtype: bool - @return: True if the app was installed and splunk needs to restart - """ - self._verify_can_install() - return self.splunk.install_app(self.name, self.package) - - def _verify_can_install(self): - """ - Checks that this app can be installed and raising an exception if it - can't. - - @raise AppHasNoPackage: If the app can't be installed. 
- """ - if not self.package: - raise AppHasNoPackage(self.name) - - def uninstall(self): - """ - Uninstalls this app - - @rtype: bool - @return: True if the app was installed and has now been removed - """ - return self.splunk.uninstall_app(self.name) - - -class AppHasNoPackage(RuntimeError): - def __init__(self, app_name): - msg = "The app {0} has no package to install from".format(app_name) - super(AppHasNoPackage, self).__init__(msg) diff --git a/pytest_splunk_addon/helmut/connector/__init__.py b/pytest_splunk_addon/helmut/connector/__init__.py deleted file mode 100644 index a1d99c0aa..000000000 --- a/pytest_splunk_addon/helmut/connector/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -Module for handling generic connections with a Splunk instance. - -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-21 -""" - -__all__ = ["sdk", "rest"] - -from .rest import RESTConnector -from .sdk import SDKConnector diff --git a/pytest_splunk_addon/helmut/connector/base.py b/pytest_splunk_addon/helmut/connector/base.py deleted file mode 100644 index 97bde7f13..000000000 --- a/pytest_splunk_addon/helmut/connector/base.py +++ /dev/null @@ -1,155 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -Module for handling generic connections with a Splunk instance. - -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-21 -""" - -from abc import ABCMeta -from builtins import range -from builtins import str - -from future.utils import with_metaclass - -from pytest_splunk_addon.helmut.log import Logging - - -class Connector(with_metaclass(ABCMeta, Logging)): - """ - A connector is an object that handles connections with Splunk. - - This is the abstract base class for all connectors. - - @cvar DEFAULT_USERNAME: The username that will be used if a username is not - explicitly specified. - @cvar DEFAULT_PASSWORD: The password that will be used if a password is not - explicitly specified. - @cvar DEFAULT_OWNER: The owner that will be used if an owner is not - explicitly specified. - @cvar DEFAULT_APP: The app that will be used if an app is not - explicitly specified. - @ivar _splunk: The Splunk instance associated with this connector. - @ivar _username: The username that this connector uses. - @ivar _password: The password that this connector uses. - @ivar _owner: The owner that this connector uses. - @ivar _app: The app that this connector uses. - """ - - DEFAULT_USERNAME = "admin" - DEFAULT_PASSWORD = "changeme" - DEFAULT_OWNER = "nobody" - DEFAULT_APP = "system" - - # types of connectors - (SDK, REST) = list(range(0, 2)) - - def __init__(self, splunk, username=None, password=None, owner=None, app=None): - """ - Creates a new Connector instance. - - The namespace needs to be in the : format. 
- - @param splunk: The Splunk object we are communicating with. - @type splunk: L{Splunk<..splunk.Splunk>} - @param username: The username to use (or None for default) - @type username: str - @param password: The password to use (or None for default) - @type password: str - @param owner: The owner to use (or None for default) - @type owner: str - @param app: The app to use (or None for default) - @type app: str - """ - self._splunk = splunk - self._username = username or self.DEFAULT_USERNAME - self._password = password or self.DEFAULT_PASSWORD - self._owner = owner or self.DEFAULT_OWNER - self._app = app or self.DEFAULT_APP - self._attempt_login_time = 0 - Logging.__init__(self) - - @property - def splunk(self): - """ - The Splunk object associated with this connector. - - @rtype: L{Splunk<..splunk.Splunk>} - """ - return self._splunk - - @property - def username(self): - """ - The username for this connector. - - @rtype: str - """ - return self._username - - @username.setter - def username(self, value): - """ - Setter for the username property - """ - self._username = value - - @property - def password(self): - """ - The password for this connector. - - @rtype: str - """ - return self._password - - @password.setter - def password(self, value): - """ - Setter for the password property - """ - self._password = value - - @property - def namespace(self): - """ - The namespace for this connector. - - Will be in the format : - - @rtype: str - """ - return str(self._owner) + ":" + str(self._app) - - @property - def owner(self): - """ - The owner for this connector. - - @rtype: str - """ - return self._owner - - @property - def app(self): - """ - The app for this connector. 
- - @rtype: str - """ - return self._app diff --git a/pytest_splunk_addon/helmut/connector/httplib2_handler.py b/pytest_splunk_addon/helmut/connector/httplib2_handler.py deleted file mode 100644 index 720833f5a..000000000 --- a/pytest_splunk_addon/helmut/connector/httplib2_handler.py +++ /dev/null @@ -1,82 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from future import standard_library - -standard_library.install_aliases() -from builtins import range -import httplib2 -import logging -import time -import random -from io import BytesIO -from http.client import ResponseNotReady - -RETRIES = 3 - - -def sdk_request_adapter(url, message, **kwargs): - """ - :param url: the URL to make the request to (including any query and fragment sections) - :param message: a dictionary with the following keys: - - method: The method for the request, typically ``GET``, ``POST``, or ``DELETE``. - - headers: A list of pairs specifying the HTTP headers (for example: ``[('key': value), ...]``). - - body: A string containing the body to send with the request (this string - should default to ''). - :param kwargs: not used - :return: response_dict, a dictionary with the following keys: - - status: An integer containing the HTTP status code (such as 200 or 404). - - reason: The reason phrase, if any, returned by the server. - - headers: A list of pairs containing the response headers (for example, ``[('key': value), ...]``). 
- - body: A stream-like object supporting ``read(size=None)`` and ``close()`` methods to get the body of the response. - """ - method = message.get("method", "GET").upper() - body = message.get("body", "") if method == "POST" else None - headers = dict(message.get("headers", [])) - h = httplib2.Http(disable_ssl_certificate_validation=True) - for i in range(RETRIES): - try: - resp, content = h.request(url, method=method, body=body, headers=headers) - break - except ResponseNotReady: - # splunk restart is still in progress - time.sleep(30) - except Exception as ex: # noqa: E722 - logging.getLogger("..connector").exception( - "request failed, url=%s, attempts=%s", url, i + 1 - ) - if i == RETRIES - 1 or not _is_retry_safe(method, url): - raise - else: - # intermediate network error - time.sleep(random.randint(5, 17)) - - return { - "status": resp.status, - "reason": resp.reason, - "headers": resp, - "body": BytesIO(content), - } - - -def _is_retry_safe(method, url): - if method in ("GET", "HEAD", "DELETE", "PUT"): - # idempotent request - return True - elif method == "POST": - for rest_url in ["/auth/login"]: - if rest_url in url: - return True - return False diff --git a/pytest_splunk_addon/helmut/connector/rest.py b/pytest_splunk_addon/helmut/connector/rest.py deleted file mode 100644 index 61e8340a2..000000000 --- a/pytest_splunk_addon/helmut/connector/rest.py +++ /dev/null @@ -1,471 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Sagar Bhatnagar -""" - -from future import standard_library - -standard_library.install_aliases() -from builtins import str -from .base import Connector -import urllib.request, urllib.parse, urllib.error -import httplib2 -from pytest_splunk_addon.helmut.exceptions import AuthenticationError -import json -from defusedxml import cElementTree as ET -from xml.dom.minidom import parseString -import time -import six - - -class RESTConnector(Connector): - """ - This represents workaround to access REST thru HTTP client library httplib2 - - Associated with each object is a C{http} object from the httplib which in - turn contains connection info, namespace and auth. - - When a connector is logged in a sessionkey is generated and will be kept - until the point that you logout or the server is restarted. - - When Splunk is restarted the connector I{tries} to login again - - @ivar _service: The underlying service, aka the http request object - @cvar HEADERS: The default headers to pass with http request. this will - get appended with the 'Authorization' key when sessionkey is used - - """ - - HEADERS = {"content-type": "text/xml; charset=utf-8"} - METHODS = ["GET", "POST", "PUT", "DELETE"] - SUCCESS = {"GET": "200", "POST": "201", "DELETE": "200", "PUT": "200"} - - DEFAULT_OWNER = "admin" - DEFAULT_APP = ( - "search" # defaulting it search app for the case when app is not passed. - ) - - def __init__(self, splunk, username=None, password=None, app=None, owner=None): - """ - Creates a new REST connector. - - The connector will logged in when created with default values - - @param splunk: The Splunk instance - @type splunk: L{..splunk.Splunk} - @param username: The username to use. If None (default) - L{Connector.DEFAULT_USERNAME} is used. - @type username: str - @param password: The password to use. If None (default) - L{Connector.DEFAULT_PASSWORD} is used. 
- @type password: str - @param app: The app to use.This will construct namespace : - @type app: str - @param app: The owner to use.This will construct namespace : - @type app: str - - """ - super(RESTConnector, self).__init__( - splunk, - username=username, - password=password, - owner=owner, - app=app, - ) - self.uri_base = splunk.uri_base() - self._timeout = 60 - self._debug_level = 0 - self._disable_ssl_certificate = True - self._follow_redirects = False - httplib2.debuglevel = self._debug_level - self.sessionkey = None - self._service = httplib2.Http( - timeout=self._timeout, - disable_ssl_certificate_validation=self._disable_ssl_certificate, - ) - self._service.follow_redirects = self._follow_redirects - self._service.add_credentials(self._username, self._password) - - splunk.register_start_listener(self) - - @property - def namespace(self): - """ - The namespace for this RESTconnector. - - Will be in the format // - - @rtype: str - """ - return "/" + str(self._owner) + "/" + str(self._app) - - def make_request( - self, - method, - uri, - body=None, - urlparam=None, - use_sessionkey=False, - log_response=True, - ): - """ - Make a HTTP request to an endpoint - - @type method: string - @param method: HTTP valid methods: PUT, GET, POST, DELETE - @type uri: string - @param uri: URI of the REST endpoint - @type body: string or dictionary or a sequence of two-element tuples - @param body: the request body - @type urlparam: string/ dictionary or a sequence of two-element tuples - @param urlparam: the URL parameters - @type use_sessionkey: bool - @param use_sessionkey: toggle for using sessionkey or not - @type log_response: bool - @param log_response: log the response to ..log or not - - >>> conn.make_request('POST', '/services/receivers/simple', - urlparam={'host': 'foo'}, body="my event") - - """ - if body is None: - body = "" - if type(body) != str and type(body) != str: - body = urllib.parse.urlencode(body) - if urlparam is None: - urlparam = "" - if 
type(urlparam) != str: - urlparam = urllib.parse.urlencode(urlparam) - if urlparam != "": - url = "%s%s?%s" % (self.uri_base, uri, urlparam) - else: - url = "%s%s" % (self.uri_base, uri) - - if use_sessionkey: - self._service.clear_credentials() - self.update_headers("Authorization", "Splunk %s" % self.sessionkey) - else: - if not self._service.credentials.credentials: - self._service.add_credentials(self._username, self._password) - if "Authorization" in self.HEADERS: - self.HEADERS.pop("Authorization") - response, content = self._service.request( - url, method, body=body, headers=self.HEADERS - ) - - self.logger.info( - "Request => {r}".format( - r={ - "method": method, - "url": url, - "body": body, - "auth": "{u}:{p}".format(u=self._username, p=self._password), - "header": self.HEADERS, - } - ) - ) - if log_response: - self.logger.info("Response => {r}".format(r=response)) - self.logger.debug("Content => {c}".format(c=content)) - - return response, content - - def make_requestNS( - self, - method, - uri, - body=None, - urlparam=None, - use_sessionkey=False, - log_response=True, - ): - - """ - Wrapper on top of make_request. 
For uri, don't use any of /services - or /serviceNS,just pass the endpoint,it will read the namespace from - connector itself - - @type method: string - @param method: HTTP valid methods: PUT, GET, POST, DELETE - @type uri: string - @param uri: URI of the REST endpoint - @type body: string or dictionary or a sequence of two-element tuples - @param body: the request body - @type urlparam: string/ dictionary or a sequence of two-element tuples - @param urlparam: the URL parameters - @type use_sessionkey: bool - @param use_sessionkey: toggle for using sessionkey or not - @type log_response: bool - @param log_response: log the response to ..log or not - - >>> conn.make_requestNS('GET', 'data/outputs/tcp/default') - - """ - uri = "/servicesNS" + self.namespace + uri - response, content = self.make_request( - method, - uri, - body, - urlparam=urlparam, - use_sessionkey=use_sessionkey, - log_response=log_response, - ) - return response, content - - def parse_content_json(self, content): - """ - Parses the content object (in json format) to python dict - - @type content: json - @param content: content object from http request in json format - """ - - return json.loads(str(content)) - - @property - def _service_arguments(self): - """ - The arguments to pass to the Service (httplib in this case). - - If makes sure that they have default values if nothing is specified. - - @rtype: dict - @return: default values for the httplib service - - """ - return { - "username": self._username, - "password": self._password, - "namespace": self.namespace, - "uri_base": self.splunk.uri_base(), - } - - def _recreate_service(self): - """ - Clones the current service with the same values. - - It then tries to log the service in if the old one was logged in. 
- Called when Splunk starts - """ - _was_logged_in = self._was_logged_in() - service = self._clone_existing_service() - self._service = service - self.uri_base = self._service_arguments["uri_base"] - if _was_logged_in: - try: - self.login() - except AuthenticationError as autherr: - self.logger.warn( - "RESTConnector for username:{username} password:{password}" - " login failed when recreating service. error msg:{error}".format( - username=self.username, - password=self.password, - error=autherr.message, - ) - ) - - def login(self): - """ - Logs the connector in. - - Just hits the auth endpoint and retreives and sets the sessionkey. - - """ - body = urllib.parse.urlencode( - {"username": self._username, "password": self._password} - ) - url = "%s%s" % (self.uri_base, "/services/auth/login") - response, content = self._service.request(url, "POST", body=body) - self._attempt_login_time = time.time() - if response.status != 200: - msg = "Login failed... response status: %s content: %s" % ( - response.status, - content, - ) - self.logger.warn(msg) - raise AuthenticationError(msg) - - root = ET.fromstring(six.ensure_text(content, "utf-8")) - self.sessionkey = root[0].text - if not self._service.credentials.credentials: - self._service.add_credentials(self._username, self._password) - - def _clone_existing_service(self): - """ - clones the existing service - - @return: The newly created service (httplib) http object - @rtype: http object - """ - http = httplib2.Http( - timeout=self._timeout, - disable_ssl_certificate_validation=self._disable_ssl_certificate, - ) - http.follow_redirects = False - http.add_credentials( - self._service_arguments["username"], self._service_arguments["password"] - ) - return http - - def logout(self): - """ - Logs the connector out - - This just unsets the sessionkey. 
- - """ - if "Authorization" in self.HEADERS: - self.HEADERS.pop("Authorization") - self.sessionkey = None - self._service.clear_credentials() - - def is_logged_in(self): - """ - Checks if the connector is logged in. - - This checks if the sessionkey is set and is not expired. - @return: True if the connector is logged in - @rtype: bool - - """ - if self.sessionkey is None: - return False - elif self._is_session_expired(): - return False - else: - return True - - def _was_logged_in(self): - """ - Checks if the connector was logged in. - - This checks if the sessionkey is set. - :return: - """ - return self.sessionkey is not None - - def _is_session_expired(self): - """ - Checks if the session key is an expired one. - - Hits an endpoint with that key and check response status is 401 - """ - url = "%s%s" % (self.uri_base, "/services/data/outputs/tcp/default") - self._service.clear_credentials() - self.update_headers("Authorization", "Splunk %s" % self.sessionkey) - response, content = self._service.request(url, "GET", headers=self.HEADERS) - self._service.add_credentials(self._username, self._password) - if response["status"] == "401": - self.logger.debug( - "Session is expired for RESTconnector %s:%s" - % (self.username, self.password) - ) - return True - else: - self.logger.debug( - "Session is NOT expired for RESTconnector %s:%s" - % (self.username, self.password) - ) - return False - - def update_headers(self, key=None, value=None): - """ - Appends a key,value pair to the HEADERS - - @type key: str - @param key: key to append to HEADERS - @type value: str - @param value: value for that key to append to HEADERS - - """ - if key in self.HEADERS: - self.HEADERS.pop(key) - self.HEADERS.update({key: value}) - - def debug_level(self, value): - """ - Overrides default value for debug_level for httplib service - - @type value: int - @param value: debugging level - - """ - - self._debug_level = value - - def timeout(self, value): - """ - Overrides default value for timout 
for http request - - @type value: int - @param value: timeout for the http request in seconds - - """ - - self._timeout = value - - def disable_ssl_certificate(self, value): - """ - Overrides disable sssl certificate condition - - @type value: bool - @param value: enable/disable ssl certificate for auth - - """ - - self._disable_ssl_certificate = value - - def follow_redirects(self, value): - """ - Overrides default value of the follow_redircets - - @type value: bool - @param value: follow redirects - - """ - - self._follow_redirects = value - - def __del__(self): - """ - Called when the object is being deallocated. - - It unregisters itself with the Splunk start listeners. - - """ - self.splunk.unregister_start_listener(self) - - def __call__(self): - """ - Called when the splunk instance class notifies REST connector listener - - Need it as local splunk instance notify method invokes l() and then - service will be recreated and initialized with default values - """ - self._recreate_service() - - def parse_content_xml(self, content, tag): - """ - Parses the content object (in xml format) - - @type content: xml - @param content: content object from http request in xml format - - """ - dom = parseString(content) - xmlTag = dom.getElementsByTagName(tag)[0].toxml() - return xmlTag diff --git a/pytest_splunk_addon/helmut/connector/sdk.py b/pytest_splunk_addon/helmut/connector/sdk.py deleted file mode 100644 index eed65696a..000000000 --- a/pytest_splunk_addon/helmut/connector/sdk.py +++ /dev/null @@ -1,271 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -This module handles connections through the public python SDK. - -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-09 -""" -import time - -from splunklib.binding import _NoAuthenticationToken, AuthenticationError -from splunklib.client import Service, Endpoint - -from .base import Connector - - -class SDKConnector(Connector): - """ - This class represents one connection through the public python SDK. - - One connection means one session. - If you have multiple, identical, SDKConnectors it represents having - multiple login from the same user. - - Associated with each object is a C{Service} object from the SDK which in - turn contains connection info, namespace and auth. - - When a connector is logged in an auth token is generated and will be kept - until the point that you logout or the server is restarted. - - The connector reads ports, host and scheme from the given Splunk instance. - When Splunk is restarted the info is read again to reflect any changes - that were made. - - When Splunk is restarted the connector I{tries} to login again if the - connector was logged in before. This might not always work (if you disabled - auth for instance) though. - - The Python SDK, as of version 0.8.0, uses separate parameters to evaluate - the namespace. Therefor the primary parameters to be used are 'owner', 'app' - and 'sharing' instead of namespace. See splunklib.binding for more info. 
- - @ivar _service: The underlying service - @cvar DEFAULT_SHARING: The default sharing option, as defined by the - Python SDK in splunklib.binding, that will be used - if it isn't specified by the user. - @cvar DEFAULT_OWNER: The default owner that will be used if it isn't - specified by the user. - @cvar DEFAULT_APP: The default app that will be used if it isn't - specified by the user. - """ - - DEFAULT_SHARING = "system" - DEFAULT_HANDLER = None - - # TODO: TEMPORARY FOR EST-1859 - PATH_SERVER_SETTINGS = "server/settings/settings/" - - def __init__( - self, - splunk, - username=None, - password=None, - namespace=None, - sharing=DEFAULT_SHARING, - owner=None, - app=None, - ): - """ - Creates a new connector. - - The connector will not be logged in when created so you have to manually - login. - - @param splunk: The Splunk instance - @type splunk: L{..splunk.Splunk} - @param username: The username to use. If None (default) - L{Connector.DEFAULT_USERNAME} is used. - @type username: str - @param password: The password to use. If None (default) - L{Connector.DEFAULT_PASSWORD} is used. - @type password: str - @param namespace: Deprecated. user owner and app instead. - @type namespace: str - @param sharing: used by python sdk service - @type sharing: str - @param owner: used by python sdk service - @type owner: str - @param app: used by python sdk service - @type app: str - """ - - super(SDKConnector, self).__init__( - splunk, username=username, password=password, owner=owner, app=app - ) - if namespace is not None and namespace != self.namespace: - msg = ( - "namespace is derecated. please use owner and app. " - "Your namespace setting : %s, owner&app setting:%s" - % (namespace, self.namespace) - ) - self.logger.error(msg) - raise Exception(msg) - self.sharing = ( - sharing # accepting None value, so SDK takes owner and app blindly. 
- ) - - self._service = Service(handler=self.DEFAULT_HANDLER, **self._service_arguments) - splunk.register_start_listener(self._recreate_service) - - # TODO: TEMPORARY FOR EST-1859 - self._server_settings_endpoint = Endpoint( - self._service, self.PATH_SERVER_SETTINGS - ) - - @property - def _service_arguments(self): - """ - The arguments to pass to the Service. - - If makes sure that they have default values if nothing is specified. - - @rtype: dict - """ - return { - "username": self.username, - "password": self.password, - # No longer used by SDK's splunklib.binding since 0.8.0 (beta) - # 'namespace': self.namespace, - "owner": self.owner, - "app": self.app, - "sharing": self.sharing, - "scheme": self.splunk.splunkd_scheme(), - "host": self.splunk.splunkd_host(), - "port": self.splunk.splunkd_port(), - } - - def __del__(self): - """ - Called when the object is being deallocated. - It unregisters itself with the Splunk start listeners. - """ - self.splunk.unregister_start_listener(self._recreate_service) - - def _recreate_service(self): - """ - Clones the current service with the same values. - It then tries to log the service in if the old one was logged in. - - Called when Splunk starts. - """ - self.logger.debug("Recreating and cloning the current Service.") - _was_logged_in = self._was_logged_in() - service = self._clone_existing_service() - self._service = service - if _was_logged_in: - try: - self.login() - except AuthenticationError as autherr: - self.logger.warn( - "SDKConnector for username:{username} password:{password}" - " login failed when recreating service. error msg:{error}".format( - username=self.username, - password=self.password, - error=autherr.message, - ) - ) - - # TODO: TEMPORARY FOR EST-1859 - self._server_settings_endpoint = Endpoint( - self._service, self.PATH_SERVER_SETTINGS - ) - - def _clone_existing_service(self): - """ - Clones the existing service with the exception that it re-reads the - connection info from the Splunk instance. 
- - @return: The newly created Service - @rtype: Service - """ - return Service(handler=self.DEFAULT_HANDLER, **self._service_arguments) - - @property - def service(self): - """ - The Service that is connected with this connector. - - The Service is the object that comes from the public SDK. - - @rtype: Service - """ - return self._service - - # TODO: TEMPORARY FOR EST-1859 - @property - def server_settings_endpoint(self): - return self._server_settings_endpoint - - def is_logged_in(self): - """ - Checks if the connector is logged in. - - Hits an endpoint and check if AuthenticationError is raised. - - @return: True if the connector is logged in - @rtype: bool - """ - try: - self._service.get("authentication/current-context") - # FAST-8222 - except AuthenticationError as err: - self.logger.debug( - "SDKconnector %s:%s is NOT logged in" % (self.username, self.password) - ) - return False - else: - self.logger.debug( - "SDKconnector %s:%s is logged in" % (self.username, self.password) - ) - return True - - def _was_logged_in(self): - """ - Checks if the connector was logged in. - - This checks if the service.token is set. - :return: - """ - return self._service.token is not _NoAuthenticationToken - - def login(self): - """ - Logs the connector in. - - Just calls the login method on the service object. - - @return: self - @rtype: SDKConnector - """ - self.logger.debug("Logging in the connector.") - self._attempt_login_time = time.time() - self.service.login() - return self - - def logout(self): - """ - Logs the connector out by calling the logout method on the service. - - This in turn just unsets the auth token. 
- - @return: self - @rtype: SDKConnector - """ - self.logger.debug("Logging out the connector.") - self.service.logout() - return self diff --git a/pytest_splunk_addon/helmut/exceptions/__init__.py b/pytest_splunk_addon/helmut/exceptions/__init__.py deleted file mode 100644 index 59b77e0da..000000000 --- a/pytest_splunk_addon/helmut/exceptions/__init__.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from httplib2 import HttpLib2Error - - -class UnsupportedConnectorError(BaseException): - """ - Raised in manager_utils during creation of a Manager when the class type - of the new Manager object is being determined. When mapping the class type - of the connector currently in use to the appropriate and corresponding - subclass of Manager associated with that connector type, this - exception will be raised if the Manager has no subclasses associated with - that connector type. - - Also raised during creation of a connector for a splunk instance if - attempting to create a connector type which isn't supported or doesnt exist - """ - - def __init__(self, message=None): - message = message or "The specified connector is not supported" - super(UnsupportedConnectorError, self).__init__(message) - - -class InvalidFileModeError(BaseException): - """ - When opening a steam to a file, if the file mode is invalid this exception - will be raised. 
- """ - - def __init__(self, message="The specified file mode is invalid"): - super(InvalidFileModeError, self).__init__(message) - - -class RetrieveError(RuntimeError): - """ - Error raised when retrieving data over SSH. - - NOTE: If this exception is to be related to solely SSH - communication, its name should be changed to reflect this. - """ - - pass - - -class SendError(RuntimeError): - """ - Error raised when sending data over SSH. - - NOTE: If this exception is to be related to solely SSH - communication, its name should be changed to reflect this. - """ - - pass - - -class AuthenticationError(HttpLib2Error): - """ - Raised when a login request to Splunk fails. - """ - - pass - - -class ExpectedExceptionNotRaisedError(BaseException): - """ - Raised when a expected exception is not raised - """ - - def __init__(self, err=None): - """ - - :param err: Expected Exception. - :type err: Exception - :return: - """ - message = "Expected exception not raised: %s" % err - super(ExpectedExceptionNotRaisedError, self).__init__(message) diff --git a/pytest_splunk_addon/helmut/exceptions/command_execution.py b/pytest_splunk_addon/helmut/exceptions/command_execution.py deleted file mode 100644 index ea4d7044f..000000000 --- a/pytest_splunk_addon/helmut/exceptions/command_execution.py +++ /dev/null @@ -1,64 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -class CommandExecutionFailure(RuntimeError): - """ - Generic exception for when a Splunk command fails to execute. - - @ivar command: The command that failed. - @type command: str - @ivar code: The exit code. - @type code: int - @param stdout: The standard output. - @type stdout: str - @ivar stderr: The standard error output. - @type stderr: str - """ - - def __init__(self, command="", code="", stdout="", stderr=""): - # FAST-8061 Custom exceptions are not raised properly when used in Multiprocessing Pool - """ - Creates a new exception. - - @param command: The command that failed. - @type command: str - @param code: The exit code. - @type code: int - @param stderr: The stderr output. - @type stderr: str - """ - self.command = command - self.code = code - self.stderr = stderr - self.stdout = stdout - - super(CommandExecutionFailure, self).__init__(self._error_message) - - @property - def _error_message(self): - """ - The error message for this exception. - - Is built using L{command}, L{code}, L{stdout} and L{stderr}. - - @rtype: str - """ - message = "Command {cmd} returned code {code}.\n" - message += "############\nstdout: {stdout}\n" - message += "############\nstderr: {stderr}" - - return message.format( - cmd=self.command, code=self.code, stdout=self.stdout, stderr=self.stderr - ) diff --git a/pytest_splunk_addon/helmut/exceptions/confs.py b/pytest_splunk_addon/helmut/exceptions/confs.py deleted file mode 100644 index 61bfde5a3..000000000 --- a/pytest_splunk_addon/helmut/exceptions/confs.py +++ /dev/null @@ -1,52 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -Created on Jul 18, 2012 - -@author: parhamfh -""" - - -class StanzaNotFound(RuntimeError): - """ - Raised when a conf file does not contain the specified stanza. - - @ivar conf: The name of the conf file. - @ivar stanza: The name of the stanza. - """ - - def __init__(self, conf, stanza): - """ - Creates a new exception. - - @param conf: The name of conf file. - @type conf: str - @param stanza: The name of stanza. - @type stanza: str - """ - self.conf = conf - self.stanza = stanza - super(StanzaNotFound, self).__init__(self._error_message) - - @property - def _error_message(self): - """ - The error message for this exception. - - @rtype: str - """ - msg = "Stanza '{stanza}' doesn't exist in conf-file '{conf}.conf'" - return msg.format(stanza=self.stanza, conf=self.conf) diff --git a/pytest_splunk_addon/helmut/exceptions/search.py b/pytest_splunk_addon/helmut/exceptions/search.py deleted file mode 100644 index ce52c5ffe..000000000 --- a/pytest_splunk_addon/helmut/exceptions/search.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -class SearchFailure(RuntimeError): - """ - This exception is raised when a search fails and returns the error through sdk get_message function. - """ - - def __init__(self, search_message): - self.search_message = search_message - super(SearchFailure, self).__init__(self._error_message) - - @property - def _error_message(self): - message = "Search failed with Error: {0}" - return message.format(self.search_message) diff --git a/pytest_splunk_addon/helmut/exceptions/wait.py b/pytest_splunk_addon/helmut/exceptions/wait.py deleted file mode 100644 index 4324d11cf..000000000 --- a/pytest_splunk_addon/helmut/exceptions/wait.py +++ /dev/null @@ -1,52 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -class WaitTimedOut(RuntimeError): - """ - This exception is raised when a designated wait period times out. 
- """ - - def __init__(self, seconds_waited): - self.seconds_waited = seconds_waited - super(WaitTimedOut, self).__init__(self._error_message) - - @property - def _error_message(self): - message = "Search was not done after {0} seconds" - return message.format(self.seconds_waited) - - -class DownloadTimedOut(RuntimeError): - """ - This exception is raise when release doesnt return the package - """ - - pass - - -class ExecuteTimeOut(RuntimeError): - """ - This exception is raise when execute function time out - """ - - pass - - -class Md5CheckFailed(RuntimeError): - """ - This exception is raise when MD5 check failed - """ - - pass diff --git a/pytest_splunk_addon/helmut/log/__init__.py b/pytest_splunk_addon/helmut/log/__init__.py deleted file mode 100644 index 643334f73..000000000 --- a/pytest_splunk_addon/helmut/log/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -Created on Jun 15, 2012 - -@author: parhamfh -""" - -import datetime -import logging -import logging.config -import os -from builtins import object -from logging import FileHandler, Formatter - -from future.utils import with_metaclass - -_LOG_FORMAT = "[%(asctime)s] %(levelname)s - %(name)s: %(message)s" -_DATE_FORMAT = "%Y-%m-%d %H:%M:%S.%f" -_FILE_NAME = "helmut.log" - - -def setup_logger(debug=False): - """ - Setups up the logging library - - @param debug: If debug log messages are to be outputted - @type debug: bool - """ - logger = logging.getLogger("") - handler = FileHandler(filename=_FILE_NAME, mode="w") - handler.setFormatter(HelmutFormatter(_LOG_FORMAT)) - level = logging.INFO - if debug: - level = logging.DEBUG - logger.addHandler(handler) - logger.setLevel(level) - logger.debug("Logger: DEBUG logging is enabled") - - -class HelmutFormatter(Formatter): - - # Disabling error b/c function overrides old style Python function - # pylint: disable=C0103 - def formatTime(self, record, datefmt=None): - t = datetime.datetime.now() - # The [:-3] is put there to trim the last three digits of the - # microseconds, remove it if you intend to remove microseconds - # from the _DATE_FORMAT - return t.strftime(_DATE_FORMAT)[:-3] - - -from abc import ABCMeta - - -class Logging(with_metaclass(ABCMeta, object)): - def __init__(self): - self._logger = self._get_logger() - super(Logging, self).__init__() - - def _get_logger(self): - """ - Creates a new logger for this instance, should only be called once. - - @return: The newly created logger. - """ - return logging.getLogger(self._logger_name) - - @property - def _logger_name(self): - """ - The name of the logger. - - @rtype: str - """ - return self.__class__.__name__ - - @property - def logger(self): - """ - The logger of this Splunk object. - - @return: The associated logger. 
- """ - return self._logger diff --git a/pytest_splunk_addon/helmut/log/logging.conf b/pytest_splunk_addon/helmut/log/logging.conf deleted file mode 100644 index 8977e7718..000000000 --- a/pytest_splunk_addon/helmut/log/logging.conf +++ /dev/null @@ -1,57 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -[loggers] -keys=root,botocore,s3transfer,testcube - -[handlers] -keys=fileHandler - -[formatters] -keys=helmutFormatter - -[logger_root] -level=DEBUG -handlers=fileHandler - -[logger_botocore] -level=INFO -handlers=fileHandler -qualname=botocore -propagate=0 - -[logger_s3transfer] -level=INFO -handlers=fileHandler -qualname=s3transfer -propagate=0 - -[logger_testcube] -level=DEBUG -handlers=fileHandler -qualname=pytest_splunk_testcube -propagate=0 - -[handler_fileHandler] -class=FileHandler -args=('..log', 'w') -level=DEBUG -formatter=helmutFormatter - -[formatter_helmutFormatter] -format=[%(asctime)s] %(levelname)s - %(name)s: %(message)s -datefmt=%X -class=..log.HelmutFormatter diff --git a/pytest_splunk_addon/helmut/manager/__init__.py b/pytest_splunk_addon/helmut/manager/__init__.py deleted file mode 100644 index fa4969969..000000000 --- a/pytest_splunk_addon/helmut/manager/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import ABCMeta - -from future.utils import with_metaclass - -from pytest_splunk_addon.helmut.log import Logging - - -class Manager(with_metaclass(ABCMeta, Logging)): - def __init__(self, connector): - self._connector = connector - - Logging.__init__(self) - - @property - def connector(self): - return self._connector diff --git a/pytest_splunk_addon/helmut/manager/confs/__init__.py b/pytest_splunk_addon/helmut/manager/confs/__init__.py deleted file mode 100644 index 233c064e0..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/__init__.py +++ /dev/null @@ -1,111 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager import Manager -from pytest_splunk_addon.helmut.misc.collection import Collection -from pytest_splunk_addon.helmut.misc.manager_utils import ( - create_wrapper_from_connector_mapping, -) - -PATH_CONF = "configs/conf-%s/" -PATH_PROPERTIES = "properties/" -PATH_PERFIX = "/servicesNS/nobody/system/" -COUNT_OFFSET = "?count=-1&offset=0" - - -class Confs(Manager, Collection): - """ - This manager represents the collection of .conf files of the Splunk system. - - A notable difference to most other managers is that there are two layers - of ItemFromManager classes in this class: - - * Confs contains all the .conf files represented as Conf objects (which are - of the type ItemFromManager). - * A Conf object contains all the stanzas represented as Stanza objects - (which are also of the type ItemFromManager). - * A Stanza contains a collection of key-value pairs corresponding to the - content of a stanza. - """ - - def __init__(self, connector): - """ - The constructor of Confs. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - Manager.__init__(self, connector) - Collection.__init__(self) - - def __new__(cls, connector): - """ - The function called when creating a new Confs object. - An internal map stores mappings from connector type to corresponding - Indexes subclass, making sure that the appropriate Indexes class is - evoked. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - mappings = _CONNECTOR_TO_WRAPPER_MAPPINGS - return create_wrapper_from_connector_mapping(cls, connector, mappings) - - @abstractmethod - def __getitem__(self, conf_name): - """ - Fetch a .conf file. - - @param conf_name: The name of the conf file to fetch. 
- @type conf_name: String - """ - pass - - def __contains__(self, conf_name): - for conf in self: - if conf.name == conf_name: - return True - return False - - @abstractmethod - def create(self, conf_name): - """ - Create a new .conf file. If .conf file already exists do nothing - except returning the .conf file. - - @param conf_name: The name of the .conf file to create. - @type conf_name: String. - """ - pass - - -# We need to do this at the bottom to avoid import errors -from pytest_splunk_addon.helmut.connector.sdk import SDKConnector -from pytest_splunk_addon.helmut.connector.rest import RESTConnector -from pytest_splunk_addon.helmut.manager.confs.sdk import SDKConfsWrapper -from pytest_splunk_addon.helmut.manager.confs.rest import RESTConfsWrapper - -_CONNECTOR_TO_WRAPPER_MAPPINGS = { - SDKConnector: SDKConfsWrapper, - RESTConnector: RESTConfsWrapper, -} diff --git a/pytest_splunk_addon/helmut/manager/confs/conf.py b/pytest_splunk_addon/helmut/manager/confs/conf.py deleted file mode 100644 index 162b0ecce..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/conf.py +++ /dev/null @@ -1,93 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager.object import ItemFromManager -from pytest_splunk_addon.helmut.misc.collection import Collection - - -class Conf(ItemFromManager, Collection): - """ - This class represents a .conf file in the Splunk system and is the subclass - associated with the L{SDKConnector}. A Conf object holds Stanza objects - that represent the .conf files content. - - See documentation for L{Confs} for more information. - """ - - def __init__(self, connector, conf_name): - """ - The constructor for Conf. - - @param connector: The connector which is used to talk to Splunk. - @type connector: L{Connector} - @param conf_name: The name of the conf file to be created. The suffix - .conf does not need to be specified here. - @type conf_name: String - """ - ItemFromManager.__init__(self, connector) - Collection.__init__(self) - self._name = conf_name - - @property - def name(self): - return self._name - - @abstractmethod - def __getitem__(self, stanza_name): - """ - Fetch a stanza. - - @param stanza_name: Name of the stanza to fetch. - type stanza_name: String - """ - pass - - @abstractmethod - def items(self): - pass - - @abstractmethod - def create_stanza(self, stanza_name, values=None): - """ - Create stanza in conf-file. Do nothing if stanza exists. - - @param stanza_name: The name of the stanza to create. - @type stanza_name: String. - """ - pass - - @abstractmethod - def delete_stanza(self, stanza_name): - """ - Delete stanza in conf-file. If stanza doesn't exist, do nothing. - - @param stanza_name: The name of the stanza to remove. - @type stanza_name: String. 
- """ - pass - - def __contains__(self, stanza_name): - for stanza in self: - if stanza.name == stanza_name: - return True - return False diff --git a/pytest_splunk_addon/helmut/manager/confs/rest/__init__.py b/pytest_splunk_addon/helmut/manager/confs/rest/__init__.py deleted file mode 100644 index 1d084163a..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/rest/__init__.py +++ /dev/null @@ -1,136 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Lei Zhang -@contact: U{leiz@splunk.com} -@since: 2018-05-01 -""" -import json -from builtins import object -from builtins import range - -from pytest_splunk_addon.helmut.manager.confs import Confs -from pytest_splunk_addon.helmut.manager.confs import ( - PATH_PERFIX, - PATH_CONF, - PATH_PROPERTIES, -) -from pytest_splunk_addon.helmut.manager.confs.rest.conf import RESTConfWrapper -from pytest_splunk_addon.helmut.util.string_unicode_convert import ( - normalize_to_str, - normalize_to_unicode, -) - - -class RESTConfsWrapper(Confs): - """ - The Confs subclass that is associated with the RESTConnector. 
- """ - - @property - def _service(self): - return self.connector - - def __getitem__(self, conf_name): - for conf in self: - if conf.name == conf_name: - return conf - raise ConfNotFound(conf_name) - - def create(self, conf_name): - conf_name = normalize_to_unicode(conf_name) - if conf_name in self: - self.logger.info("conf file '%s' already existed" % conf_name) - return - - self.logger.info("Creating conf file %s" % conf_name) - # create(conf_name) - return RESTConfWrapper(self.connector, self._create(conf_name)) - - def items(self): - conf_names = self.list() - # for c in conf_names: - # print c.name - return [RESTConfWrapper(self.connector, conf_name) for conf_name in conf_names] - - def list(self): - # return conf object - conf_list = [] - url = PATH_PERFIX + PATH_PROPERTIES - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, req_args) - assert response["status"] == "200" - parsed_content = json.loads(content) - for i in range(len(parsed_content["entry"])): - conf_list.append(parsed_content["entry"][i]["name"]) - return [Configurations(self.connector, conf_name) for conf_name in conf_list] - - def _create(self, conf_name): - conf_name = normalize_to_str(conf_name) - url = PATH_PERFIX + PATH_PROPERTIES - user_args = {"__conf": conf_name} - response, content = self.connector.make_request( - "POST", url, user_args, {"output_mode": "json"} - ) - assert response["status"] == "201" - if conf_name in self: - return Configurations(self.connector, conf_name) - - -class ConfNotFound(RuntimeError): - """ - Raised when a conf file that does not exist is read from. - - @ivar conf: The name of the conf that did not exist. - """ - - def __init__(self, conf): - """ - Creates a new exception. - - @param conf: The name of the conf file that was missing. - """ - self.conf = conf - super(ConfNotFound, self).__init__(self._error_message) - - @property - def _error_message(self): - """ - The error message for this exception. 
- - @rtype: str - """ - msg = "The conf file {conf}.conf doesn't exist" - return msg.format(conf=self.conf) - - -class Configurations(object): - """ - wraps a Config object using Splunk REST connector - """ - - def __init__(self, connector, conf_name): - self.connector = connector - self._name = conf_name - self._path = PATH_CONF % conf_name - - @property - def name(self): - return self._name - - @property - def path(self): - return self._path diff --git a/pytest_splunk_addon/helmut/manager/confs/rest/conf.py b/pytest_splunk_addon/helmut/manager/confs/rest/conf.py deleted file mode 100644 index 6c78915f1..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/rest/conf.py +++ /dev/null @@ -1,193 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Lei Zhang -@contact: U{leiz@splunk.com} -@since: 2018-05-01 -""" -from future import standard_library - -standard_library.install_aliases() -from builtins import range -from builtins import object -from splunklib.client import HTTPError - -from pytest_splunk_addon.helmut.manager.confs.conf import Conf -from pytest_splunk_addon.helmut.manager.confs.rest.stanza import RESTStanzaWrapper -from pytest_splunk_addon.helmut.exceptions.confs import StanzaNotFound -from pytest_splunk_addon.helmut.manager.confs import PATH_PERFIX, COUNT_OFFSET -from pytest_splunk_addon.helmut.util.string_unicode_convert import ( - normalize_to_str, - normalize_to_unicode, -) -import json -import urllib.request, urllib.parse, urllib.error - - -class RESTConfWrapper(Conf): - """ - The L{Conf} object corresponding to a Conf object in the Splunk REST API. - It holds a set of L{RESTStanza}s. - """ - - def __init__(self, rest_connector, rest_conf): - super(RESTConfWrapper, self).__init__(rest_connector, rest_conf.name) - self._raw_rest_conf = rest_conf - - @property - def raw_rest_conf(self): - return self._raw_rest_conf - - def __getitem__(self, stanza_name): - for stanza in self: - if stanza.name == stanza_name: - return stanza - raise StanzaNotFound(self.name, stanza_name) - - def stanzas_list( - self, - ): - stanza_list = [] - url = PATH_PERFIX + self._raw_rest_conf.path + COUNT_OFFSET - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, req_args) - assert response["status"] == "200" - parsed_content = json.loads(content) - for i in range(len(parsed_content["entry"])): - stanza_list.append(parsed_content["entry"][i]["name"]) - return [ - RestStanza(self.connector, self._raw_rest_conf, stanza_name) - for stanza_name in stanza_list - ] - - def _create_stanza(self, stanza_name, **values): - if stanza_name in self: - return RestStanza(self.connector, self._raw_rest_conf, stanza_name) - values = dict( - [normalize_to_str(k), 
normalize_to_str(v)] for k, v in values.items() - ) - stanza_name = normalize_to_str(stanza_name) - url = PATH_PERFIX + self._raw_rest_conf.path - user_args = {"name": stanza_name} - user_args.update(values) - response, content = self.connector.make_request( - "POST", url, user_args, {"output_mode": "json"} - ) - assert response["status"] == "201" - if normalize_to_unicode(stanza_name) in self: - return RestStanza(self.connector, self._raw_rest_conf, stanza_name) - - def _delete_stanza(self, stanza_name): - stanza_name = normalize_to_str(stanza_name) - url = ( - PATH_PERFIX - + self._raw_rest_conf.path - + "/{stanza_name}".format(stanza_name=stanza_name) - ) - self.connector.make_request("DELETE", url) - - def items(self): - stanzas = self.stanzas_list() - return [RESTStanzaWrapper(self, stanza) for stanza in stanzas] - - def create_stanza(self, stanza_name, values=None): - values = values or {} - values = dict( - [normalize_to_unicode(k), normalize_to_unicode(v)] - for k, v in values.items() - ) - stanza_name = normalize_to_unicode(stanza_name) - try: - self.logger.info( - "Creating stanza '%s' in %s.conf with values:" - " %s." % (stanza_name, self.name, values) - ) - return RESTStanzaWrapper(self, self._create_stanza(stanza_name, **values)) - except HTTPError as h: - self.logger.warn( - "Stanza '%s' already existed in %s.conf. 
" - "HTTPError message: %s" % (stanza_name, self.name, h) - ) - return self[stanza_name] - except Exception: - raise - - def delete_stanza(self, stanza_name): - stanza_name = normalize_to_unicode(stanza_name) - try: - self.logger.info( - "Deleting stanza '%s' in %s.conf" % (stanza_name, self.name) - ) - self._delete_stanza(stanza_name) - except HTTPError as h: - self.logger.warn("Error during deletion: %s" % h) - except Exception: - raise - - -class RestStanza(object): - """ - wraps a Stanza object using Splunk REST connector - """ - - def __init__(self, connector, rest_conf, rest_stanza_name): - self.connector = connector - self.rest_conf = rest_conf - self._name = rest_stanza_name - - @property - def name(self): - return self._name - - @property - def content(self): - return self._content() - - def _content(self): - name = urllib.parse.quote_plus(self._name) - url = ( - PATH_PERFIX + self.rest_conf.path + "{stanza_name}".format(stanza_name=name) - ) - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, req_args) - assert response["status"] == "200" - parsed_content = json.loads(content) - return parsed_content["entry"][0]["content"] - - def update(self, **values): - values = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in values.items() - ) - name = urllib.parse.quote_plus(self._name) - url = ( - PATH_PERFIX + self.rest_conf.path + "{stanza_name}".format(stanza_name=name) - ) - user_args = values - response, content = self.connector.make_request( - "POST", url, user_args, {"output_mode": "json"} - ) - assert response["status"] == "200" - - def refresh( - self, - ): - name = urllib.parse.quote_plus(self._name) - url = ( - PATH_PERFIX + self.rest_conf.path + "{stanza_name}".format(stanza_name=name) - ) - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, req_args) - assert response["status"] == "200" diff --git 
a/pytest_splunk_addon/helmut/manager/confs/rest/stanza.py b/pytest_splunk_addon/helmut/manager/confs/rest/stanza.py deleted file mode 100644 index bee6dcd89..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/rest/stanza.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Lei Zhang -@contact: U{leiz@splunk.com} -@since: 2018-05-01 -""" -from pytest_splunk_addon.helmut.exceptions.confs import StanzaNotFound -from pytest_splunk_addon.helmut.manager.confs.stanza import Stanza -from pytest_splunk_addon.helmut.util.string_unicode_convert import normalize_to_unicode - - -class RESTStanzaWrapper(Stanza): - """ - This class is the associated subclass of Stanza to the L{RESTConnector}. - This represents and wraps a Stanza object using the Splunk REST API. - """ - - def __init__(self, rest_conf, rest_stanza): - super(RESTStanzaWrapper, self).__init__(rest_conf, rest_stanza.name) - self._raw_rest_stanza = rest_stanza - - @property - def raw_rest_stanza(self): - return self._raw_rest_stanza - - @property - def raw_rest_conf(self): - return self.conf.raw_rest_conf - - def items(self): - return self.raw_rest_stanza.content - - def __setitem__(self, key, value): - key = normalize_to_unicode(key) - value = normalize_to_unicode(value) - try: - self.logger.info( - "Setting key '%s' to '%s' in stanza '%s' " - "in %s.conf." 
% (key, value, self.name, self.conf_name) - ) - # Update takes positional arguments and we send a dictionary so - # if written update(key=value) field 'key' in stanza will get value - self.raw_rest_stanza.update(**{key: value}) - self.raw_rest_stanza.refresh() - - except StanzaNotFound as s: - self.logger.warn(s) - raise - - def delete_value(self, key): - key = normalize_to_unicode(key) - try: - self.logger.info( - "Deleting key %s in stanza '%s' in %s.conf." - % (key, self.name, self.conf_name) - ) - # Update takes positional arguments and we send a dictionary - # If written update(key=value) field 'key' in stanza will get value - self.raw_rest_stanza.update(**{key: ""}) - self.raw_rest_stanza.refresh() - - # If key has value '' or None when fetched return True - if self[key] is None or self[key] == "": - return True - - # Something is wrong since the key still contains _some_ value - raise RuntimeError( - "delete_value(%s, %s, %s) did not properly" - " delete value. unexpected value in self[%s]: " - "%s" % (key, self.name, self.conf_name, key, self[key]) - ) - except StanzaNotFound as s: - self.logger.warn(s) - raise diff --git a/pytest_splunk_addon/helmut/manager/confs/sdk/__init__.py b/pytest_splunk_addon/helmut/manager/confs/sdk/__init__.py deleted file mode 100644 index f605d8927..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/sdk/__init__.py +++ /dev/null @@ -1,81 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from pytest_splunk_addon.helmut.manager.confs import Confs -from pytest_splunk_addon.helmut.manager.confs.sdk.conf import SDKConfWrapper - - -class SDKConfsWrapper(Confs): - """ - The Confs subclass that is associated with the SDKConnector. - It wraps the Splunk Python SDK's Confs object and contains a collection of - L{SDKConfWrapper}s. - """ - - @property - def _service(self): - return self.connector.service - - def __getitem__(self, conf_name): - for conf in self: - if conf.name == conf_name: - return conf - raise ConfNotFound(conf_name) - - def create(self, conf_name): - if conf_name in self: - self.logger.info("conf file '%s' already existed" % conf_name) - return - - self.logger.info("Creating conf file %s" % conf_name) - return SDKConfWrapper(self.connector, self._service.confs.create(conf_name)) - - def items(self): - conf_names = self._service.confs.list() - # for c in conf_names: - # print c.name - return [SDKConfWrapper(self.connector, conf_name) for conf_name in conf_names] - - -class ConfNotFound(RuntimeError): - """ - Raised when a conf file that does not exist is read from. - - @ivar conf: The name of the conf that did not exist. - """ - - def __init__(self, conf): - """ - Creates a new exception. - - @param conf: The name of the conf file that was missing. - """ - self.conf = conf - super(ConfNotFound, self).__init__(self._error_message) - - @property - def _error_message(self): - """ - The error message for this exception. - - @rtype: str - """ - msg = "The conf file {conf}.conf doesn't exist" - return msg.format(conf=self.conf) diff --git a/pytest_splunk_addon/helmut/manager/confs/sdk/conf.py b/pytest_splunk_addon/helmut/manager/confs/sdk/conf.py deleted file mode 100644 index 4dc1bc5c9..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/sdk/conf.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# Copyright 2021 Splunk Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from splunklib.client import HTTPError - -from pytest_splunk_addon.helmut.exceptions.confs import StanzaNotFound -from pytest_splunk_addon.helmut.manager.confs.conf import Conf -from pytest_splunk_addon.helmut.manager.confs.sdk.stanza import SDKStanzaWrapper - - -class SDKConfWrapper(Conf): - """ - The L{Conf} object corresponding to a Conf object in the Splunk Python SDK. - It holds a set of L{SDKStanzaWrapper}s. - """ - - def __init__(self, sdk_connector, sdk_conf): - super(SDKConfWrapper, self).__init__(sdk_connector, sdk_conf.name) - self._raw_sdk_conf = sdk_conf - - @property - def _service(self): - return self.connector.service - - @property - def raw_sdk_conf(self): - return self._raw_sdk_conf - - @property - def _path(self): - return self._raw_sdk_conf.path - - def __getitem__(self, stanza_name): - for stanza in self: - if stanza.name == stanza_name: - return stanza - raise StanzaNotFound(self.name, stanza_name) - - def items(self): - stanzas = self._service.confs[self.name].list() - return [SDKStanzaWrapper(self, stanza) for stanza in stanzas] - - def create_stanza(self, stanza_name, values=None): - values = values or {} - try: - self.logger.info( - "Creating stanza '%s' in %s.conf with values:" - " %s." 
% (stanza_name, self.name, values) - ) - return SDKStanzaWrapper( - self, self.raw_sdk_conf.create(stanza_name, **values) - ) - except HTTPError as h: - self.logger.warn( - "Stanza '%s' already existed in %s.conf. " - "HTTPError message: %s" % (stanza_name, self.name, h) - ) - return self[stanza_name] - except Exception: - raise - - def delete_stanza(self, stanza_name): - try: - self.logger.info( - "Deleting stanza '%s' in %s.conf" % (stanza_name, self.name) - ) - self.raw_sdk_conf.delete(stanza_name) - except HTTPError as h: - self.logger.warn("Error during deletion: %s" % h) - except Exception: - raise diff --git a/pytest_splunk_addon/helmut/manager/confs/sdk/stanza.py b/pytest_splunk_addon/helmut/manager/confs/sdk/stanza.py deleted file mode 100644 index 326f56735..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/sdk/stanza.py +++ /dev/null @@ -1,89 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from pytest_splunk_addon.helmut.exceptions.confs import StanzaNotFound -from pytest_splunk_addon.helmut.manager.confs.stanza import Stanza - - -class SDKStanzaWrapper(Stanza): - """ - This class is the associated subclass of Stanza to the L{SDKConnector}. - This represents and wraps a Stanza object in the Splunk Python SDK. 
- """ - - def __init__(self, sdk_conf, sdk_stanza): - super(SDKStanzaWrapper, self).__init__(sdk_conf, sdk_stanza.name) - self._raw_sdk_stanza = sdk_stanza - - @property - def raw_sdk_stanza(self): - return self._raw_sdk_stanza - - @property - def raw_sdk_conf(self): - return self.conf.raw_sdk_conf - - @property - def _service(self): - return self.connector.service - - def items(self): - # return self.raw_sdk_stanza.read()['content'] - return self.raw_sdk_stanza.content - - def __setitem__(self, key, value): - try: - self.logger.info( - "Setting key '%s' to '%s' in stanza '%s' " - "in %s.conf." % (key, value, self.name, self.conf_name) - ) - # Update takes positional arguments and we send a dictionary so - # if written update(key=value) field 'key' in stanza will get value - self.raw_sdk_stanza.update(**{key: value}) - self.raw_sdk_stanza.refresh() - - except StanzaNotFound as s: - self.logger.warn(s) - raise - - def delete_value(self, key): - try: - self.logger.info( - "Deleting key %s in stanza '%s' in %s.conf." - % (key, self.name, self.conf_name) - ) - # Update takes positional arguments and we send a dictionary - # If written update(key=value) field 'key' in stanza will get value - self.raw_sdk_stanza.update(**{key: ""}) - self.raw_sdk_stanza.refresh() - - # If key has value '' or None when fetched return True - if self[key] is None or self[key] == "": - return True - - # Something is wrong since the key still contains _some_ value - raise RuntimeError( - "delete_value(%s, %s, %s) did not properly" - " delete value. unexpected value in self[%s]: " - "%s" % (key, self.name, self.conf_name, key, self[key]) - ) - except StanzaNotFound as s: - self.logger.warn(s) - raise diff --git a/pytest_splunk_addon/helmut/manager/confs/stanza.py b/pytest_splunk_addon/helmut/manager/confs/stanza.py deleted file mode 100644 index 31c1441e2..000000000 --- a/pytest_splunk_addon/helmut/manager/confs/stanza.py +++ /dev/null @@ -1,101 +0,0 @@ -# -# Copyright 2021 Splunk Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager.object import ItemFromManager -from pytest_splunk_addon.helmut.misc.collection import Collection - - -class Stanza(ItemFromManager, Collection): - """ - This class is a collection of key-value pairs contained in a stanza. - - See L{Confs} and L{Conf} for more information. - """ - - def __init__(self, conf, stanza_name): - """ - The constructor for Stanza. - - @param conf: The Conf object which contains this Stanza object. - @type conf: L{Conf} - @param stanza_name: The name of this stanza. - @type type: String - """ - ItemFromManager.__init__(self, conf.connector) - Collection.__init__(self) - - self._conf = conf - self._name = stanza_name - - @property - def conf(self): - return self._conf - - @property - def conf_name(self): - return self.conf.name - - @property - def name(self): - return self._name - - @abstractmethod - def items(self): - pass - - @abstractmethod - def __setitem__(self, key, value): - pass - - @abstractmethod - def delete_value(self, value): - """ - Delete a value from the stanza. - - @param value: The attributes name to remove (i.e the key of - the key-value pair). - @type value: String. - @return: A Boolean True if the key is now None or '', False otherwise. 
- @rtype - Boolean - """ - pass - - def __getitem__(self, key): - """ - Fetch a value in the stanza. - - @param key: The key associated with the value to be fetched. - @type key: String - """ - # print "fetching key> %s"%key - for (k, v) in self: - # print "key %s val %s"%(k, v) - if key == k: - return v - raise KeyError("The specified key is not in the stanza") - - def __contains__(self, key): - return key in list(self.items()) - - def __iter__(self): - return list(self.items()).iteritems() diff --git a/pytest_splunk_addon/helmut/manager/indexes/__init__.py b/pytest_splunk_addon/helmut/manager/indexes/__init__.py deleted file mode 100644 index 14942a187..000000000 --- a/pytest_splunk_addon/helmut/manager/indexes/__init__.py +++ /dev/null @@ -1,114 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager import Manager -from pytest_splunk_addon.helmut.misc.collection import Collection -from pytest_splunk_addon.helmut.misc.manager_utils import ( - create_wrapper_from_connector_mapping, -) - -PATH_PERFIX = "/servicesNS/nobody/system/data/indexes/" -COUNT_OFFSET = "?count=-1&offset=0" -DISABLE = "/disable" -ENABLE = "/enable" -SYSTEM_MESSAGE = "/servicesNS/nobody/system/messages" -RESTART = "/services/server/control/restart" -ROLL_HOT_BUCKETS = "/roll-hot-buckets" - - -class Indexes(Manager, Collection): - """ - This class represents the Indexes endpoint in REST which is a collection of - L{Index}es. - """ - - def __init__(self, connector): - """ - Indexes' constructor. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - Manager.__init__(self, connector) - Collection.__init__(self) - - def __new__(cls, connector): - """ - The function called when creating a new Indexes object. - An internal map stores mappings from connector type to corresponding - Indexes subclass, making sure that the appropriate Indexes class is - evoked. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - mappings = _CONNECTOR_TO_WRAPPER_MAPPINGS - return create_wrapper_from_connector_mapping(cls, connector, mappings) - - @abstractmethod - def create_index(self, index_name): - """ - Create an index. - - @param index_name: The name of the new index. - @type index_name: String - """ - pass - - @abstractmethod - def __getitem__(self, index_name): - """ - Retrieve an index. - - @param index_name: Index name. 
- @type index_name: L{String} - """ - pass - - -class IndexNotFound(RuntimeError): - def __init__(self, index_name): - self.index_name = index_name - super(IndexNotFound, self).__init__(self._error_message) - - @property - def _error_message(self): - f = "Could not find index with name {name}" - return f.format(name=self.index_name) - - -class OperationError(Exception): - """Raised for a failed operation, such as a time out.""" - - pass - - -# We need to do this at the bottom to avoid import errors -from pytest_splunk_addon.helmut.connector.sdk import SDKConnector -from pytest_splunk_addon.helmut.connector.rest import RESTConnector -from pytest_splunk_addon.helmut.manager.indexes.sdk import SDKIndexesWrapper -from pytest_splunk_addon.helmut.manager.indexes.rest import RESTIndexesWrapper - -_CONNECTOR_TO_WRAPPER_MAPPINGS = { - SDKConnector: SDKIndexesWrapper, - RESTConnector: RESTIndexesWrapper, -} diff --git a/pytest_splunk_addon/helmut/manager/indexes/index.py b/pytest_splunk_addon/helmut/manager/indexes/index.py deleted file mode 100644 index a5be1cd8c..000000000 --- a/pytest_splunk_addon/helmut/manager/indexes/index.py +++ /dev/null @@ -1,52 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager.object import ItemFromManager - - -class Index(ItemFromManager): - """ - The Index class represents an index in Splunk. - """ - - @abstractmethod - def disable(self): - """ - Disable this index. Requires Splunk to restart. - """ - pass - - @abstractmethod - def enable(self): - """ - Enable this index. Does not require Splunk to restart. - """ - pass - - @abstractmethod - def edit(self, **kwargs): - """ - Edit this index. Check REST documentation to see what options are - available at - http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex - """ - pass diff --git a/pytest_splunk_addon/helmut/manager/indexes/rest/__init__.py b/pytest_splunk_addon/helmut/manager/indexes/rest/__init__.py deleted file mode 100644 index 52be2e8bb..000000000 --- a/pytest_splunk_addon/helmut/manager/indexes/rest/__init__.py +++ /dev/null @@ -1,254 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Lei Zhang -@contact: U{leiz@splunk.com} -@since: 2018-06-11 -""" -from future import standard_library - -standard_library.install_aliases() -from builtins import range -from future.utils import raise_ -from builtins import object -from splunklib.client import HTTPError - -from pytest_splunk_addon.helmut.manager.indexes import Indexes -from pytest_splunk_addon.helmut.manager.indexes.rest.index import RESTIndexWrapper -from pytest_splunk_addon.helmut.manager.indexes import ( - IndexNotFound, - PATH_PERFIX, - COUNT_OFFSET, - DISABLE, - SYSTEM_MESSAGE, - RESTART, - ENABLE, - ROLL_HOT_BUCKETS, - OperationError, -) -from pytest_splunk_addon.helmut.util.string_unicode_convert import ( - normalize_to_str, - normalize_to_unicode, -) -import json -import urllib.request, urllib.parse, urllib.error -from datetime import datetime, timedelta -import time - - -class RESTIndexesWrapper(Indexes): - """ - The Indexes subclass that wraps the Splunk REST Indexes object. - It basically contains a collection of L{RestIndexWrapper}s. - """ - - def create_index(self, index_name): - index_name = normalize_to_unicode(index_name) - try: - self.logger.info("Creating index '%s'" % index_name) - self._create_index(index_name) - except HTTPError as err: - # Index already exists - if not err.status == 409: - raise - self.logger.warn( - "Index '%s' already exists. 
HTTPError: %s" % (index_name, err) - ) - return self[index_name] - - def _create_index(self, index_name): - index_name = normalize_to_str(index_name) - url = PATH_PERFIX - user_args = {"name": index_name} - response, content = self.connector.make_request( - "POST", url, user_args, {"output_mode": "json"} - ) - assert response["status"] == "201" - if normalize_to_unicode(index_name) in self: - return RestIndex(self.connector, index_name) - - def _list_index(self): - index_list = [] - url = PATH_PERFIX + COUNT_OFFSET - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, req_args) - assert response["status"] == "200" - parsed_content = json.loads(content) - for i in range(len(parsed_content["entry"])): - index_list.append(parsed_content["entry"][i]["name"]) - return [RestIndex(self.connector, index_name) for index_name in index_list] - - def __getitem__(self, index_name): - for index in self: - if index.name == index_name: - return index - raise IndexNotFound(index_name) - - def __contains__(self, index_name): - for index in self: - if index.name == index_name: - return True - return False - - def items(self): - indexes = self._list_index() - return [RESTIndexWrapper(self.connector, index) for index in indexes] - - -class RestIndex(object): - """ - wraps a Index object using Splunk REST connector - """ - - def __init__(self, connector, index_name): - self.connector = connector - self._name = index_name - self.result = None - - @property - def name(self): - return self._name - - def encode_name(self): - return urllib.parse.quote_plus(self._name) - - def refresh(self): - name = self.encode_name() - url = PATH_PERFIX + name - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, req_args) - assert response["status"] == "200" - parsed_content = json.loads(content) - return parsed_content["entry"][0] - - def clean(self, timeout): - try: - result = self.refresh() - tds = 
result["content"]["maxTotalDataSizeMB"] - ftp = result["content"]["frozenTimePeriodInSecs"] - self.update(maxTotalDataSizeMB=1, frozenTimePeriodInSecs=1) - self.roll_hot_buckets() - diff = timedelta(seconds=timeout) - start = datetime.now() - Done = False - while not Done and datetime.now() < start + diff: - time.sleep(1) - res = self.refresh() - if int(res["content"]["totalEventCount"]) == 0: - Done = True - if not Done: - raise_( - OperationError, - "Cleaning index %s took longer than %s seconds; timing out." - % (self._name, timeout), - ) - finally: - self.update(maxTotalDataSizeMB=tds, frozenTimePeriodInSecs=ftp) - - def disable(self): - name = self.encode_name() - url = PATH_PERFIX + name + DISABLE - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("POST", url, req_args) - assert response["status"] == "200" - if self.restart_required(): - self.restart(120) - - def enable(self): - name = self.encode_name() - url = PATH_PERFIX + name + ENABLE - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("POST", url, req_args) - assert response["status"] == "200" - - def update(self, **kwargs): - name = self.encode_name() - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - url = PATH_PERFIX + name - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("POST", url, kwargs, req_args) - assert response["status"] == "200" - - def delete(self, **kwargs): - name = self.encode_name() - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - url = PATH_PERFIX + name - response, content = self.connector.make_request("DELETE", url) - assert response["status"] == "200" - - def roll_hot_buckets( - self, - ): - name = self.encode_name() - url = PATH_PERFIX + name + ROLL_HOT_BUCKETS - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("POST", url, req_args) - assert 
response["status"] == "200" - - def restart_required(self): - url = SYSTEM_MESSAGE - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, req_args) - assert response["status"] == "200" - parsed_content = json.loads(content) - if len(parsed_content["entry"]) == 0: - return False - else: - for entry in parsed_content["entry"]: - if "restart_required" in entry["title"]: - return True - return False - - def restart(self, timeout=None): - msg = {"value": "Restart requested by Splunk Helumt RestIndex Oject"} - url = SYSTEM_MESSAGE - user_args = {"name": "restart_required", "value": msg} - response, content = self.connector.make_request( - "POST", url, user_args, {"output_mode": "json"} - ) - assert response["status"] == "201" - response, content = self.connector.make_request( - "POST", RESTART, {"output_mode": "json"} - ) - assert response["status"] == "200" - result = json.loads(content) - if timeout is None: - return result - start = datetime.now() - diff = timedelta(seconds=timeout) - while datetime.now() - start < diff: - try: - time.sleep(1) - if self.login(self.connector) and not self.restart_required(): - return result - except Exception as e: - time.sleep(1) - raise Exception("Operation time out.") - - def login(self, connector): - if ( - hasattr(connector, "is_logged_in") - and connector._attempt_login_time > 0 - and not connector.is_logged_in() - ): - connector.login() - return connector.is_logged_in() - else: - return False diff --git a/pytest_splunk_addon/helmut/manager/indexes/rest/index.py b/pytest_splunk_addon/helmut/manager/indexes/rest/index.py deleted file mode 100644 index 4895a5b2a..000000000 --- a/pytest_splunk_addon/helmut/manager/indexes/rest/index.py +++ /dev/null @@ -1,146 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Lei Zhang -@contact: U{leiz@splunk.com} -@since: 2018-06-11 -""" -import time - -from pytest_splunk_addon.helmut.exceptions.wait import WaitTimedOut -from pytest_splunk_addon.helmut.manager.indexes.index import Index -from pytest_splunk_addon.helmut.util.string_unicode_convert import normalize_to_unicode - - -class RESTIndexWrapper(Index): - """ - The L{Index} subclass corresponding to an Index object in the - Splunk REST API. - """ - - def __init__(self, rest_connector, rest_index): - """ - RESTIndexWrapper's constructor. - - @param rest_connector: The connector which talks to Splunk through the - Splunk REST API. - @type rest_connector: RESTConnector - @param rest_index: The name of the new index. - @type rest_index: String - """ - super(RESTIndexWrapper, self).__init__(rest_connector) - self._raw_rest_index = rest_index - - def get_total_event_count(self): - """ - Returns the event count of the index. - - @return: The total event count. 
- @rtype: int - """ - result = self._raw_rest_index.refresh() - return int(result["content"]["totalEventCount"]) - - def wait_for_event_count(self, ecount, timeout): - event_number = 0 - previous_event_number = 0 - counter = timeout - done = False - while not done and counter > 0: - event_number = self.get_total_event_count() - if event_number > ecount: - self.logger.error( - "Index {name} contains events count is {now},more than expected events count {ecount}.".format( - name=self.name, now=event_number, ecount=ecount - ) - ) - return - elif event_number < ecount: - if event_number != previous_event_number: - self.logger.info( - "Index {name} events count is {now},previous events count is {pre}".format( - name=self.name, now=event_number, pre=previous_event_number - ) - ) - previous_event_number = event_number - time.sleep(1) - counter -= 1 - else: - done = True - - if counter != 0: - self.logger.info( - "Indexing (%s) completed in %s seconds." - % (self.name, (timeout - counter)) - ) - else: - self.logger.warn( - "Indexing (%s) did not complete within %s seconds.The events number is %s" - % (self.name, timeout, event_number) - ) - raise WaitTimedOut(timeout) - - def get_max_warm_db_count(self): - """ - Returns the value for stanza field maxWarmDBCount. - - @return: The value for maxWarmDBCount. - @rtype: int - """ - result = self._raw_rest_index.refresh() - return int(result["content"]["maxWarmDBCount"]) - - @property - def name(self): - """ - The name of the index. - """ - return self._raw_rest_index.name - - def clean(self, timeout=300): - """ - Cleans an index. All events will be removed. - - @param timeout: The maximum time to wait for the clean in seconds. - Default: 300 seconds. 
- @type timeout: int - """ - self.logger.info("Cleaning index %s" % self.name) - self._raw_rest_index.clean(timeout) - - def disable(self): - self.logger.info("Disabling index %s" % self.name) - self._raw_rest_index.disable() - - def enable(self): - self.logger.info("Enabling index %s" % self.name) - self._raw_rest_index.enable() - - def edit(self, **kwargs): - kwargs = dict( - [normalize_to_unicode(k), normalize_to_unicode(v)] - for k, v in kwargs.items() - ) - self.logger.info("Editing index %s with: %s" % (self.name, kwargs)) - self._raw_rest_index.update(**kwargs) - - def delete(self, **kwargs): - kwargs = dict( - [normalize_to_unicode(k), normalize_to_unicode(v)] - for k, v in kwargs.items() - ) - self.logger.info("Deleting index %s with: %s" % (self.name, kwargs)) - self._raw_rest_index.delete(**kwargs) diff --git a/pytest_splunk_addon/helmut/manager/indexes/sdk/__init__.py b/pytest_splunk_addon/helmut/manager/indexes/sdk/__init__.py deleted file mode 100644 index cfb635b20..000000000 --- a/pytest_splunk_addon/helmut/manager/indexes/sdk/__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from splunklib.client import HTTPError - -from pytest_splunk_addon.helmut.manager.indexes import IndexNotFound -from pytest_splunk_addon.helmut.manager.indexes import Indexes -from pytest_splunk_addon.helmut.manager.indexes.sdk.index import SDKIndexWrapper - - -class SDKIndexesWrapper(Indexes): - """ - The Indexes subclass that wraps the Splunk Python SDK's Indexes object. - It basically contains a collection of L{SDKIndexWrapper}s. - """ - - @property - def _service(self): - return self.connector.service - - def create_index(self, index_name): - try: - self.logger.info("Creating index '%s'" % index_name) - self.connector.service.indexes.create(index_name) - except HTTPError as err: - # Index already exists - if not err.status == 409: - raise - self.logger.warn( - "Index '%s' already exists. HTTPError: %s" % (index_name, err) - ) - return self[index_name] - - def __getitem__(self, index_name): - for index in self: - if index.name == index_name: - return index - raise IndexNotFound(index_name) - - def __contains__(self, index_name): - for index in self: - if index.name == index_name: - return True - return False - - def items(self): - indexes = self._service.indexes - return [SDKIndexWrapper(self.connector, index) for index in indexes] diff --git a/pytest_splunk_addon/helmut/manager/indexes/sdk/index.py b/pytest_splunk_addon/helmut/manager/indexes/sdk/index.py deleted file mode 100644 index 702d1c2e5..000000000 --- a/pytest_splunk_addon/helmut/manager/indexes/sdk/index.py +++ /dev/null @@ -1,143 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -import time - -from pytest_splunk_addon.helmut.exceptions.wait import WaitTimedOut -from pytest_splunk_addon.helmut.manager.indexes.index import Index - - -class SDKIndexWrapper(Index): - """ - The L{Index} subclass corresponding to an Index object in the - Splunk Python SDK. - """ - - def __init__(self, sdk_connector, sdk_index): - """ - SDKIndexWrapper's constructor. - - @param sdk_connector: The connector which talks to Splunk through the - Splunk Python SDK. - @type sdk_connector: SDKConnector - @param sdk_index: The name of the new index. - @type sdk_index: String - """ - super(SDKIndexWrapper, self).__init__(sdk_connector) - self._raw_sdk_index = sdk_index - - def get_total_event_count(self): - """ - Returns the event count of the index. - - @return: The total event count. 
- @rtype: int - """ - - return int(self._raw_sdk_index.refresh().content.totalEventCount) - - def wait_for_event_count(self, ecount, timeout): - event_number = 0 - previous_event_number = 0 - counter = timeout - done = False - while not done and counter > 0: - event_number = self.get_total_event_count() - if event_number > ecount: - self.logger.error( - "Index {name} contains events count is {now},more than expected events count {ecount}.".format( - name=self.name, now=event_number, ecount=ecount - ) - ) - return - elif event_number < ecount: - if event_number != previous_event_number: - self.logger.info( - "Index {name} events count is {now},previous events count is {pre}".format( - name=self.name, now=event_number, pre=previous_event_number - ) - ) - previous_event_number = event_number - time.sleep(1) - counter -= 1 - else: - done = True - - if counter != 0: - self.logger.info( - "Indexing (%s) completed in %s seconds." - % (self.name, (timeout - counter)) - ) - else: - self.logger.warn( - "Indexing (%s) did not complete within %s seconds.The events number is %s" - % (self.name, timeout, event_number) - ) - raise WaitTimedOut(timeout) - - def get_max_warm_db_count(self): - """ - Returns the value for stanza field maxWarmDBCount. - - @return: The value for maxWarmDBCount. - @rtype: int - """ - return int(self._raw_sdk_index.refresh().content.maxWarmDBCount) - - @property - def _service(self): - """ - Return the service associated with - """ - return self.connector.service - - @property - def name(self): - """ - The name of the index. - """ - return self._raw_sdk_index.name - - def clean(self, timeout=300): - """ - Cleans an index. All events will be removed. - - @param timeout: The maximum time to wait for the clean in seconds. - Default: 300 seconds. 
- @type timeout: int - """ - self.logger.info("Cleaning index %s" % self.name) - self._raw_sdk_index.clean(timeout) - - def disable(self): - self.logger.info("Disabling index %s" % self.name) - self._raw_sdk_index.disable() - - def enable(self): - self.logger.info("Enabling index %s" % self.name) - self._raw_sdk_index.enable() - - def edit(self, **kwargs): - self.logger.info("Editing index %s with: %s" % (self.name, kwargs)) - self._raw_sdk_index.update(**kwargs) - - def delete(self, **kwargs): - self.logger.info("Deleting index %s with: %s" % (self.name, kwargs)) - self._raw_sdk_index.delete(**kwargs) diff --git a/pytest_splunk_addon/helmut/manager/jobs/__init__.py b/pytest_splunk_addon/helmut/manager/jobs/__init__.py deleted file mode 100644 index cb7896056..000000000 --- a/pytest_splunk_addon/helmut/manager/jobs/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager import Manager -from pytest_splunk_addon.helmut.misc.collection import Collection -from pytest_splunk_addon.helmut.misc.manager_utils import ( - create_wrapper_from_connector_mapping, -) - -PATH_PERFIX = "/servicesNS/nobody/system/search/jobs/" -EVENTS = "/events" -RESULTS = "/results" -SUMMARY = "/summary" -CONTROL = "/control" -RESULTS_PREVIEW = "/results_preview" -TIMELINE = "/timeline" -SEARCHLOG = "/search.log" - - -class Jobs(Manager, Collection): - """ - Jobs is the manager that handles searches. - It does not handle pausing, resuming, etc of individual searches, it just - spawns and lists searches. - """ - - def __init__(self, connector): - Manager.__init__(self, connector) - Collection.__init__(self) - - def __new__(cls, connector): - mappings = _CONNECTOR_TO_WRAPPER_MAPPINGS - return create_wrapper_from_connector_mapping(cls, connector, mappings) - - @abstractmethod - def create(self, query, **kwargs): - pass - - @abstractmethod - def __getitem__(self, sid): - pass - - -class JobNotFound(RuntimeError): - def __init__(self, sid): - self.sid = sid - super(JobNotFound, self).__init__(self._error_message) - - @property - def _error_message(self): - return "Could not find a job with SID {sid}".format(sid=self.sid) - - -# We need this at the bottom to avoid cyclic imports - -from pytest_splunk_addon.helmut.connector.sdk import SDKConnector -from pytest_splunk_addon.helmut.connector.rest import RESTConnector -from pytest_splunk_addon.helmut.manager.jobs.sdk import SDKJobsWrapper -from pytest_splunk_addon.helmut.manager.jobs.rest import RESTJobsWrapper - -_CONNECTOR_TO_WRAPPER_MAPPINGS = { - SDKConnector: SDKJobsWrapper, - RESTConnector: RESTJobsWrapper, -} diff --git a/pytest_splunk_addon/helmut/manager/jobs/job.py b/pytest_splunk_addon/helmut/manager/jobs/job.py deleted file mode 100644 
index 99a917442..000000000 --- a/pytest_splunk_addon/helmut/manager/jobs/job.py +++ /dev/null @@ -1,108 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -import time -from abc import abstractmethod, abstractproperty -from builtins import str - -from pytest_splunk_addon.helmut.exceptions.search import SearchFailure -from pytest_splunk_addon.helmut.exceptions.wait import WaitTimedOut -from pytest_splunk_addon.helmut.manager.object import ItemFromManager - - -class Job(ItemFromManager): - """ - Job handles the individual searches that spawn jobs. This manager has the - ability to stop, pause, finalize, etc jobs. You can also retrieve - different data about the job such as event count. - """ - - _SECONDS_BETWEEN_JOB_IS_DONE_CHECKS = 1 - - @abstractmethod - def get_results(self, **kwargs): - pass - - @abstractmethod - def is_done(self): - pass - - @abstractmethod - def is_failed(self): - pass - - @abstractmethod - def get_messages(self): - pass - - @abstractproperty - def sid(self): - pass - - def wait(self, timeout=5400): - """ - Waits for this search to finish. - - @param timeout: The maximum time to wait in seconds. None or 0 - means no limit, None is default. - @type timeout: int - @return: self - @rtype: L{SDKJobWrapper} - @raise WaitTimedOut: If the search isn't done after - C{timeout} seconds. 
- """ - self.logger.debug("Waiting for job to finish.") - if timeout == 0: - timeout = None - - start_time = time.time() - while not self.is_done(): - try: - if self.is_failed(): - self.logger.warn( - "job %s failed. error message: %s" - % (self.sid, self.get_messages()) - ) - break - except AttributeError as e: - self.logger.debug(str(e)) - _check_if_wait_has_timed_out(start_time, timeout) - time.sleep(self._SECONDS_BETWEEN_JOB_IS_DONE_CHECKS) - - self.logger.debug("Job %s wait is done." % self.sid) - return self - - def check_message(self): - if self.get_messages(): - message = self.get_messages() - for key in message: - if key == "error": - raise SearchFailure(message[key]) - - -def _check_if_wait_has_timed_out(start_time, timeout): - if timeout is None: - return - if _wait_timed_out(start_time, timeout): - raise WaitTimedOut(timeout) - - -def _wait_timed_out(start_time, timeout): - return time.time() > start_time + timeout diff --git a/pytest_splunk_addon/helmut/manager/jobs/rest/__init__.py b/pytest_splunk_addon/helmut/manager/jobs/rest/__init__.py deleted file mode 100644 index 2fd146e55..000000000 --- a/pytest_splunk_addon/helmut/manager/jobs/rest/__init__.py +++ /dev/null @@ -1,235 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Annie Ju -@contact: U{xju@splunk.com} -@since: 2018-06-14 -""" - -import json -from builtins import object -from builtins import range - -from pytest_splunk_addon.helmut.manager.jobs import ( - Jobs, - JobNotFound, - PATH_PERFIX, - CONTROL, - EVENTS, - RESULTS, - RESULTS_PREVIEW, - SEARCHLOG, - SUMMARY, - TIMELINE, -) -from pytest_splunk_addon.helmut.manager.jobs.rest.job import RESTJobWrapper -from pytest_splunk_addon.helmut.util.string_unicode_convert import normalize_to_str - - -class RESTJobsWrapper(Jobs): - def create(self, query, **kwargs): - self.logger.info("Creating job with query: %s" % query) - job = self._create(query, **kwargs) - return RESTJobWrapper(self.connector, job) - - def _create(self, query, **kwargs): - query = normalize_to_str(query) - url = PATH_PERFIX - user_args = {"search": query} - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - args = user_args.copy() - args.update(kwargs) - response, content = self.connector.make_request( - "POST", url, args, {"output_mode": "json"} - ) - assert response["status"] == "201" - parsed_content = json.loads(content) - return RestJob(self.connector, parsed_content["sid"]) - - def __contains__(self, sid): - for job in self: - if job.sid == sid: - return True - return False - - def __getitem__(self, sid): - for job in self: - if job.sid == sid: - return job - raise JobNotFound(sid) - - # Required from Collection - - def items(self): - jobs = self._service.jobs - return [RESTJobWrapper(self.connector, job) for job in jobs] - - def _list_job(self): - job_list = [] - url = PATH_PERFIX - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, req_args) - assert response["status"] == "200" - parsed_content = json.loads(content) - for i in range(len(parsed_content["entry"])): - job_list.append(parsed_content["entry"][i]["name"]) - return [RestJob(self.connector, index_name) for index_name in job_list] - - -class 
RestJob(object): - """ - wraps a Job object using Splunk REST connector - """ - - def __init__(self, connector, sid): - self.connector = connector - self._sid = sid - - @property - def sid(self): - return self._sid - - def refresh(self): - sid = self._sid - url = PATH_PERFIX + sid - req_args = {"output_mode": "json"} - response, content = self.connector.make_request("GET", url, urlparam=req_args) - assert response["status"] == "200" - parsed_content = json.loads(content) - return parsed_content["entry"][0] - - def update_search(self, **kwargs): - sid = self._sid - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - url = PATH_PERFIX + sid - req_args = {"output_mode": "json"} - response, content = self.connector.make_request( - "POST", url, body=kwargs, urlparam=req_args - ) - assert response["status"] == "200" - - def delete_search(self, **kwargs): - sid = self._sid - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - url = PATH_PERFIX + sid - req_args = {"output_mode": "json"} - response, content = self.connector.make_request( - "DELETE", url, body=kwargs, urlparam=req_args - ) - assert response["status"] == "200" - - def control_search(self, **kwargs): - sid = self._sid - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - url = PATH_PERFIX + sid + CONTROL - req_args = {"output_mode": "json"} - response, content = self.connector.make_request( - "POST", url, body=kwargs, urlparam=req_args - ) - assert response["status"] == "200" - return json.loads(content) - - def get_search_events(self, **kwargs): - sid = self._sid - url = PATH_PERFIX + sid + EVENTS - req_args = {"output_mode": "json", "segmentation": "none"} - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - args = req_args.copy() - args.update(kwargs) - response, content = self.connector.make_request("GET", url, urlparam=args) - assert 
response["status"] == "200" - return content - - def get_search_results(self, **kwargs): - sid = self._sid - url = PATH_PERFIX + sid + RESULTS - req_args = {"output_mode": "json", "segmentation": "none"} - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - args = req_args.copy() - args.update(kwargs) - # Refer to INFRA-17464, get search result should not be recorded in ..log, - # otherwise, the ..log might be too large to open. - response, content = self.connector.make_request( - "GET", url, urlparam=args, log_response=False - ) - assert response["status"] == "200" - return content - - def get_search_results_preview(self, **kwargs): - sid = self._sid - url = PATH_PERFIX + sid + RESULTS_PREVIEW - req_args = {"output_mode": "json", "segmentation": "none"} - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - args = req_args.copy() - args.update(kwargs) - # Refer to INFRA-17464, get search result preview should not be recorded in ..log, - # otherwise, the ..log might be too large to open. 
- response, content = self.connector.make_request( - "GET", url, urlparam=args, log_response=False - ) - assert response["status"] == "200" - return content - - def get_search_log(self, **kwargs): - sid = self._sid - url = PATH_PERFIX + sid + SEARCHLOG - req_args = {"output_mode": "json", "segmentation": "none"} - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - args = req_args.copy() - args.update(kwargs) - response, content = self.connector.make_request("GET", url, urlparam=args) - assert response["status"] == "200" - return content - - def get_search_summary(self, **kwargs): - sid = self._sid - url = PATH_PERFIX + sid + SUMMARY - req_args = {"output_mode": "json", "segmentation": "none"} - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - args = req_args.copy() - args.update(kwargs) - response, content = self.connector.make_request("GET", url, urlparam=args) - return content - - def get_search_timeline(self, **kwargs): - sid = self._sid - url = PATH_PERFIX + sid + TIMELINE - req_args = {"output_mode": "json", "segmentation": "none"} - kwargs = dict( - [normalize_to_str(k), normalize_to_str(v)] for k, v in kwargs.items() - ) - args = req_args.copy() - args.update(kwargs) - response, content = self.connector.make_request("GET", url, urlparam=args) - return content diff --git a/pytest_splunk_addon/helmut/manager/jobs/rest/job.py b/pytest_splunk_addon/helmut/manager/jobs/rest/job.py deleted file mode 100644 index ca100493f..000000000 --- a/pytest_splunk_addon/helmut/manager/jobs/rest/job.py +++ /dev/null @@ -1,322 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Annie Ju -@contact: U{xju@splunk.com} -@since: 2018-06-14 -""" -from future import standard_library - -standard_library.install_aliases() -from pytest_splunk_addon.helmut.manager.jobs.results import Results -from pytest_splunk_addon.helmut.manager.jobs.job import Job -import json - - -class RESTJobWrapper(Job): - def __init__(self, rest_connector, rest_job): - """ - The constructor of the RESTJobWrapper. - - @param rest_connector: The RESTConnector object used to connect to Splunk. - @type param: RESTConnector - @param rest_job: The Job object from the Python REST. 
- @type param: splunklib.client.Job - """ - super(RESTJobWrapper, self).__init__(rest_connector) - self._raw_rest_job = rest_job - - @property - def raw_rest_job(self): - return self._raw_rest_job - - @property - def sid(self): - return self.raw_rest_job.sid - - def __str__(self): - return "REST Job with SID {sid}".format(sid=self.sid) - - # Endpoint specific - def get_request(self): - result = self.raw_rest_job.refresh() - return result["content"]["request"] - - def get_event_count(self): - result = self.raw_rest_job.refresh() - return ( - int(result["content"]["eventCount"]) - if "eventCount" in result["content"] - else 0 - ) - - def get_scan_count(self): - result = self.raw_rest_job.refresh() - return ( - int(result["content"]["scanCount"]) - if "scanCount" in result["content"] - else 0 - ) - - def get_event_available_count(self): - result = self.raw_rest_job.refresh() - return ( - int(result["content"]["eventAvailableCount"]) - if "eventAvailableCount" in result["content"] - else 0 - ) - - def get_result_count(self): - result = self.raw_rest_job.refresh() - return ( - int(result["content"]["resultCount"]) - if "resultCount" in result["content"] - else 0 - ) - - def get_cursor_time(self): - result = self.raw_rest_job.refresh() - return result["content"]["cursorTime"] - - def is_preview_enabled(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["isPreviewEnabled"]) - - def is_done(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["isDone"]) - - def is_failed(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["isFailed"]) - - def is_finalized(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["isFinalized"]) - - def is_saved(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["isSaved"]) - - def get_keywords(self): - result = self.raw_rest_job.refresh() - return result["content"]["keywords"] - - def is_paused(self): - result = 
self.raw_rest_job.refresh() - return bool(result["content"]["isPaused"]) - - def result_is_streaming(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["resultIsStreaming"]) - - def get_messages(self): - result = self.raw_rest_job.refresh() - return result["content"]["messages"] - - def get_ttl(self): - result = self.raw_rest_job.refresh() - return result["content"]["ttl"] - - def set_ttl(self, value): - self.logger.info("Setting job %s TTL to: %s" % (self.sid, value)) - response = self.raw_rest_job.control_search(action="setttl", ttl=value) - assert ( - "The ttl of the search job was changed to {value}".format(value=value) - in response["messages"][0]["text"] - ) - return self - - def get_error(self): - result = self.raw_rest_job.refresh() - return result["content"]["error"] if "error" in result["content"] else None - - def get_earliest_time(self): - result = self.raw_rest_job.refresh() - return result["content"]["earliestTime"] - - def get_latest_time(self): - result = self.raw_rest_job.refresh() - return ( - result["content"]["latestTime"] - if "latestTime" in result["content"] - else None - ) - - def get_run_duration(self): - result = self.raw_rest_job.refresh() - return float(result["content"]["runDuration"]) - - def get_event_search(self): - result = self.raw_rest_job.refresh() - return result["content"]["eventSearch"] - - def event_is_streaming(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["eventIsStreaming"]) - - def get_event_sorting(self): - result = self.raw_rest_job.refresh() - val = result["content"]["eventSorting"] - return None if val == "none" else val - - def get_report_search(self): - result = self.raw_rest_job.refresh() - return result["content"]["reportSearch"] - - def get_remote_search(self): - result = self.raw_rest_job.refresh() - return result["content"]["remoteSearch"] - - def event_is_truncated(self): - result = self.raw_rest_job.refresh() - return 
bool(result["content"]["eventIsTruncated"]) - - def get_label(self): - result = self.raw_rest_job.refresh() - return result["content"]["label"] - - def get_dispatch_state(self): - result = self.raw_rest_job.refresh() - return result["content"]["dispatchState"] - - def is_saved_search(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["isSavedSearch"]) - - def is_zombie(self): - result = self.raw_rest_job.refresh() - return bool(result["content"]["isZombie"]) - - def get_search_providers(self): - result = self.raw_rest_job.refresh() - return result["content"]["searchProviders"] - - def get_status_buckets(self): - result = self.raw_rest_job.refresh() - return int(result["content"]["statusBuckets"]) - - def get_done_progress(self): - result = self.raw_rest_job.refresh() - return int(result["content"]["doneProgress"]) - - # The methods below are forwarding calls to the job - def cancel(self): - self.logger.info("Cancelling job, SID: %s" % self.sid) - response = self.raw_rest_job.control_search(action="cancel") - assert "cancelled" in response["messages"][0]["text"] - return self - - def disable_preview(self): - self.logger.info("Disabling preview for job, SID: %s" % self.sid) - response = self.raw_rest_job.control_search(action="disablepreview") - assert response["messages"][0]["text"] == "Search job results preview disabled." 
- return self - - def get_events(self, **kwargs): - response = self.raw_rest_job.get_search_events(**kwargs) - return _build_results_from_rest_response(response) - - def get_events_dict(self, **kwargs): - response = self.raw_rest_job.get_search_events(**kwargs) - return _build_events_from_rest_response(response) - - def get_results_dict(self, **kwargs): - response = self.raw_rest_job.get_search_results(**kwargs) - return _build_results_dict_from_rest_response(response) - - def enable_preview(self): - self.logger.info("Enabling preview for job, SID: %s" % self.sid) - response = self.raw_rest_job.control_search(action="enablepreview") - assert response["messages"][0]["text"] == "Search job results preview enabled." - return self - - def finalize(self): - self.logger.info("Finalizing job, SID: %s" % self.sid) - response = self.raw_rest_job.control_search(action="finalize") - assert "finalized" in response["messages"][0]["text"] - return self - - def pause(self): - self.logger.info("Pausing job, SID: %s" % self.sid) - response = self.raw_rest_job.control_search(action="pause") - assert "paused" in response["messages"][0]["text"] - return self - - def unpause(self): - self.logger.info("Unpausing job, SID: %s" % self.sid) - response = self.raw_rest_job.control_search(action="unpause") - assert "unpaused" in response["messages"][0]["text"] - return self - - def get_preview(self, **kwargs): - response = self.raw_rest_job.get_search_results_preview(**kwargs) - return _build_results_from_rest_response(response) - - def get_results(self, **kwargs): - response = self.raw_rest_job.get_search_results(**kwargs) - return _build_results_from_rest_response(response) - - def get_search_log(self, **kwargs): - return self.raw_rest_job.get_search_log(**kwargs) - - def set_priority(self, value): - self.logger.info("Setting priority of job %s to: %s" % (self.sid, value)) - response = self.raw_rest_job.control_search( - action="setpriority", priority=value - ) - assert ( - "The search 
job's priority was changed" in response["messages"][0]["text"] - ) - return self - - def get_summary(self, **kwargs): - response = self.raw_rest_job.get_search_summary(**kwargs) - return _build_results_from_rest_response(response) if response != "" else "" - - def get_timeline(self, **kwargs): - response = self.raw_rest_job.get_search_timeline(**kwargs) - return response - - def touch(self): - self.logger.info("Touching job, SID: %s" % self.sid) - response = self.raw_rest_job.control_search(action="touch") - assert "touched" in response["messages"][0]["text"] - return self - - -def _build_results_from_rest_response(response): - """ - Get results from the REST and return them. - """ - events = json.loads(response)["results"] - return Results(events) - - -def _build_results_dict_from_rest_response(response): - """ - Get results from the REST and return them. - """ - events = json.loads(response)["results"] - return events - - -def _build_events_from_rest_response(response): - """ - Get results from the REST and return them. - """ - events = json.loads(response)["results"] - return events diff --git a/pytest_splunk_addon/helmut/manager/jobs/results.py b/pytest_splunk_addon/helmut/manager/jobs/results.py deleted file mode 100644 index 1da44732e..000000000 --- a/pytest_splunk_addon/helmut/manager/jobs/results.py +++ /dev/null @@ -1,269 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -import copy -from builtins import object - - -class Results(object): - """ - A class that represents a result set. - - These results can be represented in two ways; as a list or as a dictionary. - - The formats are: - - List:: - [ - { - 'field1': value1, - 'field2': value2, - ... - }, - - { - 'field2': value3, - 'field3': value4, - ... - } - ] - - Dictionary:: - { - 'field1': [value1, None, ...], - 'field2': [value2, value3, ...], - 'field3': [None, value4, ...], - ... - } - - As you can see each event in the list doesn't have to contain all fields. - - @ivar _list: The results as a list - @ivar _dict_cached: Since creating the dictionary is expensive it's cached. - You should use the _dict property though! - """ - - def __init__(self, results_): - """ - Constructor - - @param results_: The raw results as returned by ResultReader - @type results_: list - """ - super(Results, self).__init__() - - self._list = results_ - self._dict_cache = None - - def __repr__(self): - """ - Returns a string representation of this object - - @return: The representation - @rtype: str - """ - return "Results set with {count} result(s)".format(count=len(self)) - - def get_field(self, field): - """ - Returns the values for the specified field or None if it doesn't exist - - @param field: The field to get - @type field: str - @return: A list of values for that field - @rtype: list(str) - """ - return self.as_dict.get(field) - - def __getitem__(self, index): - """ - Returns the specified event as a dictionary. 
- - used when doing this: - >>> results[index] - - @param index: The index to get - @type index: int - @return: The fields for that event - @rtype: dict(str: str) - """ - return self.as_list[index] - - def get_event(self, index): - """ - Returns the event at the specified index as a dictionary - - This is an alias for C{__getitem__} - - @param index: The index to get - @type index: int - @return: The event at that index - @rtype: dict(str: str) - """ - return self[index] - - def __iter__(self): - """ - Returns an iterator for this result set. - - The iterator will iterate over each event. - - This is used when doing - >>> for event in results: ... - - @return: The iterator - @rtype: iterator - """ - return self.as_list.__iter__() - - def __contains__(self, field): - """ - Checks if the specified field is in this result set. - - This is equal to doing C{field in results.as_dict} - - @param field: The field to check - @type field: str - @return: True if it exists - @rtype: bool - """ - return field in self._dict - - def __len__(self): - """ - Returns the number of events in this result set - - @return: The event count - @rtype: int - """ - return len(self._list) - - @property - def as_dict(self): - """ - This result set as a dictionary. The format is specified in the - documentation for the class. - - This is a copy of the results so you can do whatever you want to do with - it. - - @rtype: dict - """ - return copy.deepcopy(self._dict) - - @property - def as_list(self): - """ - This result set as a list. The format is specified in the documentation - for the class - - This is a copy of the results so you can do whatever you want to do with - it. - - @rtype: list - """ - return copy.deepcopy(self._list) - - @property - def fields(self): - """ - The fields in this result set as a list. - - This is a copy of the fields so you can do whatever you want to do with - it. 
- - @rtype: list - """ - return list(self._dict.keys()) - - @property - def _dict(self): - """ - Returns the results as a dictionary. - - It caches the results so that the second call is very fast - - @rtype: dict - """ - if not self._dict_cache: - self._dict_cache = _list_to_dictionary(self._list) - return self._dict_cache - - -def _list_to_dictionary(events): - """ - Converts a list of events to a dictionary of fields. - - Input format:: - [ - { - 'field1': value1, - 'field2': value2, - ... - }, - - { - 'field2': value3, - 'field3': value4, - ... - }, - - ... - ] - - Output format:: - { - 'field1': [value1, None, ...], - 'field2': [value2, value3, ...], - 'field3': [None, value4, ...], - ... - } - - If not all fields are present in every event None will be inserted instead. - - @param events: The events as a list of dictionaries - @type events: list(dict(field(str): value(str))) - - @return: The events as a dictionary. - @rtype: dict(field(str): values(list(str))) - """ - fields = _get_fields(events) - r = {} - for event in events: - for field in fields: - if not field in r: - r[field] = [] - r[field].append(event.get(field, None)) - return r - - -def _get_fields(events): - """ - Returns a list of all fields in the given event set. - - @param events: The events as a list of dictionaries. - @type events: list(dict(str: str)) - @return: All the fields. - @rtype: set(str) - """ - fields = set() - for event in events: - fields.update(list(event.keys())) - return fields diff --git a/pytest_splunk_addon/helmut/manager/jobs/sdk/__init__.py b/pytest_splunk_addon/helmut/manager/jobs/sdk/__init__.py deleted file mode 100644 index 94d57018b..000000000 --- a/pytest_splunk_addon/helmut/manager/jobs/sdk/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -This module is a specialized version of the search_manager module for the SDK - -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -from pytest_splunk_addon.helmut.manager.jobs import Jobs, JobNotFound -from pytest_splunk_addon.helmut.manager.jobs.sdk.job import SDKJobWrapper - - -class SDKJobsWrapper(Jobs): - @property - def _service(self): - return self.connector.service - - def create(self, query, **kwargs): - self.logger.info("Creating job with query: %s" % query) - job = self._service.jobs.create(query, **kwargs) - return SDKJobWrapper(self.connector, job) - - def __contains__(self, sid): - for job in self: - if job.sid == sid: - return True - return False - - def __getitem__(self, sid): - for job in self: - if job.sid == sid: - return job - raise JobNotFound(sid) - - # Required from Collection - - def items(self): - jobs = self._service.jobs - return [SDKJobWrapper(self.connector, job) for job in jobs] diff --git a/pytest_splunk_addon/helmut/manager/jobs/sdk/job.py b/pytest_splunk_addon/helmut/manager/jobs/sdk/job.py deleted file mode 100644 index c44b7e9ff..000000000 --- a/pytest_splunk_addon/helmut/manager/jobs/sdk/job.py +++ /dev/null @@ -1,272 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -import splunklib.results as results - -from pytest_splunk_addon.helmut.manager.jobs.job import Job -from pytest_splunk_addon.helmut.manager.jobs.results import Results - - -class SDKJobWrapper(Job): - def __init__(self, sdk_connector, sdk_job): - """ - The constructor of the SDKJobWrapper. - - @param sdk_connector: The SDKConnector object used to connect to Splunk. - @type param: SDKConnector - @param sdk_job: The Job object from the Python SDK. - @type param: splunklib.client.Job - """ - self._raw_sdk_job = sdk_job - - super(SDKJobWrapper, self).__init__(sdk_connector) - - @property - def raw_sdk_job(self): - return self._raw_sdk_job - - @property - def sid(self): - return self.raw_sdk_job.sid - - def __str__(self): - return "SDK Job with SID {sid}".format(sid=self.sid) - - # Endpoint specific - - def get_request(self): - return self.raw_sdk_job.refresh().content.request - - def get_event_count(self): - return int(self.raw_sdk_job.refresh().content.eventCount) - - def get_scan_count(self): - return int(self.raw_sdk_job.refresh().content.scanCount) - - def get_event_available_count(self): - return int(self.raw_sdk_job.refresh().content.eventAvailableCount) - - def get_result_count(self): - return int(self.raw_sdk_job.refresh().content.resultCount) - - def get_cursor_time(self): - return self.raw_sdk_job.refresh().content.cursorTime - - def is_done(self): - return self.raw_sdk_job.is_done() - - def is_failed(self): - return 
self.raw_sdk_job.refresh().content.isFailed == "1" - - def is_finalized(self): - return self.raw_sdk_job.refresh().content.isFinalized == "1" - - def is_saved(self): - return self.raw_sdk_job.refresh().content.isSaved == "1" - - def get_keywords(self): - return self.raw_sdk_job.refresh().content.keywords - - def is_paused(self): - return self.raw_sdk_job.refresh().content.isPaused == "1" - - def result_is_streaming(self): - return self.raw_sdk_job.refresh().content.resultIsStreaming == "1" - - def get_messages(self): - return self.raw_sdk_job.refresh().content.messages - - def get_ttl(self): - return int(self.raw_sdk_job.refresh().content.ttl) - - def set_ttl(self, value): - self.logger.info("Setting job %s TTL to: %s" % (self.sid, value)) - self.raw_sdk_job.set_ttl(value) - return self - - def get_error(self): - return self.raw_sdk_job.refresh().content.error - - def get_earliest_time(self): - return self.raw_sdk_job.refresh().content.earliestTime - - def get_latest_time(self): - return self.raw_sdk_job.refresh().content.latestTime - - def get_run_duration(self): - return float(self.raw_sdk_job.refresh().content.runDuration) - - def get_event_search(self): - return self.raw_sdk_job.refresh().content.eventSearch - - def event_is_streaming(self): - return self.raw_sdk_job.refresh().content.eventIsStreaming == "1" - - def get_event_sorting(self): - val = self.raw_sdk_job.refresh().content.eventSorting - return None if val == "none" else val - - def get_report_search(self): - return self.raw_sdk_job.refresh().content.reportSearch - - def get_remote_search(self): - return self.raw_sdk_job.refresh().content.remoteSearch - - def event_is_truncated(self): - return self.raw_sdk_job.refresh().content.eventIsTruncated == "1" - - def get_label(self): - return self.raw_sdk_job.refresh().content.label - - def get_dispatch_state(self): - return self.raw_sdk_job.refresh().content.dispatchState - - def is_saved_search(self): - return 
self.raw_sdk_job.refresh().content.isSavedSearch == "1" - - def is_zombie(self): - return self.raw_sdk_job.refresh().content.isZombie == "1" - - def get_search_providers(self): - return self.raw_sdk_job.refresh().content.searchProviders - - def get_status_buckets(self): - return int(self.raw_sdk_job.refresh().content.statusBuckets) - - def get_done_progress(self): - return self.raw_sdk_job.refresh().content.doneProgress - - # The methods below are forwarding calls to the job - - def cancel(self): - self.logger.info("Cancelling job, SID: %s" % self.sid) - self.raw_sdk_job.cancel() - return self - - def disable_preview(self): - self.logger.info("Disabling preview for job, SID: %s" % self.sid) - self.raw_sdk_job.disable_preview() - return self - - def get_events(self, **kwargs): - response = self.raw_sdk_job.events(**kwargs) - return _build_results_from_sdk_response(response) - - def get_events_dict(self, **kwargs): - response = self.raw_sdk_job.events(**kwargs) - return _build_events_from_sdk_response(response) - - def get_results_dict(self, **kwargs): - response = self.raw_sdk_job.results(**kwargs) - return _build_results_dict_from_sdk_response(response) - - def enable_preview(self): - self.logger.info("Enabling preview for job, SID: %s" % self.sid) - self.raw_sdk_job.enable_preview() - return self - - def finalize(self): - self.logger.info("Finalizing job, SID: %s" % self.sid) - self.raw_sdk_job.finalize() - return self - - def pause(self): - self.logger.info("Pausing job, SID: %s" % self.sid) - self.raw_sdk_job.pause() - return self - - def unpause(self): - self.logger.info("Unpausing job, SID: %s" % self.sid) - self.raw_sdk_job.unpause() - return self - - def get_preview(self, **kwargs): - response = self.raw_sdk_job.preview(**kwargs) - return _build_results_from_sdk_response(response) - - def get_results(self, **kwargs): - response = self.raw_sdk_job.results(**kwargs) - return _build_results_from_sdk_response(response) - - def get_search_log(self, **kwargs): - 
return self.raw_sdk_job.searchlog(**kwargs) - - def set_priority(self, value): - self.logger.info("Setting priority of job %s to: %s" % (self.sid, value)) - self.raw_sdk_job.set_priority(value) - return self - - # unfinished see JIRA AUTO-62 - def get_summary(self, **kwargs): - self.logger.warn("get_summary() is not fully implemented.") - return self.raw_sdk_job.refresh().summary(**kwargs) - - # unfinished see JIRA AUTO-62 - def get_timeline(self, **kwargs): - self.logger.warn("get_timeline() is not fully implemented.") - return self.raw_sdk_job.refresh().timeline(**kwargs) - - def touch(self): - self.logger.info("Touching job, SID: %s" % self.sid) - self.raw_sdk_job.touch() - return self - - -def _build_results_from_sdk_response(response): - """ - Get results from the SDK and return them. - """ - reader = results.ResultsReader(response) - events = [] - for result in reader: - events.append(_build_event_from_results_reader(result)) - return Results(events) - - -def _build_results_dict_from_sdk_response(response): - """ - Get results from the SDK and return them. - """ - reader = results.ResultsReader(response) - resultset = [] - for result in reader: - resultset.append(result) - return resultset - - -def _build_events_from_sdk_response(response): - """ - Get results from the SDK and return them. - """ - reader = results.ResultsReader(response) - events = [] - for result in reader: - events.append(_build_event_from_results_reader(result)) - return events - - -def _build_event_from_results_reader(reader): - """ - Creates an event as a dict from an event in the SDK. - """ - event = {} - for field in list(reader.keys()): - event[field] = reader[field] - return event diff --git a/pytest_splunk_addon/helmut/manager/object.py b/pytest_splunk_addon/helmut/manager/object.py deleted file mode 100644 index 1209c3a0c..000000000 --- a/pytest_splunk_addon/helmut/manager/object.py +++ /dev/null @@ -1,35 +0,0 @@ -# -# Copyright 2021 Splunk Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import ABCMeta - -from future.utils import with_metaclass - -from pytest_splunk_addon.helmut.log import Logging - - -class ItemFromManager(with_metaclass(ABCMeta, Logging)): - def __init__(self, connector): - self._connector = connector - Logging.__init__(self) - - @property - def connector(self): - return self._connector diff --git a/pytest_splunk_addon/helmut/manager/roles/__init__.py b/pytest_splunk_addon/helmut/manager/roles/__init__.py deleted file mode 100644 index 28feb7265..000000000 --- a/pytest_splunk_addon/helmut/manager/roles/__init__.py +++ /dev/null @@ -1,119 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager import Manager -from pytest_splunk_addon.helmut.misc.collection import Collection -from pytest_splunk_addon.helmut.misc.manager_utils import ( - create_wrapper_from_connector_mapping, -) - - -class Roles(Manager, Collection): - """ - This class represents the Roles endpoint in REST which is a collection of - L{Role}es. - """ - - def __init__(self, connector): - """ - Roles' constructor. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - Manager.__init__(self, connector) - Collection.__init__(self) - - def __new__(cls, connector): - """ - The function called when creating a new Roles object. - An internal map stores mappings from connector type to corresponding - Roles subclass, making sure that the appropriate Roles class is - evoked. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - mappings = _CONNECTOR_TO_WRAPPER_MAPPINGS - return create_wrapper_from_connector_mapping(cls, connector, mappings) - - @abstractmethod - def create_role(self, role_name, parent_role_name): - """ - Create a role. - - @param role_name: The name of the new role. - @type role_name: String - @param parent_role_name: The name of the role's parent. - @type parent_role_name: String - """ - pass - - @abstractmethod - def delete_role(self, role_name): - """ - Delete a role. - - @param role_name: The name of the role to be deleted. - @type role_name: String - """ - pass - - @abstractmethod - def update_role(self, role_name, **kwargs): - """ - Update a role. - - @param role_name: The name of the role to be updated. - @type role_name: String - @param kwargs: The new arguments for role to be updated. - @type kwargs: kwargs - """ - pass - - @abstractmethod - def __getitem__(self, role_name): - """ - Retrieve an role. 
- - @param role_name: Role name. - @type role_name: L{Role} - """ - pass - - -class RoleNotFound(RuntimeError): - def __init__(self, role_name): - self.role_name = role_name - super(RoleNotFound, self).__init__(self._error_message) - - @property - def _error_message(self): - f = "Could not find role with name {name}" - return f.format(name=self.role_name) - - -# We need to do this at the bottom to avoid import errors -from pytest_splunk_addon.helmut.connector.sdk import SDKConnector -from pytest_splunk_addon.helmut.manager.roles.sdk import SDKRolesWrapper - -_CONNECTOR_TO_WRAPPER_MAPPINGS = {SDKConnector: SDKRolesWrapper} diff --git a/pytest_splunk_addon/helmut/manager/roles/role.py b/pytest_splunk_addon/helmut/manager/roles/role.py deleted file mode 100644 index 81065e999..000000000 --- a/pytest_splunk_addon/helmut/manager/roles/role.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager.object import ItemFromManager - - -class Role(ItemFromManager): - """ - The Role class represents an role in Splunk. - """ - - @abstractmethod - def edit(self, **kwargs): - """ - Edit this role. 
Check REST documentation to see what options are - available at - http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTrole - """ - pass diff --git a/pytest_splunk_addon/helmut/manager/roles/sdk/__init__.py b/pytest_splunk_addon/helmut/manager/roles/sdk/__init__.py deleted file mode 100644 index 89dc96042..000000000 --- a/pytest_splunk_addon/helmut/manager/roles/sdk/__init__.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -from splunklib.client import HTTPError - -from pytest_splunk_addon.helmut.manager.roles import RoleNotFound -from pytest_splunk_addon.helmut.manager.roles import Roles -from pytest_splunk_addon.helmut.manager.roles.sdk.role import SDKRoleWrapper - - -class SDKRolesWrapper(Roles): - """ - The Roles subclass that wraps the Splunk Python SDK's Roles object. - It basically contains a collection of L{SDKRoleWrapper}s. - - As a part of 6.3, copy_role returns disabled parameter. 
This param is not supported when making a REST call to authorization/roles to update a role - Hence, deleting this param from copied_role - """ - - @property - def _service(self): - return self.connector.service - - def create_role(self, role_name, parent_role_name=None): - try: - self.logger.info("Creating role %s" % role_name) - self._service.roles.create(role_name) - if not parent_role_name: - return - copied_role = self[parent_role_name].raw_sdk_role.content - - for key, value in list(copied_role.items()): - if "imported_" in key or value is None: - copied_role.pop(key) - if "imported_capabilities" == key: - copied_role["capabilities"] += value - - if "disabled" in key: - del copied_role["content"]["disabled"] - - self._service.roles[role_name].update(**copied_role) - except HTTPError as err: - # Role already exists - if not err.status == 409: - raise - self.logger.warn( - "Role %s already existed. HTTPError: %s" % (role_name, err) - ) - - def delete_role(self, role_name): - self.logger.info("Deleting role %s" % role_name) - self._service.roles.delete(role_name) - - def update_role(self, role_name, **kwargs): - self.logger.info("Updating role %s with: %s" % (role_name, kwargs)) - self._service.roles[role_name].update(**kwargs).refresh() - - def __getitem__(self, role_name): - for role in self: - if role.name == role_name: - return role - raise RoleNotFound(role_name) - - def __contains__(self, role_name): - for role in self: - if role.name == role_name: - return True - return False - - def items(self): - roles = self._service.roles - return [SDKRoleWrapper(self.connector, role) for role in roles] diff --git a/pytest_splunk_addon/helmut/manager/roles/sdk/role.py b/pytest_splunk_addon/helmut/manager/roles/sdk/role.py deleted file mode 100644 index 33bdccf2c..000000000 --- a/pytest_splunk_addon/helmut/manager/roles/sdk/role.py +++ /dev/null @@ -1,64 +0,0 @@ -# -# Copyright 2021 Splunk Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -from pytest_splunk_addon.helmut.manager.roles.role import Role - - -class SDKRoleWrapper(Role): - """ - The L{Role} subclass corresponding to an Role object in the - Splunk Python SDK. - """ - - def __init__(self, sdk_connector, sdk_role): - """ - SDKRoleWrapper's constructor. - - @param sdk_connector: The connector which talks to Splunk through the - Splunk Python SDK. - @type sdk_connector: SDKConnector - @param sdk_role: The name of the new role. - @type sdk_role: String - """ - super(SDKRoleWrapper, self).__init__(sdk_connector) - self._raw_sdk_role = sdk_role - - @property - def raw_sdk_role(self): - return self._raw_sdk_role - - @property - def _service(self): - """ - Return the service associated with - """ - return self.connector.service - - @property - def name(self): - """ - The name of the role. - """ - return self.raw_sdk_role.name - - def edit(self, **kwargs): - self.logger.info("Editing role %s with: %s" % (self.name, kwargs)) - self.raw_sdk_role.update(**kwargs).refresh() diff --git a/pytest_splunk_addon/helmut/manager/saved_searches/__init__.py b/pytest_splunk_addon/helmut/manager/saved_searches/__init__.py deleted file mode 100644 index e42b3f4a2..000000000 --- a/pytest_splunk_addon/helmut/manager/saved_searches/__init__.py +++ /dev/null @@ -1,91 +0,0 @@ -# -# Copyright 2021 Splunk Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager import Manager -from pytest_splunk_addon.helmut.misc.collection import Collection -from pytest_splunk_addon.helmut.misc.manager_utils import ( - create_wrapper_from_connector_mapping, -) - - -class SavedSearches(Manager, Collection): - def __init__(self, connector): - """ - Constructor for SavedSearches. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - Manager.__init__(self, connector) - Collection.__init__(self) - - def __new__(cls, connector): - """ - The function called when creating a new SavedSearches object. - An internal map stores mappings from connector type to corresponding - SavedSearches subclass, making sure that the appropriate SavedSearches class is - evoked. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - mappings = _CONNECTOR_TO_WRAPPER_MAPPINGS - return create_wrapper_from_connector_mapping(cls, connector, mappings) - - @abstractmethod - def create(self, saved_search_name, query, **kwargs): - """ - Create a saved search. - - @param saved_search_name: The name of the new saved search. - @type saved_search_name: String - @param query: The actual search to be saved. - @type query: String - @param **kwargs: Any other settings for the saved search. 
- @type **kwargs: Dictionary - """ - pass - - @abstractmethod - def __getitem__(self, saved_search_name): - """ - Retrieve a saved search. - - @param saved_searc_name: The name of the saved search. - @type saved_search_name: SavedSearch - """ - pass - - -class SavedSearchNotFound(RuntimeError): - def __init__(self, saved_search_name): - self.saved_search_name = saved_search_name - super(SavedSearchNotFound, self).__init__(self._error_message) - - @property - def _error_message(self): - f = "Could not find saved search with name {name}" - return f.format(name=self.saved_search_name) - - -# We need to do this at the bottom to avoid import errors -from pytest_splunk_addon.helmut.connector.sdk import SDKConnector -from pytest_splunk_addon.helmut.manager.saved_searches.sdk import ( - SDKSavedSearchesWrapper, -) - -_CONNECTOR_TO_WRAPPER_MAPPINGS = {SDKConnector: SDKSavedSearchesWrapper} diff --git a/pytest_splunk_addon/helmut/manager/saved_searches/saved_search.py b/pytest_splunk_addon/helmut/manager/saved_searches/saved_search.py deleted file mode 100644 index ac4ef4b06..000000000 --- a/pytest_splunk_addon/helmut/manager/saved_searches/saved_search.py +++ /dev/null @@ -1,74 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager.object import ItemFromManager - - -class SavedSearch(ItemFromManager): - """ - The SavedSearch class represents an saved search in Splunk. 
- """ - - @abstractmethod - def run(self, **kwargs): - """ - Run this saved search. - @param **kwargs: Any other settings for running this saved search. - @type **kwargs: Dictionary - """ - pass - - @abstractmethod - def edit(self, query=None, **kwargs): - """ - Edit this saved search. - @param query: The query that this saved search is supposed to run. Remains unchanged if no value is given. - @type query: String - @param **kwargs: Any other settings for the saved search. - @type **kwargs: Dictionary - """ - pass - - @abstractmethod - def disable(self): - """ - Disable this saved search. - """ - pass - - @abstractmethod - def enable(self): - """ - Enable this saved search. - """ - pass - - @abstractmethod - def get_artifacts(self): - """ - Return the artifacts associated with this saved search. - - @return: A list of the jobs associated with the saved searches. - @rtype: list - """ - pass - - def delete(self): - """ - Delete this saved search. - """ - pass diff --git a/pytest_splunk_addon/helmut/manager/saved_searches/sdk/__init__.py b/pytest_splunk_addon/helmut/manager/saved_searches/sdk/__init__.py deleted file mode 100644 index 4fb0a709c..000000000 --- a/pytest_splunk_addon/helmut/manager/saved_searches/sdk/__init__.py +++ /dev/null @@ -1,101 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from future.utils import raise_ -from splunklib.client import HTTPError - -from pytest_splunk_addon.helmut.manager.saved_searches import SavedSearchNotFound -from pytest_splunk_addon.helmut.manager.saved_searches import SavedSearches -from pytest_splunk_addon.helmut.manager.saved_searches.sdk.saved_search import ( - SDKSavedSearchWrapper, -) - - -class SDKSavedSearchesWrapper(SavedSearches): - """ - The SavedSearches subclass that wraps the Splunk Python SDK's SavedSearches object. - It basically contains a collection of L{SDKSavedSearchWrapper}s. - """ - - @property - def _service(self): - """ - The service associated with this connector. - """ - return self.connector.service - - def create(self, saved_search_name, query, **kwargs): - """ - Create a saved search. - - @param saved_search_name: The name of the new saved search. - @type saved_search_name: String - @param query: The actual search to be saved. - @type query: String - @param **kwargs: Any other settings for the saved search. - @type **kwargs: Dictionary - """ - try: - self.logger.info("Creating saved search '%s'" % saved_search_name) - self.connector.service.saved_searches.create( - saved_search_name, query, **kwargs - ) - except HTTPError as err: - # Saved search already exists - if not err.status == 409: - raise - self.logger.warn( - "Saved search '%s' already exists. HTTPError: %s" - % (saved_search_name, err) - ) - - def __getitem__(self, saved_search_name): - """ - Retrieve a saved search. - - @param saved_searc_name: The name of the saved search. - @type saved_search_name: SavedSearch - """ - for saved_search in self: - if saved_search.name == saved_search_name: - return saved_search - raise_(SavedSearchNotFound, saved_search_name) - - def __contains__(self, saved_search_name): - """ - Determines if a saved search with a particular name exists. - - @param key: The name of the saved search to be found. - @param type: String - @return: Whether or not the saved search exists. 
- @rtype: boolean - """ - for saved_search in self: - if saved_search.name == saved_search_name: - return True - return False - - def items(self): - """ - Returns a list of saved searches. - - @return: A list of saved searches. - @rtype: list - """ - saved_searches = self._service.saved_searches - return [ - SDKSavedSearchWrapper(self.connector, saved_search) - for saved_search in saved_searches - ] diff --git a/pytest_splunk_addon/helmut/manager/saved_searches/sdk/saved_search.py b/pytest_splunk_addon/helmut/manager/saved_searches/sdk/saved_search.py deleted file mode 100644 index d95f1c336..000000000 --- a/pytest_splunk_addon/helmut/manager/saved_searches/sdk/saved_search.py +++ /dev/null @@ -1,105 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from pytest_splunk_addon.helmut.manager.jobs.sdk import SDKJobWrapper -from pytest_splunk_addon.helmut.manager.saved_searches.saved_search import SavedSearch - - -class SDKSavedSearchWrapper(SavedSearch): - """ - The L{SavedSearch} subclass corresponding to an SavedSearch object in the - Splunk Python SDK. - """ - - def __init__(self, sdk_connector, sdk_saved_search): - """ - SDKSavedSearchWrapper's constructor. - - @param sdk_connector: The connector which talks to Splunk through the - Splunk Python SDK. - @type sdk_connector: SDKConnector - @param sdk_saved_search: The name of the new saved search. 
- @type sdk_saved_search: String - """ - super(SDKSavedSearchWrapper, self).__init__(sdk_connector) - self._raw_sdk_saved_search = sdk_saved_search - - def run(self, **kwargs): - """ - Run this saved search. - @param **kwargs: Any other settings for running this saved search. - @type **kwargs: Dictionary - """ - self.logger.info("Running saved search %s" % self.name) - return SDKJobWrapper( - self.connector, self._raw_sdk_saved_search.dispatch(**kwargs) - ) - - def edit(self, query=None, **kwargs): - """ - Edit this saved search. - @param query: The query that this saved search is supposed to run. Remains unchanged if no value is given. - @type query: String - @param **kwargs: Any other settings for the saved search. - @type **kwargs: Dictionary - """ - self.logger.info("Editing saved search %s" % self.name) - self._raw_sdk_saved_search.update(query, **kwargs) - - def disable(self): - """ - Disable this saved search. - """ - self.logger.info("Disabling saved search %s" % self.name) - self._raw_sdk_saved_search.disable() - - def delete(self): - """ - Delete this saved search. - """ - self.logger.info("Deleting saved search %s" % self.name) - self._raw_sdk_saved_search.delete() - - def enable(self): - """ - Enable this saved search. - """ - self.logger.info("Enabling saved search %s" % self.name) - self._raw_sdk_saved_search.enable() - - def get_artifacts(self): - """ - Return the artifacts associated with this saved search. - """ - results = self._raw_sdk_saved_search.history() - jobs = [] - for result in results: - job = SDKJobWrapper(self.connector, result) - jobs.append(job) - return jobs - - @property - def name(self): - """ - The name of the saved_search. - """ - return self._raw_sdk_saved_search.name - - @property - def content(self): - """ - The contents of the saved_search. 
- """ - return self._raw_sdk_saved_search.content diff --git a/pytest_splunk_addon/helmut/manager/users/__init__.py b/pytest_splunk_addon/helmut/manager/users/__init__.py deleted file mode 100644 index b12f9e2a7..000000000 --- a/pytest_splunk_addon/helmut/manager/users/__init__.py +++ /dev/null @@ -1,111 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager import Manager -from pytest_splunk_addon.helmut.misc.collection import Collection -from pytest_splunk_addon.helmut.misc.manager_utils import ( - create_wrapper_from_connector_mapping, -) - - -class Users(Manager, Collection): - """ - This class represents the Users endpoint in REST which is a collection of - L{User}es. - """ - - def __init__(self, connector): - """ - Users' constructor. - - @param connector: The connector through which Splunk is reached. - @type connector: Connector - """ - Manager.__init__(self, connector) - Collection.__init__(self) - - def __new__(cls, connector): - """ - The function called when creating a new Users object. - An internal map stores mappings from connector type to corresponding - Users subclass, making sure that the appropriate Users class is - evoked. - - @param connector: The connector through which Splunk is reached. 
- @type connector: Connector - """ - mappings = _CONNECTOR_TO_WRAPPER_MAPPINGS - return create_wrapper_from_connector_mapping(cls, connector, mappings) - - @abstractmethod - def create_user(self, username, password, roles, **kwargs): - """ - Create an user. - - @param username: The name of the new user. - @type username: String - @param password: The password of the new user. - @type password: String - @param roles: The role(s) of the new user. - @type roles: String or list - @param kwargs: The arguments the new user. - @type kwargs: kwargs - """ - pass - - @abstractmethod - def delete_user(self, username): - """ - Delete an user. - - @param username: The name of the user to be deleted. - @type username: String - """ - pass - - @abstractmethod - def __getitem__(self, username): - """ - Retrieve an user. - - @param username: User's name. - @type username: L{User} - """ - pass - - -class UserNotFound(RuntimeError): - def __init__(self, user_name): - self.user_name = user_name - super(UserNotFound, self).__init__(self._error_message) - - @property - def _error_message(self): - f = "Could not find user with name {name}" - return f.format(name=self.user_name) - - -# We need to do this at the bottom to avoid import errors -from pytest_splunk_addon.helmut.connector.sdk import SDKConnector -from pytest_splunk_addon.helmut.manager.users.sdk import SDKUsersWrapper - -_CONNECTOR_TO_WRAPPER_MAPPINGS = {SDKConnector: SDKUsersWrapper} diff --git a/pytest_splunk_addon/helmut/manager/users/sdk/__init__.py b/pytest_splunk_addon/helmut/manager/users/sdk/__init__.py deleted file mode 100644 index a3d201290..000000000 --- a/pytest_splunk_addon/helmut/manager/users/sdk/__init__.py +++ /dev/null @@ -1,91 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from splunklib.client import HTTPError - -from pytest_splunk_addon.helmut.manager.users import UserNotFound -from pytest_splunk_addon.helmut.manager.users import Users -from pytest_splunk_addon.helmut.manager.users.sdk.user import SDKUserWrapper - - -class SDKUsersWrapper(Users): - """ - The Users subclass that wraps the Splunk Python SDK's Users object. - It basically contains a collection of L{SDKUserWrapper}s. - """ - - @property - def _service(self): - return self.connector.service - - def create_user(self, username, password, roles, **kwargs): - self.logger.info( - "Creating user. Username: %s. Password: %s. Role: %s" - % (username, password, roles) - ) - try: - kwargs["password"] = password - kwargs["roles"] = roles - - return SDKUserWrapper( - username, self.connector.service.users.create(username, **kwargs) - ) - except HTTPError as err: - # User already exists - if not err.status == 400: - raise - self.logger.warn("User already exists. HTTPError: %s" % err) - - def delete_user(self, user_or_username): - """ - Delete an user. - - @param user_name: The name of the user to be deleted or a SDKUserWrapper - object. - @type user_name: String or SDKUserWrapper - """ - # If attribute name exists it is (probably) a SDKUserWrapper object - try: - username = user_or_username.name - # If not it is (hopefully) the name of the User - except AttributeError: - self.logger.debug( - "Value given to delete_user() had no attribute" - " 'name'. 
Assuming it is of type 'str'." - ) - username = user_or_username - self.logger.info("Deleting user %s." % username) - self.connector.service.users.delete(username) - - def __getitem__(self, username): - for user in self: - if user.name == username: - return user - raise UserNotFound(username) - - def __contains__(self, username): - for user in self: - if user.name == username: - return True - return False - - def items(self): - users = self._service.users - return [SDKUserWrapper(self.connector, user) for user in users] diff --git a/pytest_splunk_addon/helmut/manager/users/sdk/user.py b/pytest_splunk_addon/helmut/manager/users/sdk/user.py deleted file mode 100644 index be176fda6..000000000 --- a/pytest_splunk_addon/helmut/manager/users/sdk/user.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -from pytest_splunk_addon.helmut.manager.users.user import User - - -class SDKUserWrapper(User): - """ - The L{User} subclass corresponding to an User object in the - Splunk Python SDK. - """ - - def __init__(self, sdk_connector, sdk_user, username=None): - """ - SDKUserWrapper's constructor. - - @param username: The name of the User which this object represents. - @type username: String - @param sdk_connector: The connector which talks to Splunk through the - Splunk Python SDK. 
- @type sdk_connector: SDKConnector - @param sdk_user: The splunklib.Entity which represent an User in the - Python SDK. - """ - if username is None: - username = sdk_user.name - super(SDKUserWrapper, self).__init__(username, sdk_connector) - self._raw_sdk_user = sdk_user - - @property - def raw_sdk_user(self): - return self._raw_sdk_user - - @property - def _service(self): - """ - Return the service associated with - """ - return self.connector.service - - def full_name(self): - return self.raw_sdk_user.content.realname diff --git a/pytest_splunk_addon/helmut/manager/users/user.py b/pytest_splunk_addon/helmut/manager/users/user.py deleted file mode 100644 index 431199eee..000000000 --- a/pytest_splunk_addon/helmut/manager/users/user.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from abc import abstractmethod - -from pytest_splunk_addon.helmut.manager.object import ItemFromManager - - -class User(ItemFromManager): - """ - A User is the means by which you login to Splunk via. - This class represents a User object and the different - functions you have to manipulate that User object. - """ - - def __init__(self, username, connector): - """ - The constructor of the User class. 
- - @param username: The username of the Splunk User - @type username: String - @param connector: The connector which talks to Splunk. - @type connector: _Connector - """ - self._name = username - super(User, self).__init__(connector) - - @abstractmethod - def full_name(self): - pass - - @property - def name(self): - """ - The name of the user. - """ - return self._name diff --git a/pytest_splunk_addon/helmut/misc/__init__.py b/pytest_splunk_addon/helmut/misc/__init__.py deleted file mode 100644 index 60fcfce3f..000000000 --- a/pytest_splunk_addon/helmut/misc/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" diff --git a/pytest_splunk_addon/helmut/misc/collection.py b/pytest_splunk_addon/helmut/misc/collection.py deleted file mode 100644 index fa2901adb..000000000 --- a/pytest_splunk_addon/helmut/misc/collection.py +++ /dev/null @@ -1,62 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -from abc import ABCMeta, abstractmethod -from builtins import object - -from future.utils import with_metaclass - - -class Collection(with_metaclass(ABCMeta, object)): - """ - A Collection metaclass that specifies what functions a collection in the - Helmut framework must implement. - """ - - def __call__(self): - return list(self.items()) - - def __len__(self): - return len(list(self.items())) - - def __iter__(self): - for item in list(self.items()): - yield item - - @abstractmethod - def items(self): - """ - Return a collection of all the contained objects. It is up to the - subclass to decide whether this collection is a list, map or of any - other kind. - - @return: A collection of all the items contained. - """ - pass - - @abstractmethod - def __contains__(self, item): - """ - Return boolean whether item is contained in Collection. - - @param item: The item which is checked if contained. - """ - pass diff --git a/pytest_splunk_addon/helmut/misc/manager_utils.py b/pytest_splunk_addon/helmut/misc/manager_utils.py deleted file mode 100644 index 2c55e2df1..000000000 --- a/pytest_splunk_addon/helmut/misc/manager_utils.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" -from pytest_splunk_addon.helmut.exceptions import UnsupportedConnectorError - - -def create_wrapper_from_connector_mapping(base_class, connector, mappings): - wrapper = get_wrapper_class_from_connector_mapping(connector, mappings) - return super(base_class, base_class).__new__(wrapper) - - -def get_wrapper_class_from_connector_mapping(connector, mappings): - cls = connector.__class__ - if cls not in mappings: - raise UnsupportedConnectorError - return mappings[cls] diff --git a/pytest_splunk_addon/helmut/splunk/__init__.py b/pytest_splunk_addon/helmut/splunk/__init__.py deleted file mode 100644 index 956e41e67..000000000 --- a/pytest_splunk_addon/helmut/splunk/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -This module contains everything regarding a Splunk installation. 
- -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -__all__ = ["local", "ssh"] diff --git a/pytest_splunk_addon/helmut/splunk/base.py b/pytest_splunk_addon/helmut/splunk/base.py deleted file mode 100644 index 62937dc9a..000000000 --- a/pytest_splunk_addon/helmut/splunk/base.py +++ /dev/null @@ -1,672 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -This module has things regarding a generic Splunk instance. - -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-12-05 -""" -import time -from abc import ABCMeta, abstractmethod, abstractproperty - -from future.utils import with_metaclass - -from pytest_splunk_addon.helmut.connector.base import Connector -from pytest_splunk_addon.helmut.connector.rest import RESTConnector -from pytest_splunk_addon.helmut.connector.sdk import SDKConnector -from pytest_splunk_addon.helmut.exceptions import UnsupportedConnectorError -from pytest_splunk_addon.helmut.exceptions.command_execution import ( - CommandExecutionFailure, -) -from pytest_splunk_addon.helmut.log import Logging -from pytest_splunk_addon.helmut.util.rip import RESTInPeace - - -class Splunk(with_metaclass(ABCMeta, Logging)): - """ - Represents a Splunk instance. - - The Splunk instance may be on the same machine or a remote one. - - This class is abstract and cannot be instantiated directly. 
- - @ivar connector_factory: The factory to use when creating the connector. - @type connector_factory: function - @ivar _default_connector: The default connector. Is None at first and is - later created when L{default_connector} is used. - @type _default_connector: L{Connector} - @ivar _start_listeners: A collection of start listeners - @type _start_listeners: set - @ivar name: The name of this instance. Defaults to the ID of this object. - @type name: str - @ivar _logger: The logger this instance uses. - """ - - _username = "admin" - _password = "changeme" - - _CONNECTOR_TYPE_TO_CLASS_MAPPINGS = { - Connector.SDK: SDKConnector, - Connector.REST: RESTConnector, - } - _is_an_universal_forwarder = False - - def __init__(self, name): - """ - Creates a new Splunk instance. - """ - self._default_connector = None - self._start_listeners = set() - self._connectors = {} - - self._name = name or id(self) - super(Splunk, self).__init__() - self.logger.debug("Helmut Splunk created:{splunk}".format(splunk=self)) - - def __str__(self): - """ - Casts this instance to a string. - - @return: The string representation of this object. - @rtype: str - """ - return self._str_format.format(**self._str_format_arguments) - - @property - def _logger_name(self): - """ - :return: constructed logger name as {cls}({name}) - """ - return "{cls}({name})".format(cls=self.__class__.__name__, name=self.name) - - @abstractproperty - def _str_format(self): - """ - The format to use when casting this instance to a string. - - @rtype: str - """ - - @abstractproperty - def _str_format_arguments(self): - """ - The arguments for the L{_str_format} to use when casting this instance - to a string. - - @rtype: dict - """ - - @property - def name(self): - """ - The name of this instance. 
- - @rtype: str - """ - return self._name - - @property - def username(self): - return self._username - - @username.setter - def username(self, value): - self._username = value - - @property - def password(self): - return self._password - - @password.setter - def password(self, value): - self._password = value - - def register_start_listener(self, listener): - """ - Adds a listener that will be notified when splunk (re)starts. - - This can be used to re-read values from conf files or recreate things - that become invalid when Splunk restarts such as auth tokens. - - The listener must be a function (respond to C{__call__} to be more - precise) - - @param listener: The start listener - @raise InvalidStartListener: If the listener is not callable. - """ - _validate_start_listener(listener) - self._start_listeners.add(listener) - - def unregister_start_listener(self, listener): - """ - Removes the specified start listener. - - If the listener is not in the list this call has no effect. - - @param listener: The listener to remove - """ - try: - self._start_listeners.remove(listener) - except KeyError: - pass - - def create_connector( - self, contype=None, username=None, password=None, *args, **kwargs - ): - """ - Creates and returns a new connector of the type specified or - SDK connector if none specified - - This connector will not be logged in, for that see - L{create_logged_in_connector} - - Any argument specified to this method will be passed to the connector's - initialization method - - @param contype: Type of connector to create, defined in Connector - class, defaults to Connector.SDK - - @param args: Deprecated. 
- @param kwargs: owner, app, sharing(for SDK connector) - - @return: The newly created connector - """ - contype = contype or Connector.SDK - kwargs["username"] = username or self.username - kwargs["password"] = password or self.password - - if contype not in self._CONNECTOR_TYPE_TO_CLASS_MAPPINGS: - raise UnsupportedConnectorError - - if args: - self.logger.debug( - "Args in create_connector is deprecated, Please use kwargs." - ) - conn = self._CONNECTOR_TYPE_TO_CLASS_MAPPINGS[contype](self, *args, **kwargs) - - connector_id = self._get_connector_id(contype=contype, user=conn.username) - - if connector_id in list(self._connectors.keys()): - self.logger.warning( - "Connector {id} is being replaced".format(id=connector_id) - ) - del self._connectors[connector_id] - self._connectors[connector_id] = conn - - return self._connectors[connector_id] - - def create_logged_in_connector( - self, - set_as_default=False, - contype=None, - username=None, - password=None, - *args, - **kwargs - ): - """ - Creates and returns a new connector of type specified or of type - L{SDKConnector} if none specified. - - This method is identical to the L{create_connector} except that this - method also logs the connector in. - - Any argument specified to this method will be passed to the connector's - initialization method - - @param set_as_default: Determines whether the created connector is set - as the default connector too. True as default. - @type bool - @param contype: type of connector to create, available types defined in - L{Connector} class. 
Connector.SDK as default - - @return: The newly created, logged in, connector - """ - contype = contype or Connector.SDK - conn = self.create_connector( - contype, username=username, password=password, *args, **kwargs - ) - if set_as_default: - self._default_connector = conn - conn.login() - return conn - - def set_default_connector(self, contype, username): - """ - Sets the default connector to an already existing connector - - @param contype: type of connector, defined in L{Connector} class - @param username: splunk username used by connector - @type username: string - """ - self._default_connector = self.connector(contype, username) - - def remove_connector(self, contype, username): - """ - removes a connector, sets default connector to None if removing the - default connector - - @param contype: type of connector, defined in L{Connector} class - @param username: splunk username used by connector - @type username: string - """ - if self.default_connector == self.connector(contype, username): - self._default_connector = None - - connector_id = self._get_connector_id(contype, username) - del self._connectors[connector_id] - - def _get_connector_id(self, contype, user): - """ - Returns the connector id - - @param contype: type of connector, defined in L{Connector} class - @param username: splunk username used by connector - @type username: string - """ - connector_id = "{contype}:{user}".format(contype=contype, user=user) - return connector_id - - def set_credentials_to_use(self, username="admin", password="changeme"): - """ - This method just initializes/updates self._username to username - specified & self._password to password specified - - @param username: splunk username that gets assigned to _username - property of splunk class - - @param password: splunk password for the above username. - Note: This method won't create/update the actual credentials on - the splunk running instance. 
- - It is asssumed that credentials specified here are already - valid credentials. - """ - self._username = username - self._password = password - - @property - def default_connector(self): - """ - Returns the default connector for this Splunk instance. - - This method caches the value so it isn't created on every call. - """ - if self._default_connector is None: - self._default_connector = self.create_logged_in_connector( - set_as_default=True, username=self.username, password=self.password - ) - self._attempt_login(self._default_connector) - return self._default_connector - - @classmethod - def _attempt_login(cls, connector): - if ( - hasattr(connector, "is_logged_in") - and connector._attempt_login_time > 0 - and time.time() - connector._attempt_login_time > 30 * 60 - and not connector.is_logged_in() - ): - connector.login() - - def connector(self, contype=None, username=None, password=None): - """ - Returns the connector specified by type and username, defaults to - the default connector if none specified - - @param contype: type of connector, defined in L{Connector} class - @param username: connector's username - @type username: string - """ - if contype is None and username is None: - return self.default_connector - - if contype == "REST": - rest_conn = self.create_logged_in_connector( - contype=Connector.REST, username=username, password=password - ) - self._attempt_login(rest_conn) - return rest_conn - - connector_id = self._get_connector_id(contype, username) - if connector_id not in list(self._connectors.keys()): - raise InvalidConnector( - "Connector {id} does not exist".format(id=connector_id) - ) - connector = self._connectors[connector_id] - self._attempt_login(connector) - return connector - - def jobs(self, contype=None, username=None): - """ - Returns a Jobs manager that uses the specified connector. Defaults to - default connector if none specified. - - This property creates a new Jobs manager each call so you may do as - you please with it. 
- - @param contype: type of connector, defined in L{Connector} class - @param username: connector's username - @type username: string - @rtype: L{Jobs} - """ - from pytest_splunk_addon.helmut.manager.jobs import Jobs - - return Jobs(self.connector(contype, username)) - - def confs(self, contype=None, username=None, password=None): - """ - Returns a Confs manager that uses the specified connector. Defaults to - default connector if none specified. - - This property creates a new Confs manager each call so you may do as - you please with it. - - @param contype: type of connector, defined in L{Connector} class - @param username: connector's username - @type username: string - @rtype: L{Confs} - """ - from pytest_splunk_addon.helmut.manager.confs import Confs - - return Confs(self.connector(contype, username, password)) - - def inputs(self, contype=None, username=None): - """ - Returns a Inputs manager that uses the specified connector. Defaults to - default connector if none specified. - - This property creates a new Inputs manager each call so you may do as - you please with it. - - @param contype: type of connector, defined in L{Connector} class - @param username: connector's username - @type username: string - @rtype: L{Inputs} - """ - from pytest_splunk_addon.helmut.manager.inputs import Inputs - - return Inputs(self.connector(contype, username)) - - def indexes(self, contype=None, username=None, password=None): - """ - Returns a Indexes manager that uses the specified connector. Defaults - to default connector if none specified. - - This property creates a new Indexes manager each time it is called so - you may handle the object as you wish. 
- - @param contype: type of connector, defined in L{Connector} class - @param username: connector's username - @type username: string - @rtype: L{Indexes} - """ - from pytest_splunk_addon.helmut.manager.indexes import Indexes - - return Indexes(self.connector(contype, username, password)) - - def roles(self, contype=None, username=None): - """ - Returns a Roles manager that uses the specified connector. Defaults to - default connector if none specified. - - This property creates a new Roles manager each call so you may do as - you please with it. - - @param contype: type of connector, defined in L{Connector} class - @param username: connector's username - @type username: string - @rtype: L{Roles} - """ - from pytest_splunk_addon.helmut.manager.roles import Roles - - return Roles(self.connector(contype, username)) - - def saved_searches(self, contype=None, username=None): - """ - Returns a SavedSearches manager that uses the specified connector. - Defaults to default connector if none specified. - - This property creates a new SavedSearches manager each - call so you may do as you please with it. - - @param contype: type of connector, defined in L{Connector} class - @param username: connector's username - @type username: string - @rtype: L{SavedSearch} - """ - from pytest_splunk_addon.helmut.manager.saved_searches import SavedSearches - - return SavedSearches(self.connector(contype, username)) - - def users(self, contype=None, username=None): - """ - Returns a Users manager that uses the specified connector. Defaults to - default connector if none specified. - - This property creates a new Users manager each call so you may do as - you please with it. 
- - @param contype: type of connector, defined in L{Connector} class - @param username: connector's username - @type username: string - @rtype: L{Users} - """ - from pytest_splunk_addon.helmut.manager.users import Users - - return Users(self.connector(contype, username)) - - def _notify_listeners_of_splunk_start(self): - """ - Notifies all the listeners that Splunk has started. - - Should be called by subclasses when Splunk has (re)started. - """ - for l in self._start_listeners: - l() - - def get_rip(self, owner=None, app=None, username=None, password=None): - """ - Create a RESTInPeace under certain user and app namespace - - :param owner: owner namespace, default to admin - :type owner: str - :param app: app namespace, default to search - :type app: str - :param username: default to self.username - :type username: str - :param password: default to self.password - :type password: str - :return: RESTInPeace - :rtype: RESTInPeace - """ - username = username or self.username - password = password or self.password - - self.create_logged_in_connector( - contype=Connector.REST, - username=username, - password=password, - owner=owner, - app=app, - ) - return RESTInPeace(self.connector(Connector.REST, username)) - - # Abstract methods - - @abstractmethod - def restart(self): - """ - Restarts the Splunk instance. - - Subclasses should call the L{_notify_listeners_of_splunk_start} method - when Splunk has restarted. - - @raise CouldNotRestartSplunk: If Splunk could not be restarted - @rtype: None - """ - - pass - - @abstractmethod - def is_running(self): - """ - Checks if Splunk is up and running. - - When this returns False a lot of commands will probably fail. - - @rtype: bool - @return: True if Splunk is started, False otherwise. - """ - pass - - @abstractmethod - def splunkd_scheme(self): - """ - Returns the scheme for the splunkd instance. 
- - Should be either C{http} or C{https} - - @return: The scheme - @rtype: str - """ - pass - - @abstractmethod - def get_host_os(self): - """ - Returns the os of the host. - """ - pass - - @abstractmethod - def splunkd_host(self): - """ - Returns the host for the splunkd instance. - - Should be either a hostname or an IP address - - @return: The host - @rtype: str - """ - pass - - @abstractmethod - def splunkd_port(self): - """ - Returns the port for the splunkd instanct. - - @return: The port - @rtype: int - """ - pass - - def enable_listen(self, ports): - """ - Enable this Splunk instance to receive data through certain TCP ports. - - Port values which are not ints or within the range 0-65535 will be - ignored. - - @param ports: The ports through which the data is received. - @type ports: A list of ints detailing which ports to be enabled. - """ - pass - - def disable_listen(self, ports): - """ - Disable ports through which this Splunk instance is receiving data. - - Port values which are not ints or within the range 0-65535 will be - ignored. - - @param ports: The port through which the data was received. - @type ports: A list of ints detailing which ports to be disabled. - """ - pass - - def check_for_fields_in_source(self, source, fieldsList): - """ - checks if the fields present in the fieldsList are being extracted - & stored by the splunk. It returns the list of missing fields. - """ - pass - - def is_monitoring_source(self, source): - """ - Checks if splunk is already monitoring a given source. If it is - already, it returns True, otherwise, it returns False - """ - pass - - def _dump_splunkd_process(self): - """ - Dump splunkd process/opening port into log for troubleshooting - """ - pass - - -def _validate_start_listener(listener): - """ - Validates the start listener making sure it can be called. 
- - @param listener: The start listener - @raise InvalidStartListener: If the listener is invalid - """ - if not _variable_is_a_function(listener): - raise InvalidStartListener - - -def _variable_is_a_function(variable): - """ - Checks if a specified variable is a function by checking if it implements - the __call__ method. - - This means that the object doesn't have to be a function to pass this - function, just implement __call__ - - @return: True if the variable is a function - @rtype: bool - """ - return hasattr(variable, "__call__") - - -class InvalidStartListener(AttributeError): - """ - Exception for when the start listener does not implement the - splunk_has_started method - """ - - def __init__(self, message=None): - message = message or "Start listeners must be callable" - super(InvalidStartListener, self).__init__(message) - - -class CouldNotRestartSplunk(CommandExecutionFailure): - """ - Raised when a Splunk restart fails. - """ - - pass - - -class InvalidConnector(KeyError): - """ - Raised when accessing an invalid connector - """ - - pass diff --git a/pytest_splunk_addon/helmut/splunk/cloud.py b/pytest_splunk_addon/helmut/splunk/cloud.py deleted file mode 100644 index 8a70ee7b7..000000000 --- a/pytest_splunk_addon/helmut/splunk/cloud.py +++ /dev/null @@ -1,302 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import absolute_import - -import sys -import time -import traceback -from builtins import str - -from splunklib.binding import HTTPError - -from pytest_splunk_addon.helmut.connector.base import Connector -from pytest_splunk_addon.helmut.manager.jobs import Jobs -from .base import Splunk - - -class CloudSplunk(Splunk): - def __init__( - self, - name=None, - splunkd_scheme=None, - splunkd_host=None, - splunkd_port="", - web_scheme=None, - web_host=None, - web_port=None, - username="admin", - password="changeme", - ): - """ - Creates a new CloudSplunk instance. - - About web url: - If web_scheme, web_host are given, use them. - If not, will try to query /services/server/settings to find web info. - If no sufficient permissions to query, will set it to default http://{_splunkd_host}:{} - """ - - self._splunkd_scheme = splunkd_scheme or "https" - self._splunkd_port = splunkd_port or "8089" - self._splunkd_host = splunkd_host or "127.0.0.1" - super(CloudSplunk, self).__init__(name) - self.set_credentials_to_use(username=username, password=password) - - server_web_scheme = server_web_host = server_web_port = None - if not (web_scheme and web_host): - try: - sdkconn = self.create_logged_in_connector(contype=Connector.SDK) - server_settings = sdkconn.service.settings - server_web_scheme = ( - "http" if server_settings["enableSplunkWebSSL"] == "0" else "https" - ) - server_web_host = server_settings["host"] - server_web_port = server_settings["httpport"] - self._pass4SymmKey = server_settings["pass4SymmKey"] - except HTTPError as he: - self.logger.warning( - "No sufficient permissions to qury server settings." 
- ) - self._web_scheme = web_scheme or server_web_scheme or "http" - self._web_host = web_host or server_web_host or self._splunkd_host - self._web_port = web_port or server_web_port or "" - self.logger.debug("Set web base to: {}".format(self.web_base())) - - @property - def _str_format(self): - return '<{cls}@{id} name="{name}" uri="{uri_base}>' - - @property - def _str_format_arguments(self): - return { - "cls": self.__class__.__name__, - "id": id(self), - "name": self.name, - "uri_base": self.uri_base(), - } - - def splunkd_scheme(self): - return self._splunkd_scheme - - def splunkd_host(self): - return self._splunkd_host - - def splunkd_port(self): - return self._splunkd_port - - def uri_base(self): - """ - Returns the splunkd host. - - @return: The host. - @rtype: str - """ - return ( - self.splunkd_scheme() - + "://" - + self.splunkd_host() - + ":" - + str(self.splunkd_port()) - ) - - @property - def pass4SymmKey(self): - if not self._pass4SymmKey: - sdkconn = self.create_logged_in_connector(contype=Connector.SDK) - server_settings = sdkconn.service.settings - self._pass4SymmKey = server_settings["pass4SymmKey"] - return self._pass4SymmKey - - def web_scheme(self): - return self._web_scheme - - def web_host(self): - return self._web_host - - def web_port(self): - if self._web_port: - return self._web_port - - def web_base(self): - """ - Returns the splunk web server. - - @return: the splunk web server address. - @rtype: string - """ - return "{scheme}://{host}:{web_port}".format( - scheme=self.web_scheme(), host=self.web_host(), web_port=self.web_port() - ) - - def create_connector( - self, contype=None, username=None, password=None, *args, **kwargs - ): - """ - @param contype: - @param username: Don't use this parameter. This is only for backward compatible. CloudSplunk - only uses self.username as connector's username. - @param password: Don't use this parameter. This is only for backward compatible. 
- @param args: - @param kwargs: - @return: - """ - if ( - username - and username != self.username - and password - and password != self.password - ): - raise CloudSplunkConnectorException() - return super(CloudSplunk, self).create_connector( - contype=contype, username=username, password=password, *args, **kwargs - ) - - def connector(self, contype=None, username=None): - """ - - @param contype: - @param username: Don't use this parameter. This is only for backward compatible. CloudSplunk - only uses self.username as connector's username. - @return: - """ - if username and username != self.username: - raise CloudSplunkConnectorException() - return super(CloudSplunk, self).connector(contype=contype, username=username) - - def get_host_os(self): - raise NotImplementedError("Host os should not matter for CloudSplunk.") - - def is_running(self): - restconn = self.create_connector(contype=Connector.REST) - try: - response, _ = restconn.make_request("GET", "/services/server/info") - return response["status"] == restconn.SUCCESS["GET"] - except Exception: - self.logger.debug("Not able to make GET request." + traceback.format_exc()) - return False - - def restart(self): - raise CloudRestartException() - - def get_event_count(self, search_string="*"): - """ - Displatches a search job and returns an event count without waiting for indexing to finish - @param search_string: The search string - """ - self.logger.info("Getting event count") - event_count = 0 - jobs = Jobs(self.default_connector) - job = jobs.create("search %s" % search_string) - job.wait() - event_count = job.get_event_count() - self.logger.debug("Event count: {ec}".format(ec=event_count)) - return event_count - - def get_final_event_count( - self, search_string="*", secondsToStable=60, retry_interval=30 - ): - """ - Waits until indexing is done and then gives the final event count that the search reported. 
- @param search_string: The search string - @param secondsToStable: The time to wait with stable index before we decide indexing is done - @param retry_interval: wait time b/w two successive search jobs - """ - resultPrev = -1 - resultSameSince = sys.maxsize - lastPolledAt = int(time.time()) - counts = [] - while True: - time.sleep(retry_interval) - result = self.get_event_count(search_string=search_string) - now = int( - time.time() - ) # time()'s precision will suffice here, and in fact seconds is all we want - if result == resultPrev: - if (now - resultSameSince) > secondsToStable: ### we have stable state - self.logger.info( - "Achieved stable state for search %s with totalEventCount=%s" - % (search_string, result) - ) - return result # returns the final event count... - if ( - resultSameSince == sys.maxsize - ): ### our first time in what could become stable state - self.logger.debug( - "Possibly entering stable state for search %s at totalEventCount=%s" - % (search_string, result) - ) - resultSameSince = lastPolledAt - self.logger.debug("Using resultSameSince=%d " % (resultSameSince)) - else: ### our 2nd/3rd/... time in what could become stable state - self.logger.debug( - "Confirming putative stable at totalEventCount=%s for search_string %s " - % (result, search_string) - ) - else: ### we do NOT have stable state - self.logger.debug( - "Flux at totalEventCount=%s for search_string %s; delta +%s" - % (result, search_string, (result - resultPrev)) - ) - resultPrev = result - resultSameSince = sys.maxsize - lastPolledAt = now - - def ensure_event_count( - self, search_string, expect_count, retry=3, retry_interval=10 - ): - """ - @param search_string: - @param retry: - @param retry_interval: - @return: - - ensure event count met in given tries, return true if met, else false - """ - event_count = 0 - while retry > 0: - event_count = self.get_final_event_count(search_string, secondsToStable=120) - if event_count == expect_count: - ( - "event count met. 
event_count={event_count}".format( - event_count=str(event_count) - ) - ) - return True - else: - self.logger.debug( - "wait event count to met. event_count={event_count}".format( - event_count=str(event_count) - ) - ) - time.sleep(retry_interval) - retry = retry - 1 - self.logger.debug( - "Timeout wait event count to met. event_count={event_count}".format( - event_count=str(event_count) - ) - ) - return False - - -class CloudSplunkConnectorException(Exception): - message = ( - "Don't pass username/password to connector. Helmut allows only one user per CloudSplunk instance." - "Please create another CloudSplunk instance if you need to use another user." - ) - - -class CloudRestartException(Exception): - message = "Restart on cloud is prohibited unless you are using cloud ops role." diff --git a/pytest_splunk_addon/helmut/util/Constants.py b/pytest_splunk_addon/helmut/util/Constants.py deleted file mode 100644 index 632ffab2f..000000000 --- a/pytest_splunk_addon/helmut/util/Constants.py +++ /dev/null @@ -1,196 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from builtins import object - - -class Constants(object): - TestConstants = { - # Email alerting constants - "EMAIL_SETTINGS": "/servicesNS/admin/search/admin/alert_actions/email", - "PAPER_ORIENTATION": "/servicesNS/admin/search/saved/searches/{0}", - "PAPER_SIZE": "/servicesNS/admin/search/saved/searches/{0}", - "SMTP_SERVER": "10.184.16.130:{0}", - "SPLUNK_MAIL_HOST": "10.184.16.130", - # Splunk constants - "AUTO_PORTS": " --auto-ports", - "DFLT_BRNCH": "current", - "RUN_LOCAL": "local", - "RUN_REMOTE": "remote", - "SSH_PORT": "22", - "SPLUNK_START": "splunkd start", - "SPLUNK_STOP": "splunkd stop", - "SPLUNK_RESTART": "splunkd restart", - "SERVER_INFO": "/services/server/info", - # Search head pooling constants - "SLAVE": "slave", - "MASTER": "captain", - "ADHOC": "adhoc", - "SERVER_ROLE": "/services/server/roles", - "SHP_CONFIG_PEER": "/services/shcluster/config/config", - "SHP_MEMBER_INFO": "/services/shcluster/{0}/info", - "SHP_MEMBER_GUID": "/services/shcluster/captain/members", - "SHP_MEMBER_MEMBERS": "/services/shcluster/member/members", - "SHP_CAPTAIN_CONFIG": "/services/shcluster/config", - "SHP_ARTIFACT": "/services/shcluster/{0}/artifacts", - "SHP_BOOTSTRAP": "/services/shcluster/member/consensus/foo/bootstrap", - "SHP_CONSENSUS": "/services/shcluster/member/consensus", - # 'SHP_ADD_PEER':'/services/shcluster/member/consensus/foo/set_configuration', - "SHP_ADD_PEER": "/services/shcluster/member/consensus/foo/bootstrap", - "SHP_DYNAMIC_SETUP": "/servicesNS/nobody/system/configs/conf-server/shclustering", - "SHP_CAPTAIN_INFO": "/services/shcluster/captain/info", - "SHP_STATUS": "/services/shcluster/status", - "SHP_CAPTAIN_ARTIFACTS": "/services/shcluster/captain/artifacts", - "SHP_JOBS": "/services/shcluster/captain/jobs", - "CONF_DEPLOY_MGR": "/services/apps/deploy", - "SHP_CAPTAIN_TRANSFER": "/services/shcluster/member/consensus/foo/transfer_captaincy", - # Search constants - "SAVED_SEARCH": "/servicesNS/{0}/{1}/saved/searches", # {0} is the user 
and {1} is the app - "FIRED_ALERT": "/servicesNS/admin/search/alerts/fired_alerts", - "SAVED_SEARCH_NAME": "/servicesNS/admin/search/admin/savedsearch/{0}", - "FIRED_ALERT_DETAILS": "/servicesNS/admin/search/alerts/fired_alerts/{0}", - "ADD_DIST_PEER": "/servicesNS/nobody/search/search/distributed/peers", - "EDIT_SAVED_SEARCH": "/servicesNS/nobody/{0}/saved/searches", - "EDIT_APP": "/servicesNS/nobody/system/apps/local", - "SEARCH_JOB": "/services/{0}/jobs", - "SEARCH_JOB_ID": "/services/search/jobs/{0}", - "JOB_CONTROL": "/services/search/jobs/{0}/control", - "JOB_EVENTS": "/search/jobs/{0}/events", - "JOB_RESULTS": "/search/jobs/{0}/results", - "JOBS_RESULTS_PREVIEW": "/search/jobs/{0}/results_preview", - "SEARCH_JOB_LOG": "/search/jobs/{0}/search.log", - "JOB_SUMMARY": "/search/jobs/{0}/summary", - "JOB_TIMELINE": "/search/jobs/{0}/timeline", - # Knowledge Object Constants - "ADD_TAG": "/servicesNS/nobody/{0}/search/fields/{1}/tags", # 0 is fieldname - "GET_TAG": "/servicesNS/nobody/{0}/{1}/tags", - "EDIT_EVENTTYPE": "/servicesNS/nobody/{0}/saved/eventtypes", - "GET_EVENTTYPE": "/servicesNS/nobody/{0}/saved/eventtypes", - "EDIT_LOOKUP": "/servicesNS/nobody/{0}/data/props/lookups", - "UPLOAD_LOOKUP_FILE": "/servicesNS/admin/{0}/data/lookup-table-files", - "CREATE_TABLE_LOOKUP": "/servicesNS/nobody/{0}/data/transforms/lookups", - "GET_LOOKUP_FILE": "/servicesNS/nobody/{0}/data/lookup-table-files/{0}", - "GET_TABLE_LOOKUP": "/servicesNS/nobody/{0}/data/transforms/lookups/{0}", - "EDIT_MACRO": "/servicesNS/nobody/{0}/admin/macros", - "EDIT_FELD_ALIAS": "/servicesNS/nobody/{0}/data/props/fieldaliases", - "GET_FIELD_ALIAS": "/servicesNS/nobody/{0}/data/props/fieldaliases/{1}", - "EXTRACTION": "", - "EDIT_CALC_FIELDS": "/servicesNS/nobody/{0}/data/props/calcfields", - "GET_CALC_FIELDS": "/servicesNS/nobody/{0}/data/props/calcfields/{1}", - "EDIT_FIELD_EXTRACTION": "/servicesNS/nobody/{0}/data/transforms/extractions", - "GET_FIELD_EXTRACTION": 
"/servicesNS/admin/{0}/data/transforms/extractions/{1}", - "EDIT_IFX": "/servicesNS/nobody/{0}/data/props/extractions", - "GET_IFX": "/servicesNS/nobody/{0}/data/props/extractions/{1}", - "EDIT_DASHBOARD": "/servicesNS/nobody/{0}/data/ui/views", - "GET_DASHBOARD": "/servicesNS/nobody/{0}/data/ui/views/{1}", - "SOURCE_TYPE_RENAME": "/servicesNS/nobody/{0}/data/props/sourcetype-rename", - "EDIT_DATAMODEL": "/servicesNS/nobody/{0}/datamodel/model", - "GET_DATAMODEL": "/servicesNS/nobody/{0}/datamodel/model/{1}", - "EMBED_REPORT": "/servicesNS/nobody/{0}/saved/searches/{1}/embed", - # knowledge objects with user space - "EDIT_FIELD_EXTRACTION_USRCXT": "/servicesNS/{0}/{1}/data/transforms/extractions", - "ADD_TAG_USRCXT": "/servicesNS/{0}/{1}/search/fields/{2}/tags", - "SOURCE_TYPE_RENAME_USRCXT": "/servicesNS/{0}/{1}/data/props/sourcetype-rename", - "EDIT_FELD_ALIAS_USRCXT": "/servicesNS/{0}/{1}/data/props/fieldaliases", - "EDIT_IFX_USRCTX": "/servicesNS/{0}/{1}/data/props/extractions", - "EDIT_SAVED_SEARCH_USRCTX": "/servicesNS/{0}/{1}/saved/searches", - "SUPPRESS": "/servicesNS/{0}/{1}/saved/searches/{2}/suppress", - "ACKNOWLEDGE": "/servicesNS/{0}/{1}/saved/searches/{2}/acknowledge", - # App constants - "APP_INSTALL": "/servicesNS/{0}/{1}/apps/appinstall", - "APP_LOCAL": "/servicesNS/{0}/{1}/apps/local", - "ONE_SHOT": "/servicesNS/{0}/{1}/data/inputs/oneshot", - "USER_CONTEXT": "/services/authentication/users", - "STORAGE_PASSWORDS": "/servicesNS/{0}/{1}/storage/passwords", - # Config constants - "CONFIG_CONF_INPUTS_NEW": "/services/configs/conf-inputs/_new", - "CONF_PROPERTY": "/services/properties/{0}/{1}/{2}", - # Server constants - "SERVER_INTRO_INDEXER": "/services/server/introspection/indexer", - # Authentication constants - "AUTHENTICATION_USERS": "/services/authentication/users", - # Cluster constants - "MASTER_INDEXES": "/services/cluster/master/indexes", - "MASTER_BUCKETS": "/services/cluster/master/buckets", - "MASTER_GENERATION": 
"/services/cluster/master/generation", - "SEARCH_HEAD_GENERATION": "/services/cluster/searchhead/generation", - "MASTER_MESSAGE": "/services/messages", - "MASTER_COMMIT_GENERATION": "/services/cluster/master/control/control/commit_generation", - "MASTER_CONTROL_ROLL_BUCKET": "/services/cluster/master/control/control/roll-hot-buckets", - "MASTER_SEARCH_HEADS": "/services/cluster/master/searchheads", - "MASTER_PEERS": "/services/cluster/master/peers", - # Forwarder director - "INDEXER_DISCOVERY": "/services/indexer_discovery", - # Data constants - "DATA_INDEXES": "/services/data/indexes", - "DATA_INDEXES_FREEZE": "/services/data/indexes/{0}/freeze-buckets", - "DATA_INPUTS_TCP_RAW": "/services/data/inputs/tcp/raw", - # Authorization & Authentication - "AUTH_ROLE": "/services/authorization/roles/", - "AUTH_USER": "/services/authentication/users/", - "AUTH_LDAP": "/services/authentication/providers/LDAP/", - "AUTH_SAML": "/services/authentication/providers/SAML/", - "AUTH_SCRIPTED": "/services/authentication/providers/Scripted/", - # Error Message - "MSG_SHP_ROLLING_RESTART_COMPLETE": "Message : Search Head Clustering is not currently in a rolling Restart state. 
May be the rolling restart is complete or this node is going to restart itself.", - # Auth Settings - "LDAP_AUTH_CONF": { - "name": "LDAP", - "host": "10.66.128.50", - "port": "389", - "groupBaseDN": "OU=groups,OU=automation,DC=jacktest,DC=com", - "groupMappingAttribute": "dn", - "groupMemberAttribute": "member", - "groupNameAttribute": "cn", - "userBaseDN": "OU=Users,OU=automation,DC=jacktest,DC=com", - "userNameAttribute": "samaccountname", - "bindDN": "CN=Administrator,CN=Users,DC=jacktest,DC=com", - "bindDNpassword": "QWE123asd", - "realNameAttribute": "cn", - }, - "LDAP_AUTH_CONF1": { - "bindDNpassword": "changeme", - "groupBaseDN": "ou=groups,dc=coreuitest,dc=com", - "groupMemberAttribute": "member", - "groupNameAttribute": "cn", - "host": "10.66.130.102", - "name": "LDAP1", - "realNameAttribute": "displayname", - "userBaseDN": "ou=people,dc=coreuitest,dc=com", - "userNameAttribute": "uid", - }, - "SAML_AUTH_CONF": { - "name": "SAML", - "allowSslCompression": "true", - "attributeQueryRequestSigned": "true", - "attributeQueryResponseSigned": "true", - "attributeQuerySoapPassword": "QWE123asd", - "attributeQuerySoapUsername": "saml_automation", - "entityId": "saml_automation", - "fqdn": "", - "idpAttributeQueryUrl": "https://ping.splunk.io:9031/idp/attrsvc.ssaml2", - "idpCertPath": "", - "idpSLOUrl": "https://ping.splunk.io:9031/idp/SLO.saml2", - "idpSSOUrl": "https://ping.splunk.io:9031/idp/SSO.saml2", - "redirectPort": "0", - "signAuthnRequest": "true", - "signedAssertion": "true", - # 'enableSplunkdSSL': 'true', - "sslKeysfile": "aNewServerCertificate.pem", - "sslKeysfilePassword": "changed", - # 'caCertFile': 'aCACertificate.pem', - # 'caPath': '/root/splunk/etc/auth/self_signed_certs' - }, - } diff --git a/pytest_splunk_addon/helmut/util/__init__.py b/pytest_splunk_addon/helmut/util/__init__.py deleted file mode 100644 index abb0984ec..000000000 --- a/pytest_splunk_addon/helmut/util/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright 2021 Splunk 
Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -Various utility classes and functions - -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" diff --git a/pytest_splunk_addon/helmut/util/action_writer.py b/pytest_splunk_addon/helmut/util/action_writer.py deleted file mode 100644 index cd377ef5d..000000000 --- a/pytest_splunk_addon/helmut/util/action_writer.py +++ /dev/null @@ -1,127 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -""" -Module for writing actions to the user. -Mostly used for coreapps - -Summary -======= -It's used to print actions that look like this:: - - Action1... done - Action2... - Action3... done - Action4... done - done - Action5... 
failed - -@author: Nicklas Ansman-Giertz -@contact: U{ngiertz@splunk.com} -@since: 2011-11-23 -""" - -import sys -from builtins import range - -_LAST_OPENED = 0 -_LAST_CLOSED = 0 -_LEVEL = 0 - -LOG_ONLY = True - - -def write_action(action, logger=None, logger_msg=None): - """ - Writes the specified action to stdout. - - Writes the action appended by '... ' - - @type action: str - @param action: The action to write - - @type logger_msg: str - @param logger_msg: If specified this message will be written to the - logger instead of action - """ - if not LOG_ONLY: - global _LEVEL, _LAST_OPENED - # If there is one currently open print newline - if _LEVEL > _LAST_OPENED: - sys.stdout.write("\n") - - # Print tabs - for _index in range(_LEVEL): - sys.stdout.write("\t") - - sys.stdout.write(action + "... ") - sys.stdout.flush() - if logger: - logger.info(logger_msg or action) - - _LAST_OPENED = _LEVEL - _LEVEL += 1 - - -def write_done(): - """ - Tells the user that we're done with the previous action. - Does not print to the logger - """ - if not LOG_ONLY: - global _LEVEL, _LAST_CLOSED - _LEVEL -= 1 - - # If there has been actions in between, write tabs - if _LAST_CLOSED > _LEVEL: - for _index in range(_LEVEL): - sys.stdout.write("\t") - - sys.stdout.write("done!\n") - - _LAST_CLOSED = _LEVEL - sys.stdout.flush() - - -def write_failed(logger=None, msg=None): - """ - Tells the user that the last action failed and raises an exception. - - If msg is an exception it will be raised, if it's a string an Exception - will be raised. 
- If unspecified a generic fail message is thrown - - @type msg: str - @param msg: An optional fail message - """ - if not LOG_ONLY: - global _LEVEL - msg = msg or "Testing failed, check your logs for more info" - - _LEVEL -= 1 - - if _LAST_CLOSED > _LEVEL: - for _index in range(_LEVEL): - sys.stdout.write("\t") - - sys.stdout.write("failed!\n") - sys.stdout.flush() - - if logger: - logger.exception(msg) - - if isinstance(msg, Exception): - raise - raise Exception(msg) diff --git a/pytest_splunk_addon/helmut/util/alerthelper.py b/pytest_splunk_addon/helmut/util/alerthelper.py deleted file mode 100644 index b47146f24..000000000 --- a/pytest_splunk_addon/helmut/util/alerthelper.py +++ /dev/null @@ -1,503 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import print_function - -from future import standard_library - -standard_library.install_aliases() -from builtins import str -from builtins import object -import logging -import json -import os -import http.client -import urllib.request, urllib.parse, urllib.error -import urllib.request, urllib.error, urllib.parse -import time - -from pytest_splunk_addon.helmut.splunk_factory.splunkfactory import SplunkFactory -from pytest_splunk_addon.helmut.connector.base import Connector - -from pytest_splunk_addon.helmut.util.Constants import Constants as const - -LOGGER = logging.getLogger("alter helper log") -import socket - - -class AlertHelpers(object): - def __init__(self): - - global request_args - global email_args - request_args = dict() - email_args = dict() - - def createbasicAlert( - self, - name, - subject, - search="index=_internal", # replace index=_internal with the other search - cron_schedule="*/1 * * * *", - ): - """ - sets the attributes needed for a basic alert such in request_args which - is a global static list - search - splunk search query - name - name of the search - is_scheduled - save search as scheduled search - alert.track - List the alert in alert list - """ - - request_args[ - "search" - ] = search # replace hardcoding with current working directory - request_args["name"] = name - request_args["cron_schedule"] = cron_schedule - request_args["is_scheduled"] = "True" - request_args["actions"] = "email" - request_args["action.email"] = "True" - request_args["action.email.track_alert"] = "True" - request_args["alert.track"] = "True" - request_args["action.populate_lookup.track_alert"] = "True" - request_args["alert.digest_mode"] = "True" - request_args["alert.suppress"] = 1 - request_args["alert.suppress.period"] = "24h" - - def setEmailSubject(self, subject): - request_args["action.email.subject"] = subject - request_args["action.email.subject.alert"] = subject - request_args["action.email.subject.report"] = subject - - # def 
set_alert_condition(self): - - def setemailMode(self, mailserver, email_mode="TLS"): - """ - sets the email mode to TLS/SSL/None - """ - if email_mode == "TLS": - request_args["action.email.use_tls"] = 1 - else: - if email_mode == "SSL": - request_args["action.email.mailserver"] = mailserver - request_args["action.email.use_ssl"] = 1 - - def setup_mail_server(self, splunk, user, password, mail_server, mode="None"): - """ - Updates the email alert settings of the splunk instance - for SSL sets the smtp mailserver and ssl port - Restarts the splunk instance after updating the settings - """ - - request_type = "POST" - request_url = const.TestConstants["EMAIL_SETTINGS"] - - request_args = dict() - request_args["auth_password"] = "" - request_args["auth_username"] = "" - request_args["mailserver"] = const.TestConstants["SPLUNK_MAIL_HOST"] - request_args["use_ssl"] = 0 - request_args["use_tls"] = 0 - request_args["reportServerURL"] = " " - - if mode == "SSL": - request_args["auth_password"] = password - request_args["auth_username"] = user - request_args["mailserver"] = const.TestConstants["SMTP_SERVER"].format( - "465" - ) - request_args["use_ssl"] = 1 - request_args["reportServerURL"] = " " - - if mode == "TLS": - request_args["auth_password"] = password - request_args["auth_username"] = user - request_args["mailserver"] = const.TestConstants["SMTP_SERVER"].format("25") - request_args["use_tls"] = 1 - request_args["reportServerURL"] = " " - - self.make_http_request( - splunk, - request_type, - request_url, - request_args, - splunk.username, - splunk.password, - ) - splunk.execute("splunk restart") - - def updatePaperSettings( - self, paper_size="Letter", paper_orientation="Portrait", splunk_logo=0 - ): - """ - Update the email alert setting to update options such as - format - email format when results included inline - paper_size - Letter|A4|A3 etc - paper_orientation - Portrait | landscape - splunk_logo - Whether to include splunk logo or not - """ - 
request_args["action.email.reportPaperSize"] = paper_size - request_args["action.email.reportPaperOrientation"] = paper_orientation - request_args["action.email.reportIncludeSplunkLogo"] = splunk_logo - - def addContentsToEmail(self, tokenlist): - request_args["action.email.message.alert"] = tokenlist - - def addSearchResultstoEmail(self, sendMethod, format="csv"): - """ - Configures the email to have the search results as - 1. Inline - results directly in the message body - 2. Attachments - Attach the results to the message body - """ - # request_args['action.email.sendresults'] = 'True' - request_args["action.email.sendresults"] = 0 - request_args["action.email.inline"] = 0 - request_args["action.email.sendpdf"] = 0 - request_args["action.email.sendcsv"] = 0 - - if sendMethod == "inline": - """ - inline csv - inline table - inline raw - """ - request_args["action.email.sendresults"] = 1 - request_args["action.email.inline"] = 1 - self.configure_email_format(format) - - if sendMethod == "pdf": - request_args["action.email.sendresults"] = 1 - request_args["action.email.sendpdf"] = 1 - - if sendMethod == "attachment": - request_args["action.email.sendresults"] = 1 - request_args["action.email.sendcsv"] = 1 - - if sendMethod == "Noresults": - self.configure_email_format(format) - - def configure_email_format(self, email_format="plain"): - """ - Updates the email format (applies to inline and attachments) - to one of the following - 1. text - 2. html - 3. csv - 4. 
raw - """ - request_args["action.email.format"] = email_format - - def add_email_recipients(self, to_list={}, cc_list={}, bcc_list={}): - """ - Updates the email recipients in to,cc and bcc list - """ - to_list1 = to_list.split("|") - cc_list1 = cc_list.split("|") - bcc_list1 = bcc_list.split("|") - - toList = ",".join(to_list1) - ccList = ",".join(cc_list1) - bccList = ",".join(bcc_list1) - - toList = toList[1:-1] - ccList = ccList[1:-1] - bccList = bccList[1:-1] - - request_args["action.email.to"] = toList - request_args["action.email.cc"] = ccList - request_args["action.email.bcc"] = bccList - - def deleteAlert(self, alertname): - """ - deletes an alert using the name of the alert - """ - request_type = "DELETE" - request_url = const.TestConstants["SAVED_SEARCH_NAME"].format(alertname) - request_args = "" - self.make_http_request(request_type, request_url, request_args) - - def createAlert(self, splunk, user, context): - """ - Creates an alert by sending POST request to the saved/searches - REST endpoint - """ - request_type = "POST" - print("user and context is {0} and {1}".format(user, context)) - request_url = const.TestConstants["SAVED_SEARCH"].format(user, context) - self.make_http_request( - splunk, - request_type, - request_url, - request_args, - splunk.username, - splunk.password, - ) - - def get_paper_orientation(self, splunk, alertname): - """ - Check the rest endpoint for paper orientation (Landscape, Portrait) - """ - request_type = "GET" - request_url = const.TestConstants["PAPER_ORIENTATION"].format(alertname) - req_args = dict() - req_args["output_mode"] = "json" - content = self.make_http_request( - splunk, - request_type, - request_url, - req_args, - splunk.username, - splunk.password, - ) - parsedresponse = json.loads(content) - paper_orientation = str( - parsedresponse["entry"][0]["content"]["action.email.reportPaperOrientation"] - ) - return paper_orientation - - def get_paper_size(self, splunk, alertname): - """ - check the rest endpoint for 
paper format(Letter, Legal, A2, A3, A4, A5) - """ - request_type = "GET" - req_args = dict() - req_args["output_mode"] = "json" - request_url = const.TestConstants["PAPER_SIZE"].format(alertname) - content = self.make_http_request( - splunk, - request_type, - request_url, - req_args, - splunk.username, - splunk.password, - ) - parsedresponse = json.loads(content) - paper_size = str( - parsedresponse["entry"][0]["content"]["action.email.reportPaperSize"] - ) - return paper_size - - def get_triggered_alert_count(self, splunk, alert_name, password="changeme"): - "triggered_alert" - try: - request_type = "GET" - req_args = dict() - req_args["output_mode"] = "json" - request_url = const.TestConstants["FIRED_ALERT_DETAILS"].format(alert_name) - content = self.make_http_request( - splunk, - request_type, - request_url, - req_args, - splunk.username, - splunk.password, - ) - parsedresponse = json.loads(content) - triggered_alert = int( - parsedresponse["entry"][0]["content"]["triggered_alerts"] - ) - return triggered_alert - except Exception as e: - LOGGER.error("Exception when get triggered_alert:" + repr(e)) - return None - - def trigger_alerts( - self, - splunk, - mail_server, - user, - password, - emailMode="None", - search=None, - filename="alertconfig.conf", - hostname=None, - ): - """ - This method will trigger an alert using input file. Multiple alerts can - be defined in the file - The file format as follows, - name of alert, cron_schedule, email_to_list, email_cc_list, - email_bcc_list, email format,adSearchResults to email, - format in attachment, paper_size, paper_orientation,splunk logo - """ - conf = open(filename, "r") - f = conf.readlines() - self.setup_mail_server( - splunk, user, password, mail_server, mode=emailMode - ) # for mode get pytest parameter - - for line in f: - elements = line.split("|") - if hostname is None: - # INFRA-6217 - elements[0] = elements[0] + "." + socket.gethostname() - else: - elements[0] = elements[0] + "." 
+ hostname - self.createbasicAlert( - name=elements[0], - cron_schedule=elements[1], - subject=elements[0], - search=search, - ) - self.setEmailSubject(elements[0]) - self.setemailMode(mail_server, email_mode=emailMode) - - self.add_email_recipients(elements[2], elements[3], elements[4]) - self.addSearchResultstoEmail(elements[6], format=elements[5]) - - self.updatePaperSettings( - paper_size=elements[8], - paper_orientation=elements[9], - splunk_logo=elements[10], - ) - - print("Creating alert {0}".format(elements[0])) - print("file name is {0}".format(elements[13])) - - if elements[13].rstrip("\n") != "None": - with open(elements[13].rstrip("\n"), "r") as myfile: - data = "".join(line for line in myfile) - self.addContentsToEmail(data) - - self.createAlert( - splunk, - context=elements[12].rstrip("\n"), - user=elements[11].rstrip("\n"), - ) - - def create_new_app( - self, - splunk, - appname, - appcontext, - user, - splunk_user="admin", - splunk_pwd="changeme", - ): - # app_params = {'name':'nithya','app':'search','user':'admin',} - app_params = {"name": appname} - app_url = const.TestConstants["APP_LOCAL"].format(user, appcontext) - request_type = "POST" - self.make_http_request( - splunk, - request_type, - app_url, - app_params, - splunk_user=splunk_user, - splunk_pwd=splunk_pwd, - ) - - def create_new_user( - self, splunk, newuser, pwd, roles, splunk_user="admin", splunk_pwd="changeme" - ): - # app_params = {'name':'nithya','app':'search','user':'admin',} - app_params = {"name": newuser, "roles": roles, "password": pwd} - app_url = const.TestConstants["USER_CONTEXT"] - request_type = "POST" - self.make_http_request( - splunk, - request_type, - app_url, - app_params, - splunk_user=splunk_user, - splunk_pwd=splunk_pwd, - ) - - def get_splunk(self, TEST_DIR, branch=None, product=None, build=None): - """ - Splunk instance - """ - splunk_home = ( - TEST_DIR + os.sep + "test_installs" + os.sep + "temp_splunk_instance" - ) - splunk_instance = 
SplunkFactory.getSplunk(splunk_home) - splunk_instance.install_nightly( - branch=branch, package_type=product, build=build - ) - # splunk_instance.COMMON_FLAGS = splunk_instance.COMMON_FLAGS + ' --auto-ports' - splunk_instance.start(auto_ports=True) - - return splunk_instance - - def one_shot_upload( - self, - splunk, - user, - appcontext, - one_shot_file, - splunk_user="admin", - splunk_pwd="changeme", - ): - print( - "this method will upload the data input file needed for \ - running searches in the alerts" - ) - oneshot_url = const.TestConstants["ONE_SHOT"].format(user, appcontext) - oneshot_args = {"name": one_shot_file} - request_type = "POST" - self.make_http_request( - splunk, - request_type, - oneshot_url, - oneshot_args, - splunk_user=splunk_user, - splunk_pwd=splunk_pwd, - ) - - def make_http_request( - self, - splunk, - request_type, - request_url, - request_args, - splunk_user="admin", - splunk_pwd="changeme", - ): - """ - This is a REST helper that will generate a http request - using request_type - GET/POST/... - request_url and request_args - """ - restconn = splunk.create_logged_in_connector( - contype=Connector.REST, username=splunk_user, password=splunk_pwd - ) - try: - response, content = restconn.make_request( - request_type, request_url, request_args - ) - return content - - except urllib.error.HTTPError as err: - LOGGER.error( - "Http error code is ({0}): {1} : {2}".format( - err.code, err.errno, err.strerror - ) - ) - except http.client.ResponseNotReady as e: - time.sleep(5) - LOGGER.warn( - "httplib.ResponseNotReady error happen, retry once. 
{e}".format(e=e) - ) - # retry one time - try: - restconn = splunk.create_logged_in_connector( - contype=Connector.REST, username=splunk_user, password=splunk_pwd - ) - response, content = restconn.make_request( - request_type, request_url, request_args - ) - return content - except Exception as e: - LOGGER.error("Error happened, exception is {e}".format(e=e)) diff --git a/pytest_splunk_addon/helmut/util/attrdict.py b/pytest_splunk_addon/helmut/util/attrdict.py deleted file mode 100644 index 3f115babe..000000000 --- a/pytest_splunk_addon/helmut/util/attrdict.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -class AttrDict(dict): - """A dictionary for which keys are also accessible as attributes.""" - - def __init__(self, *args, **kwargs): - super(AttrDict, self).__init__(*args, **kwargs) - self.__dict__ = self diff --git a/pytest_splunk_addon/helmut/util/basefileutils.py b/pytest_splunk_addon/helmut/util/basefileutils.py deleted file mode 100644 index 1f39b3c1e..000000000 --- a/pytest_splunk_addon/helmut/util/basefileutils.py +++ /dev/null @@ -1,69 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from pytest_splunk_addon.helmut.log import Logging - - -class BaseFileUtils(Logging): - def isfile(self, path): - raise NotImplementedError("Function not implemented") - - def isdir(self, path): - raise NotImplementedError("Function not implemented") - - def delete_file(self, file): - raise NotImplementedError("Function not implemented") - - def get_file_contents(self, path): - raise NotImplementedError("Function not implemented") - - def write_file_contents(self, path, contents, mode="w"): - raise NotImplementedError("Function not implemented") - - def copy_file(self, source, target): - raise NotImplementedError("Function not implemented") - - def move_file(self, source, target): - raise NotImplementedError("Function not implemented") - - def copy_directory(self, source, target, ignore=None): - raise NotImplementedError("Function not implemented") - - def compare_files(self, file1, file2): - raise NotImplementedError("Function not implemented") - - def move_directory(self, source, target, ignore=None): - raise NotImplementedError("Function not implemented") - - def force_remove_file(self, path): - raise NotImplementedError("Function not implemented") - - def force_remove_directory(self, path): - raise NotImplementedError("Function not implemented") - - def force_copy_file(self, source, target): - raise NotImplementedError("Function not implemented") - - def force_move_file(self, source, target): - raise NotImplementedError("Function not implemented") - - def force_move_directory(self, source, target): - raise NotImplementedError("Function not 
implemented") - - def force_copy_directory(self, source, target): - raise NotImplementedError("Function not implemented") - - def create_directory(self, path): - raise NotImplementedError("Function not implemented") diff --git a/pytest_splunk_addon/helmut/util/rest_uris.py b/pytest_splunk_addon/helmut/util/rest_uris.py deleted file mode 100644 index 37831b6e7..000000000 --- a/pytest_splunk_addon/helmut/util/rest_uris.py +++ /dev/null @@ -1,125 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from future import standard_library - -standard_library.install_aliases() -from builtins import object - - -class RESTURIS(object): - """ - Simple module to wrap REST endpoints into a consistent set of methods - """ - - URIS = { - "APP": "/servicesNS/{u}/{a}/apps", - "APP_TEMPLATE": "/servicesNS/{u}/{a}/apps/apptemplates", - "APP_LOCAL": "/servicesNS/{u}/{a}/apps/local/", - "APP_INSTALL": "/servicesNS/{u}/{a}/apps/appinstall", - "AUTOMATIC_LOOKUP": "/servicesNS/{u}/{a}/data/props/lookups", - "AUTHENTICATION": "/services/authentication/users", - "CALCUALTED_FIELD": "/servicesNS/{u}/{a}/data/props/calcfields", - "CAPABILITIES": "/services/authorization/capabilities/", - "CHANGEPASSWORD": "/servicesNS/{u}/{a}/authentication/changepassword/", - "CONFIG": "/servicesNS/{u}/{a}/configs/{config}/", - "CLUSTER_CONFIG": "/servicesNS/{u}/{a}/cluster/config", - "CLUSTER_MASTER": "/servicesNS/{u}/{a}/cluster/master", - "CLUSTER_SEARCHHEAD": "/servicesNS/{u}/{a}/cluster/searchhead", - "CLUSTER_SLAVE": "/servicesNS/{u}/{a}/cluster/slave", - "DATAMODEL_REPORT": "/services/datamodel/pivot/{dm}", - "DATAMODEL_ACC": "/services/datamodel/model/", - "DATAMODEL": "/servicesNS/{u}/{a}/datamodel/model/", - "DATAMODEL_ACCELERATION": "/services/datamodel/acceleration", - "DATAMODEL_DOWNLOAD": "/servicesNS/{u}/{a}/data/models/{dm}/download", - "DATAMODEL_PIVOT": "/servicesNS/{u}/{a}/datamodel/pivot/", - "DEPLOYMENT_CLIENT_CONFIG": ("/servicesNS/{u}/{a}/deployment/client/config"), - "DEPLOYMENT_SERVER_CLASSES": ( - "/servicesNS/{u}/{a}/deployment/server/serverclasses" - ), - "DEPLOYMENT_SERVER_CONFIG": ("/servicesNS/{u}/{a}/deployment/server/config"), - "DEPLOYMENT_SERVER_CLIENTS": ("/servicesNS/{u}/{a}/deployment/server/clients"), - "DEPLOYMENT_SERVER_APPLICATION": ( - "/servicesNS/{u}/{a}/deployment/server/applications" - ), - "EVENTTYPE": "/servicesNS/{u}/{a}/saved/eventtypes", - "FIRED_ALERT": "/servicesNS/{u}/{a}/alerts/fired_alerts", - "FIELD": "/servicesNS/{u}/{a}/search/fields", - 
"FIELD_ALIAS": "/servicesNS/{u}/{a}/data/props/fieldaliases", - "FIELD_EXTRACTION": "/servicesNS/{u}/{a}/data/props/extractions", - "FVTAG": "/servicesNS/{u}/{a}/saved/fvtags", - "HTTPAUTH_TOKEN": "/servicesNS/{u}/{a}/authentication/httpauth-tokens", - "INDEX": "/servicesNS/{u}/{a}/data/indexes/", - "INPUT_MONITOR": "/servicesNS/{u}/{a}/data/inputs/monitor", - "INPUT_ONESHOT": "/servicesNS/{u}/{a}/data/inputs/oneshot", - "INPUT_SCRIPT": "/servicesNS/{u}/{a}/data/inputs/script", - "INPUT_TCP_COOKED": "/servicesNS/{u}/{a}/data/inputs/tcp/cooked", - "INPUT_TCP_RAW": "/servicesNS/{u}/{a}/data/inputs/tcp/raw", - "INPUT_UDP": "/servicesNS/{u}/{a}/data/inputs/udp", - "INPUT_EVENTLOG": ("/servicesNS/{u}/{a}/data/inputs/win-event-log-collections"), - "INPUT_REGMON": "/servicesNS/{u}/{a}/data/inputs/WinRegMon", - "INPUT_PERFMON": "/servicesNS/{u}/{a}/data/inputs/win-perfmon", - "INPUT_HOSTMON": "/servicesNS/{u}/{a}/data/inputs/WinHostMon", - "INPUT_NETMON": "/servicesNS/{u}/{a}/data/inputs/WinNetMon", - "INPUT_ADMON": "/servicesNS/{u}/{a}/data/inputs/ad", - "INPUT_PRINTMON": "/servicesNS/{u}/{a}/data/inputs/WinPrintMon", - "JOB": "/servicesNS/{u}/{a}/search/jobs", - "LDAP": "/services/authentication/providers/LDAP/", - "LOOKUP": "/servicesNS/{u}/{a}/data/props/lookups/", - "LOOKUP_TRANSFORM": "/servicesNS/{u}/{a}/data/transforms/lookups/", - "LOOKUP_TABLE_FILES": "/servicesNS/{u}/{a}/data/lookup-table-files", - "LOGIN": "/services/auth/login", - "MACRO": "/servicesNS/{u}/{a}/data/macros", - "MESSAGES": "/servicesNS/{u}/{a}/messages", - "NAVIGATION": "/servicesNS/{u}/{a}/data/ui/nav", - "NTAG": "/servicesNS/{u}/{a}/saved/ntags", - "OPEN_IN_PIVOT_GENERATE": "/services/datamodel/generate", - "PROPERTIES": "/servicesNS/{u}/{a}/properties", - "ROLE": "/services/authorization/roles/", - "REFRESH": "/debug/refresh", - "SAVED_SEARCH": "/servicesNS/{u}/{a}/saved/searches", - "SCHEDULED_VIEW": "/servicesNS/{u}/{a}/scheduled/views", - "SEARCH_COMMANDS": 
"/servicesNS/{u}/{a}/search/commands", - "SOURCETYPE": "/servicesNS/{u}/{a}/saved/sourcetypes", - "SERVER_CONTROL_RESTART": "/services/server/control/restart/", - "SERVER_SETTINGS": "/services/{u}/server-settings/settings", - "ACCESS_CONTROL_XML": "/services/data/ui/manager/accesscontrols", - "TAG": "/servicesNS/{u}/{a}/search/tags", - "TIME": "/servicesNS/{u}/{a}/data/ui/times", - "TRANSFORMS_EXTRACTION": ("/servicesNS/{u}/{a}/data/transforms/extractions"), - "TRANSFORMS_LOOKUP": "/servicesNS/{u}/{a}/data/transforms/lookups/", - "TRANSPARENT_SUMMARIZATION": "/servicesNS/{u}/{a}/admin/summarization", - "TYPEAHEAD": "/servicesNS/{u}/{a}/search/typeahead/", - "USER": "/servicesNS/{u}/{a}/authentication/users", - "UI_MANAGER": "/servicesNS/{u}/{a}/data/ui/manager", - "UI_PREFS": "/servicesNS/{u}/{a}/admin/ui-prefs", - "USER_PREFS": "/servicesNS/{u}/{a}/admin/user-prefs", - "VIEW": "/servicesNS/{u}/{a}/data/ui/views", - "VIEWSTATES": "/servicesNS/{u}/{a}/data/ui/viewstates", - "VIX_INDEXES": "/servicesNS/{u}/{a}/data/vix-indexes", - "VIX_PROVIDERS": "/servicesNS/{u}/{a}/data/vix-providers", - "WORKFLOW_ACTION": "/servicesNS/{u}/{a}/data/ui/workflow-actions", - "RELOAD_ENDPOINT": "/services/configs/conf-{conf}/_reload", - "ROLL_HOT_TO_COLD": "/services/data/indexes/{index}/chill-bucket?bucket_id={bucket_id}", - "INDEXER_S2S_TOKEN": "/services/data/inputs/tcp/splunktcptoken", - "FORWARDER_S2S_TOKEN": "/services/data/outputs/tcp/group", - "SAML": "/services/authentication/providers/SAML", - "JOBS_CREATED_FROM_SAVED_SEARCH": "/servicesNS/{u}/{a}/saved/searches/{name}/history", - "SAML_USER_ROLE_MAP": "/services/{u}/SAML-user-role-map", - "SAML_GROUP": "/services/{u}/SAML-groups", - "SAML_METADATA": "services/{u}/SAML-sp-metadata", - "SAML_AUTH": "/services/{u}/SAML-auth", - "SPLUNK_AUTH": "/services/admin/Splunk-auth/splunk_auth", - } diff --git a/pytest_splunk_addon/helmut/util/restutils.py b/pytest_splunk_addon/helmut/util/restutils.py deleted file mode 100644 index 
6d718e174..000000000 --- a/pytest_splunk_addon/helmut/util/restutils.py +++ /dev/null @@ -1,110 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import print_function - -from future import standard_library - -standard_library.install_aliases() -import urllib.request, urllib.error, urllib.parse -import logging -import urllib.request, urllib.parse, urllib.error -import threading - -from pytest_splunk_addon.helmut.connector.base import Connector - -LOGGER = logging.getLogger("rest util log") - - -class RestUtils(threading.Thread): - def invoke_restAPI( - self, - splunk, - appname="", - arguments={"output_mode": "json"}, - request_type="GET", - acl=None, - splunk_user="", - splunk_pwd="", - request_url="/servicesNS/nobody/system/apps/local", - ): - LOGGER.info("Creating edit a saved search") - if splunk_user == "": - splunk_user = splunk.username - if splunk_pwd == "": - splunk_pwd = splunk.password - - if request_type == "POST": - request_args = arguments - - if request_type == "UPDATE": - request_type = "POST" - request_url = request_url + "/" + appname - request_args = arguments - - if request_type == "GET" or request_type == "DELETE": - request_url = request_url + "/" + appname - request_args = {"output_mode": "json"} - response, content = self.make_http_request( - splunk, request_type, request_url, request_args, splunk_user, splunk_pwd - ) - - response, content = self.make_http_request( - splunk, 
request_type, request_url, request_args, splunk_user, splunk_pwd - ) - - if acl != None: - acl_req_url = request_url + "/" + appname + "/acl" - res, cont = self.make_http_request( - splunk, request_type, acl_req_url, acl, splunk_user, splunk_pwd - ) - - return response, content - - def make_http_request( - self, - splunk, - request_type, - request_url, - request_args="", - splunk_user="", - splunk_pwd="", - ): - """ - This is a REST helper that will generate a http request - using request_type - GET/POST/... - request_url and request_args - """ - if splunk_user == "": - splunk_user = splunk.username - if splunk_pwd == "": - splunk_pwd = splunk.password - restconn = splunk.create_logged_in_connector( - contype=Connector.REST, username=splunk_user, password=splunk_pwd - ) - try: - response, content = restconn.make_request( - request_type, request_url, request_args - ) - return response, content - - except urllib.error.HTTPError as err: - print( - "Http error code is ({0}): {1} : {2}".format( - err.code, err.errno, err.strerror - ) - ) - finally: - restconn.logout() diff --git a/pytest_splunk_addon/helmut/util/rip.py b/pytest_splunk_addon/helmut/util/rip.py deleted file mode 100644 index 229a93fcc..000000000 --- a/pytest_splunk_addon/helmut/util/rip.py +++ /dev/null @@ -1,579 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -#!/usr/bin/python -# vim: set fileencoding=utf-8 : - -""" -Meta -==== - $Id$ - $DateTime$ - $Author$ - $Change$ -""" - -from future import standard_library - -standard_library.install_aliases() -from builtins import object -import urllib.request, urllib.parse, urllib.error -import datetime -import time - -TIMEOUT = 60 -POLL_FREQUENCY = 0.5 - - -class TimeoutException(Exception): - """ - Simple timeout exception - """ - - pass - - -class RESTInPeace(object): - """ - Simple module to wrap REST endpoints into a consistent set of methods - Everything is accessed through servicesNS. - """ - - # Please keep the key in singular form. - URIS = { - "alert_action": "/servicesNS/{u}/{a}/admin/alert_actions", - "app": "/servicesNS/{u}/{a}/apps", - "app_template": "/servicesNS/{u}/{a}/apps/apptemplates", - "app_local": "/servicesNS/{u}/{a}/apps/local", - "app_install": "/servicesNS/{u}/{a}/apps/appinstall", - "automatic_lookup": "/servicesNS/{u}/{a}/data/props/lookups", - "calculated_field": "/servicesNS/{u}/{a}/data/props/calcfields", - "capabilities": "/servicesNS/{u}/{a}/authorization/capabilities", - "changepassword": "/servicesNS/{u}/{a}/authentication/changepassword", - "cluster_config": "/servicesNS/{u}/{a}/cluster/config", - "cluster_master": "/servicesNS/{u}/{a}/cluster/master", - "cluster_searchhead": "/servicesNS/{u}/{a}/cluster/searchhead", - "cluster_slave": "/servicesNS/{u}/{a}/cluster/slave", - "config": "/servicesNS/{u}/{a}/configs", - "conf_event_renderer": ("/servicesNS/{u}/{a}/configs/conf-event_renderers"), - "conf_saved_searches": "/servicesNS/{u}/{a}/configs/conf-savedsearches", - "datamodel": "/servicesNS/{u}/{a}/datamodel/model", - "datamodel_acceleration": "/servicesNS/{u}/{a}/datamodel/acceleration", - "datamodel_report": "/servicesNS/{u}/{a}/datamodel/report", - "deployment_client_config": ("/servicesNS/{u}/{a}/deployment/client/config"), - "deployment_server_class": ( - "/servicesNS/{u}/{a}/deployment/server/serverclasses" - ), - 
"deployment_server_config": ("/servicesNS/{u}/{a}/deployment/server/config"), - "deployment_server_client": ("/servicesNS/{u}/{a}/deployment/server/clients"), - "deployment_server_application": ( - "/servicesNS/{u}/{a}/deployment/server/applications" - ), - "distsearch_peer": "/servicesNS/{u}/{a}/search/distributed/peers", - "eventtype": "/servicesNS/{u}/{a}/saved/eventtypes", - "fired_alert": "/servicesNS/{u}/{a}/alerts/fired_alerts", - "field": "/servicesNS/{u}/{a}/search/fields", - "field_alias": "/servicesNS/{u}/{a}/data/props/fieldaliases", - "field_extraction": "/servicesNS/{u}/{a}/data/props/extractions", - "fvtag": "/servicesNS/{u}/{a}/saved/fvtags", - "httpauth_token": "/servicesNS/{u}/{a}/authentication/httpauth-tokens", - "index": "/servicesNS/{u}/{a}/data/indexes", - "input": "/servicesNS/{u}/{a}/data/inputs", - "input_monitor": "/servicesNS/{u}/{a}/data/inputs/monitor", - "input_oneshot": "/servicesNS/{u}/{a}/data/inputs/oneshot", - "input_script": "/servicesNS/{u}/{a}/data/inputs/script", - "input_tcp_cooked": "/servicesNS/{u}/{a}/data/inputs/tcp/cooked", - "input_tcp_raw": "/servicesNS/{u}/{a}/data/inputs/tcp/raw", - "input_udp": "/servicesNS/{u}/{a}/data/inputs/udp", - "input_eventlog": ("/servicesNS/{u}/{a}/data/inputs/win-event-log-collections"), - "input_regmon": "/servicesNS/{u}/{a}/data/inputs/WinRegMon", - "input_perfmon": "/servicesNS/{u}/{a}/data/inputs/win-perfmon", - "input_hostmon": "/servicesNS/{u}/{a}/data/inputs/WinHostMon", - "input_netmon": "/servicesNS/{u}/{a}/data/inputs/WinNetMon", - "input_admon": "/servicesNS/{u}/{a}/data/inputs/ad", - "input_printmon": "/servicesNS/{u}/{a}/data/inputs/WinPrintMon", - "job": "/servicesNS/{u}/{a}/search/jobs", - "ldap_strategy": "/servicesNS/{u}/{a}/authentication/providers/LDAP", - "license": "/servicesNS/{u}/{a}/licenser/licenses", - "licenser": "/servicesNS/{u}/{a}/licenser", - "licenser_group": "/servicesNS/{u}/{a}/licenser/groups", - "lookup": "/servicesNS/{u}/{a}/data/props/lookups", - 
"lookup_table_file": "/servicesNS/{u}/{a}/data/lookup-table-files", - "macro": "/servicesNS/{u}/{a}/admin/macros", - "message": "/servicesNS/{u}/{a}/messages", - "navigation": "/servicesNS/{u}/{a}/data/ui/nav", - "ntag": "/servicesNS/{u}/{a}/saved/ntags", - "panel": "/servicesNS/{u}/{a}/data/ui/panels", - "property": "/servicesNS/{u}/{a}/properties", - "role": "/servicesNS/{u}/{a}/authorization/roles", - # only simple and stream available, and only edit method works. - "receiver": "/services/receivers", - "saved_search": "/servicesNS/{u}/{a}/saved/searches", - "scheduled_view": "/servicesNS/{u}/{a}/scheduled/views", - "search_command": "/servicesNS/{u}/{a}/admin/commandsconf", - "search_head_cluster": "/servicesNS/{u}/{a}/shcluster", - "server": "/servicesNS/{u}/{a}/server", - "sourcetype": "/servicesNS/{u}/{a}/saved/sourcetypes", - "sourcetype_rename": "/servicesNS/{u}/{a}/saved/sourcetype-rename", - "tag": "/servicesNS/{u}/{a}/search/tags", - "tcp_output_group": "/servicesNS/{u}/{a}/data/outputs/tcp/group", - "time": "/servicesNS/{u}/{a}/data/ui/times", - "transforms_extraction": ("/servicesNS/{u}/{a}/data/transforms/extractions"), - "transforms_lookup": "/servicesNS/{u}/{a}/data/transforms/lookups", - "transparent_summarization": "/servicesNS/{u}/{a}/admin/summarization", - "user": "/servicesNS/{u}/{a}/authentication/users", - "ui_manager": "/servicesNS/{u}/{a}/data/ui/manager", - "ui_pref": "/servicesNS/{u}/{a}/data/ui/prefs", - "ui_tour": "/servicesNS/{u}/{a}/data/ui/ui-tour", - "user_pref": "/servicesNS/{u}/{a}/admin/user-prefs", - "view": "/servicesNS/{u}/{a}/data/ui/views", - "viewstate": "/servicesNS/{u}/{a}/data/ui/viewstates", - "vix_index": "/servicesNS/{u}/{a}/data/vix-indexes", - "vix_provider": "/servicesNS/{u}/{a}/data/vix-providers", - "workflow_action": "/servicesNS/{u}/{a}/data/ui/workflow-actions", - } - - # This list contains function-like URIs. - # Please keep the key in singular form. 
- FUNCTION_URIS = { - "generate_regex": "/servicesNS/{u}/{a}/field_extractor/generate_regex", - } - - SUCCESS = {"GET": "200", "POST": "201", "DELETE": "200"} - - def __init__(self, helmut_rest_connector, user_namespace=None, app_namespace=None): - """ - Pass in a logged-in helmut rest connector. Every call afterwards - will use this connector. Namespaces should be encapsulated inside - the connector. - """ - - self.conn = helmut_rest_connector - - if user_namespace is not None and app_namespace is not None: - self._user = user_namespace - self._app = app_namespace - else: - self._user, self._app = self.conn.namespace.strip("/").split("/") - - self.change_namespace(self._user, self._app) - - def change_namespace(self, user, app): - """ - Change the user/app namespace for all the rest calls. - Note: This does NOT change the user making the rest calls. - - @rtype user: string - @param user: username - - @rtype app: string - @param app: app id - """ - self._user = urllib.parse.quote(user, "") - self._app = app - - for uri_name, uri_value in list(self.URIS.items()): - final_uri_value = uri_value.format(u=self._user, a=self._app) - self.add_endpoint(uri_name, final_uri_value) - for uri_name, uri_value in list(self.FUNCTION_URIS.items()): - final_uri_value = uri_value.format(u=self._user, a=self._app) - self.add_function_endpoint(uri_name, final_uri_value) - - def add_endpoint(self, uri_name, uri_value): - """ - Creates generic create, edit, delete, check methods for the - given endpoint name and value. - - @type uri_name: string - @param uri_name: the name of the endpoint - - @type uri_value: string - @param uri_name: the uri for the endpoint - """ - - def gen_create(*args, **kwargs): - """ - Create - """ - if args: - if kwargs: - args = args[0] + list(kwargs.items()) - body = args - else: - body = kwargs - - return self.conn.make_request("POST", uri_value, body=body) - - gen_create.__doc__ = """ - Create method for the '{ep}' endpoint. 
- - uri: '{uri}' - - @return: the return value from the make_request on the endpoint - """.format( - ep=uri_name, uri=uri_value - ) - gen_create.__name__ = "create_{ep}".format(ep=uri_name) - setattr(self, gen_create.__name__, gen_create) - - def gen_get(id_name, sub_endpoint="", *args, **kwargs): - """ - Get - """ - uri = "{uri}/{id}{sub_ep}".format( - uri=uri_value, - id=urllib.parse.quote(id_name, safe=""), - sub_ep=( - sub_endpoint - if (sub_endpoint == "" or sub_endpoint.startswith("/")) - else "/{s}".format(s=sub_endpoint) - ), - ) - return self.conn.make_request("GET", uri, args, kwargs) - - gen_get.__doc__ = """ - Get method for the '{ep}' endpoint. - - uri: '{uri}' - - @type id_name: string - @param: id_name: the id of the object. - - @type sub_endpoint: string - @param sub_endpoint: child endpoint of the base endpoint - - @return: the return value from the make_request on the endpoint - """.format( - ep=uri_name, uri=uri_value - ) - gen_get.__name__ = "get_{ep}".format(ep=uri_name) - setattr(self, gen_get.__name__, gen_get) - - def gen_reload(*args, **kwargs): - """ - Get - """ - uri = "{uri}/_reload".format(uri=uri_value) - return self.conn.make_request("GET", uri, args, kwargs) - - gen_reload.__doc__ = """ - Reload method for the '{ep}' endpoint. - - uri: '{uri}/_reload' - - uses the '_reload' endpoint off the base endpoing regardless - of the object. - - Note: NOT all REST endpoints support this, please check your endpoint - before attempting to do this. - - @return: the return value from the make_request on the endpoint - """.format( - ep=uri_name, uri=uri_value - ) - gen_reload.__name__ = "reload_{ep}".format(ep=uri_name) - setattr(self, gen_reload.__name__, gen_reload) - - def gen_get_all(*args, **kwargs): - """ - Get - """ - return self.conn.make_request("GET", uri_value, args, kwargs) - - gen_get_all.__doc__ = """ - Get all method for the '{ep}' endpoint. - - uri: '{uri}' - - @type id_name: string - @param: id_name: the id of the object. 
- - @return: the return value from the make_request on the endpoint - """.format( - ep=uri_name, uri=uri_value - ) - gen_get_all.__name__ = "get_all_{ep}".format(ep=uri_name) - setattr(self, gen_get_all.__name__, gen_get_all) - - def gen_edit(id_name, sub_endpoint="", urlparam=None, *args, **kwargs): - """ - Edit - """ - if args: - if kwargs: - args = args[0] + list(kwargs.items()) - body = args - else: - body = kwargs - - uri = "{uri}/{id}{sub_ep}".format( - uri=uri_value, - id=urllib.parse.quote(id_name, safe=""), - sub_ep=( - sub_endpoint - if (sub_endpoint == "" or sub_endpoint.startswith("/")) - else "/{s}".format(s=sub_endpoint) - ), - ) - - return self.conn.make_request("POST", uri=uri, urlparam=urlparam, body=body) - - gen_edit.__doc__ = """ - Edit method for the '{ep}' endpoint. - - uri: '{uri}' - - @type id_name: string - @param: id_name: the id of the object. - - @type sub_endpoint: string - @param sub_endpoint: child endpoint of the base endpoint - - @return: the return value from the make_request on the endpoint - """.format( - ep=uri_name, uri=uri_value - ) - gen_edit.__name__ = "edit_{ep}".format(ep=uri_name) - setattr(self, gen_edit.__name__, gen_edit) - - def gen_delete(id_name, *args, **kwargs): - """ - Delete - """ - uri = "{uri}/{id}".format( - uri=uri_value, id=urllib.parse.quote(id_name, safe="") - ) - return self.conn.make_request("DELETE", uri, args, kwargs) - - gen_delete.__doc__ = """ - Delete method for the '{ep}' endpoint. - - uri: '{uri}' - - @type id_name: string - @param: id_name: the id of the object. - - @return: the return value from the make_request on the endpoint - """.format( - ep=uri_name, uri=uri_value - ) - gen_delete.__name__ = "delete_{ep}".format(ep=uri_name) - setattr(self, gen_delete.__name__, gen_delete) - - def gen_check(id_name, *args, **kwargs): - """ - Check - """ - if not id_name: - # invalid id_name, so we'll return False. 
- return False - - if args: - if kwargs: - args = args[0] + list(kwargs.items()) - body = args - else: - body = kwargs - - uri = "{uri}/{id}".format( - uri=uri_value, id=urllib.parse.quote(id_name, safe="") - ) - response = self.conn.make_request("GET", uri, body=body)[0] - return response["status"] == self.SUCCESS["GET"] - - gen_check.__doc__ = """ - Check method for the '{ep}' endpoint. - - uri: '{uri}' - - @type id_name: string - @param: id_name: the id of the object. - - @rtype: boolean - @return: True if the object exists and False otherwise. - """.format( - ep=uri_name, uri=uri_value - ) - gen_check.__name__ = "check_{ep}".format(ep=uri_name) - setattr(self, gen_check.__name__, gen_check) - - def gen_wait_to_be_created( - id_name, timeout=TIMEOUT, poll_frequency=POLL_FREQUENCY, *args, **kwargs - ): - """ - Wait for the specific item to be created. - - @type timeout: int - @param timeout: the number in second to poll for. - - @type poll_frequency: number - @param poll_frequency: the number in seconds to wait between - each poll - """ - if args: - if kwargs: - args = args[0] + list(kwargs.items()) - body = args - else: - body = kwargs - - uri = "{uri}/{id}".format( - uri=uri_value, id=urllib.parse.quote(id_name, safe="") - ) - - start_time = datetime.datetime.now() - response = self.conn.make_request("GET", uri, body=body)[0] - - while ( - response["status"] != self.SUCCESS["GET"] - and (datetime.datetime.now() - start_time).seconds < timeout - ): - time.sleep(poll_frequency) - response = self.conn.make_request("GET", uri, body=body)[0] - - if response["status"] != self.SUCCESS["GET"]: - raise TimeoutException( - "The entity '{uri}/{id}' was not found after {t} " - "seconds.".format( - uri=uri_value, - id=urllib.parse.quote(id_name, safe=""), - t=timeout, - ) - ) - - gen_wait_to_be_created.__doc__ = """ - Wait to be created method for the '{ep}' endpoint. - - uri: '{uri}' - - @type id_name: string - @param: id_name: the id of the object. 
- - @type timeout: int - @param timeout: the number in second to poll for. - - @type poll_frequency: number - @param poll_frequency: the number in seconds to wait between - each poll - """.format( - ep=uri_name, uri=uri_value - ) - gen_wait_to_be_created.__name__ = "wait_for_{ep}_to_be_created".format( - ep=uri_name - ) - setattr(self, gen_wait_to_be_created.__name__, gen_wait_to_be_created) - - def gen_wait_to_be_deleted( - id_name, timeout=TIMEOUT, poll_frequency=POLL_FREQUENCY, *args, **kwargs - ): - """ - Wait for the specific item to be deleted. - - @type timeout: int - @param timeout: the number in second to poll for. - - @type poll_frequency: number - @param poll_frequency: the number in seconds to wait between - each poll - """ - if args: - if kwargs: - args = args[0] + list(kwargs.items()) - body = args - else: - body = kwargs - - uri = "{uri}/{id}".format( - uri=uri_value, id=urllib.parse.quote(id_name, safe="") - ) - - start_time = datetime.datetime.now() - response = self.conn.make_request("GET", uri, body=body)[0] - - while ( - response["status"] == self.SUCCESS["GET"] - and (datetime.datetime.now() - start_time).seconds < timeout - ): - time.sleep(poll_frequency) - response = self.conn.make_request("GET", uri, body=body)[0] - - if response["status"] == self.SUCCESS["GET"]: - raise TimeoutException( - "The entity '{uri}/{id}' was still found after {t} " - "seconds.".format( - uri=uri_value, - id=urllib.parse.quote(id_name, safe=""), - t=timeout, - ) - ) - - gen_wait_to_be_deleted.__doc__ = """ - Wait to be deleted method for the '{ep}' endpoint. - - uri: '{uri}' - - @type id_name: string - @param: id_name: the id of the object. - - @type timeout: int - @param timeout: the number in second to poll for. 
- - @type poll_frequency: number - @param poll_frequency: the number in seconds to wait between - each poll - """.format( - ep=uri_name, uri=uri_value - ) - gen_wait_to_be_deleted.__name__ = "wait_for_{ep}_to_be_deleted".format( - ep=uri_name - ) - setattr(self, gen_wait_to_be_deleted.__name__, gen_wait_to_be_deleted) - - def add_function_endpoint(self, uri_name, uri_value): - """ - Creates generic run methods for the - given endpoint name and value. - - @type uri_name: string - @param uri_name: the name of the endpoint - - @type uri_value: string - @param uri_name: the uri for the endpoint - """ - - def gen_run(sub_endpoint="", *args, **kwargs): - """ - Acting like calling a function, send GET request - to the specific endpoint and return value from the make_request. - - @type sub_endpoint: string - @param sub_endpoint: child endpoint of the base endpoint - - @return: the return value from the make_request on the endpoint - """ - uri = "{uri}/{sub_ep}".format( - uri=uri_value, - sub_ep=( - sub_endpoint - if (sub_endpoint == "" or sub_endpoint.startswith("/")) - else "/{s}".format(s=sub_endpoint) - ), - ) - return self.conn.make_request("GET", uri, args, kwargs) - - gen_run.__doc__ = """ - Acting like calling a function, send GET request to the - '{ep}' endpoint and return value from the make_request. - - uri: '{uri}' - - @type sub_endpoint: string - @param sub_endpoint: child endpoint of the base endpoint - - @return: the return value from the make_request on the endpoint - """.format( - ep=uri_name, uri=uri_value - ) - gen_run.__name__ = "run_{ep}".format(ep=uri_name) - setattr(self, gen_run.__name__, gen_run) diff --git a/pytest_splunk_addon/helmut/util/searchhelpers.py b/pytest_splunk_addon/helmut/util/searchhelpers.py deleted file mode 100644 index ef3614b1e..000000000 --- a/pytest_splunk_addon/helmut/util/searchhelpers.py +++ /dev/null @@ -1,952 +0,0 @@ -# -# Copyright 2021 Splunk Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import print_function - -from future import standard_library - -standard_library.install_aliases() -import urllib.request, urllib.error, urllib.parse -import logging -import urllib.request, urllib.parse, urllib.error -import threading -import json - -import os -from pytest_splunk_addon.helmut.util.Constants import Constants as const -from pytest_splunk_addon.helmut.connector.base import Connector - -LOGGER = logging.getLogger("search helper log") - - -class SearchHelpers(threading.Thread): - def edit_savedsearch( - self, - splunk, - savedsearchname, - appcontext="search", - arguments={"output_mode": "json"}, - request_type="GET", - acl=None, - splunk_user="", - splunk_pwd="", - ): - LOGGER.info("Creating edit a saved search") - if splunk_user == "": - splunk_user = splunk.username - if splunk_pwd == "": - splunk_pwd = splunk.password - request_url = const.TestConstants["EDIT_SAVED_SEARCH"].format(appcontext) - - if request_type == "POST": - request_args = arguments - - if request_type == "UPDATE": - request_type = "POST" - request_url = request_url + "/" + savedsearchname - request_args = arguments - - if request_type == "GET" or request_type == "DELETE": - request_url = request_url + "/" + savedsearchname - request_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, request_type, request_url, request_args, splunk_user, splunk_pwd - ) - - if acl != None: - 
acl_req_url = request_url + "/" + savedsearchname + "/acl" - res, cont = self.make_http_request( - splunk, request_type, acl_req_url, acl, splunk_user, splunk_pwd - ) - - return response, content - - def edit_savedsearch_with_uxt( - self, - splunk, - savedsearchname, - usercontext="admin", - appcontext="search", - arguments={"output_mode": "json"}, - request_type="GET", - acl=None, - ): - """ - usercontext:applicationcontext namespace - """ - LOGGER.info("Creating edit a saved search") - request_url = const.TestConstants["EDIT_SAVED_SEARCH_USRCTX"].format( - usercontext, appcontext - ) - - if request_type == "POST": - request_args = arguments - - if request_type == "UPDATE": - request_type = "POST" - request_url = request_url + "/" + savedsearchname - request_args = arguments - - if request_type == "GET" or request_type == "DELETE": - request_url = request_url + "/" + savedsearchname - request_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, - request_type, - request_url, - request_args, - splunk.username, - splunk.password, - ) - - if acl != None: - acl_req_url = request_url + "/" + savedsearchname + "/acl" - res, cont = self.make_http_request( - splunk, request_type, acl_req_url, acl, splunk.username, splunk.password - ) - - return response, content - - def tag_actions( - self, - splunk, - fieldname, - fieldvalue, - tagname, - tag_action, - usercontext="admin", - appcontext="search", - request_type="GET", - acl=None, - ): - """ - creates/edits/deletes a tag - REST EDNPOINT - search/fields/{field_name}/tags - """ - tag_url = const.TestConstants["ADD_TAG"].format(appcontext, fieldname) - tag_value_pair = urllib.parse.quote( - fieldname + "=" + fieldvalue + " : " + tagname - ) - - # print 'request type is {0}'.format(request_type) - - print(threading.currentThread().getName(), "starting") - - if request_type == "POST": - tag_args = {"value": fieldvalue, tag_action: tagname} - - if request_type == "GET" or request_type == 
"DELETE": - tag_args = {"output_mode": "json"} - tag_url = ( - const.TestConstants["GET_TAG"].format(appcontext, usercontext) - + "/" - + tag_value_pair - ) - - response, content = self.make_http_request( - splunk, request_type, tag_url, tag_args, splunk.username, splunk.password - ) - - if acl != None: - tag_acl_url = ( - const.TestConstants["GET_TAG"].format(appcontext, usercontext) - + "/" - + tag_value_pair - + "/acl" - ) - response, content = self.make_http_request( - splunk, request_type, tag_acl_url, acl, splunk.username, splunk.password - ) - - return response, content - - def tag_actions_uxt( - self, - splunk, - fieldname, - fieldvalue, - tagname, - tag_action, - usercontext="admin", - appcontext="search", - request_type="GET", - acl=None, - ): - """ - creates/edits/deletes a tag - REST EDNPOINT - search/fields/{field_name}/tags - """ - tag_url = const.TestConstants["ADD_TAG_USRCXT"].format( - usercontext, appcontext, fieldname - ) - tag_value_pair = urllib.parse.quote( - fieldname + "=" + fieldvalue + " : " + tagname - ) - - # print 'request type is {0}'.format(request_type) - - print(threading.currentThread().getName(), "starting") - - if request_type == "POST": - tag_args = {"value": fieldvalue, tag_action: tagname} - - if request_type == "GET" or request_type == "DELETE": - tag_args = {"output_mode": "json"} - tag_url = ( - const.TestConstants["GET_TAG"].format(appcontext, usercontext) - + "/" - + tag_value_pair - ) - - response, content = self.make_http_request( - splunk, request_type, tag_url, tag_args, splunk.username, splunk.password - ) - - if acl != None: - tag_acl_url = ( - const.TestConstants["GET_TAG"].format(appcontext, usercontext) - + "/" - + tag_value_pair - + "/acl" - ) - response, content = self.make_http_request( - splunk, request_type, tag_acl_url, acl, splunk.username, splunk.password - ) - - return response, content - - def edit_eventtype( - self, - splunk, - eventtypename, - search, - appcontext="search", - request_type="GET", - 
splunk_user="admin", - splunk_pwd="changeme", - acl=None, - ): - """ """ - LOGGER.info("create new eventtype") - - eventtype_url = const.TestConstants["EDIT_EVENTTYPE"].format(appcontext) - if request_type == "POST": - eventtype_args = {"name": eventtypename, "search": search} - urllib.parse.urlencode(eventtype_args) - - if acl != None: - eventtype_acl_url = eventtype_url + "/" + eventtypename + "/acl" - response, content = self.make_http_request( - splunk, request_type, eventtype_acl_url, acl, splunk_user, splunk_pwd - ) - - if request_type == "GET" or request_type == "DELETE": - eventtype_args = {"output_mode": "json"} - eventtype_url = ( - const.TestConstants["EDIT_EVENTTYPE"].format(appcontext) - + "/" - + eventtypename - ) - - response, content = self.make_http_request( - splunk, request_type, eventtype_url, eventtype_args, splunk_user, splunk_pwd - ) - return response, content - - def edit_field_transform( - self, - splunk, - fieldExtractionName, - stanza, - extractiontype, - fieldtobeExtracted, - appcontext="search", - request_type="GET", - splunk_user="admin", - splunk_pwd="changeme", - acl=None, - ): - """ """ - LOGGER.info("create field transform using interactive field extractor") - - ifx_url = const.TestConstants["EDIT_IFX"].format(appcontext) - fieldextraction = stanza + " : " + extractiontype + "-" + fieldExtractionName - - if request_type == "POST": - ifx_args = { - "name": fieldExtractionName, - "stanza": stanza, - "type": extractiontype, - "value": fieldtobeExtracted, - } - - if request_type == "DELETE" or request_type == "GET": - ifx_args = {"output_mode": "json"} - ifx_url = ifx_url + "/" + urllib.parse.quote(fieldextraction) - - response, content = self.make_http_request( - splunk, request_type, ifx_url, ifx_args, splunk_user, splunk_pwd - ) - - if acl != None: - ifx_acl_url = ifx_url + "/" + urllib.parse.quote(fieldextraction) + "/acl" - resp, cont = self.make_http_request( - splunk, request_type, ifx_acl_url, acl, splunk_user, splunk_pwd - ) - 
- return response, content - - def edit_field_transform_uxt( - self, - splunk, - fieldExtractionName, - stanza, - extractiontype, - fieldtobeExtracted, - appcontext="search", - usercontext="admin", - request_type="GET", - splunk_user="admin", - splunk_pwd="changeme", - acl=None, - ): - """ """ - LOGGER.info("create field transform using interactive field extractor") - - ifx_url = const.TestConstants["EDIT_IFX_USRCTX"].format(usercontext, appcontext) - fieldextraction = stanza + " : " + extractiontype + "-" + fieldExtractionName - - if request_type == "POST": - ifx_args = { - "name": fieldExtractionName, - "stanza": stanza, - "type": extractiontype, - "value": fieldtobeExtracted, - } - - if request_type == "DELETE" or request_type == "GET": - ifx_args = {"output_mode": "json"} - ifx_url = ifx_url + "/" + urllib.parse.quote(fieldextraction) - - response, content = self.make_http_request( - splunk, request_type, ifx_url, ifx_args, splunk.username, splunk.password - ) - - if acl != None: - ifx_acl_url = ifx_url + "/" + urllib.parse.quote(fieldextraction) + "/acl" - resp, cont = self.make_http_request( - splunk, request_type, ifx_acl_url, acl, splunk.username, splunk.password - ) - - return response, content - - def edit_sourcetype_rename( - self, - splunk, - oldsourcetypename, - newsourcetypename, - appcontext="search", - request_type="GET", - splunk_user="admin", - splunk_pwd="changeme", - acl=None, - ): - """ """ - source_url = const.TestConstants["SOURCE_TYPE_RENAME"].format(appcontext) - - if request_type == "POST": - source_args = {"name": oldsourcetypename, "value": newsourcetypename} - - if request_type == "DELETE" or request_type == "GET": - source_args = {"output_mode": "json"} - source_url = source_url + "/" + oldsourcetypename - - response, content = self.make_http_request( - splunk, request_type, source_url, source_args, splunk_user, splunk_pwd - ) - - if acl != None: - source_acl_url = source_url + "/" + oldsourcetypename + "/acl" - resp, cont = 
self.make_http_request( - splunk, request_type, source_acl_url, acl, splunk_user, splunk_pwd - ) - - return response, content - - # add user context sourcetype rename - def edit_sourcetype_rename_uxt( - self, - splunk, - oldsourcetypename, - newsourcetypename, - appcontext="search", - usercontext="admin", - request_type="GET", - splunk_user="admin", - splunk_pwd="changeme", - acl=None, - ): - """ """ - source_url = const.TestConstants["SOURCE_TYPE_RENAME_USRCXT"].format( - usercontext, appcontext - ) - - if request_type == "POST": - source_args = {"name": oldsourcetypename, "value": newsourcetypename} - - if request_type == "DELETE" or request_type == "GET": - source_args = {"output_mode": "json"} - source_url = source_url + "/" + oldsourcetypename - - response, content = self.make_http_request( - splunk, request_type, source_url, source_args, splunk_user, splunk_pwd - ) - - if acl != None: - source_acl_url = source_url + "/" + oldsourcetypename + "/acl" - resp, cont = self.make_http_request( - splunk, request_type, source_acl_url, acl, splunk_user, splunk_pwd - ) - - return response, content - - def edit_field_alias( - self, - splunk, - alias_name, - stanza_name, - field_name, - field_alias, - appcontext="search", - request_type="GET", - acl=None, - ): - LOGGER.info("create edit field alias") - alias_url = const.TestConstants["EDIT_FELD_ALIAS"].format(appcontext) - field_alias_name = urllib.parse.quote( - stanza_name + " : " + "FIELDALIAS-" + alias_name - ) - - if request_type == "POST": - field_name = "alias" + "." 
+ field_name - alias_args = { - "name": alias_name, - "stanza": stanza_name, - field_name: field_alias, - } - - if request_type == "DELETE" or request_type == "GET": - alias_url = alias_url + "/" + field_alias_name - alias_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, - request_type, - alias_url, - alias_args, - splunk_user=splunk.username, - splunk_pwd=splunk.password, - ) - - if acl != None: - alias_acl_url = alias_url + "/" + field_alias_name + "/acl" - resp, cont = self.make_http_request( - splunk, - request_type, - alias_acl_url, - acl, - splunk_user=splunk.username, - splunk_pwd=splunk.password, - ) - - return response, content - - def edit_field_alias_uxt( - self, - splunk, - alias_name, - stanza_name, - field_name, - field_alias, - usercontext="admin", - appcontext="search", - request_type="GET", - acl=None, - ): - LOGGER.info("create edit field alias") - alias_url = const.TestConstants["EDIT_FELD_ALIAS_USRCXT"].format( - usercontext, appcontext - ) - field_alias_name = urllib.parse.quote( - stanza_name + " : " + "FIELDALIAS-" + alias_name - ) - - if request_type == "POST": - field_name = "alias" + "." 
+ field_name - alias_args = { - "name": alias_name, - "stanza": stanza_name, - field_name: field_alias, - } - - if request_type == "DELETE" or request_type == "GET": - alias_url = alias_url + "/" + field_alias_name - alias_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, - request_type, - alias_url, - alias_args, - splunk_user=splunk.username, - splunk_pwd=splunk.password, - ) - - if acl != None: - alias_acl_url = alias_url + "/" + field_alias_name + "/acl" - resp, cont = self.make_http_request( - splunk, - request_type, - alias_acl_url, - acl, - splunk_user=splunk.username, - splunk_pwd=splunk.password, - ) - - return response, content - - def edit_calc_fields( - self, - splunk, - calc_field_name, - stanza_name, - calc_field_value, - appcontext="search", - request_type="GET", - acl=None, - ): - LOGGER.info("Creates calculcated fields") - calc_url = const.TestConstants["EDIT_CALC_FIELDS"].format(appcontext) - - derived_cal_field_name = urllib.parse.quote( - stanza_name + " : " + "EVAL-" + calc_field_name - ) - - if request_type == "POST": - calc_args = { - "name": calc_field_name, - "stanza": stanza_name, - "value": calc_field_value, - } - - if request_type == "DELETE" or request_type == "GET": - calc_url = calc_url + "/" + derived_cal_field_name - calc_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, - request_type, - calc_url, - calc_args, - splunk_user=splunk.username, - splunk_pwd=splunk.password, - ) - - if acl != None: - calc_acl_url = calc_url + "/" + derived_cal_field_name + "/acl" - resp, cont = self.make_http_request( - splunk, - request_type, - calc_acl_url, - acl, - splunk.username, - splunk.password, - ) - - return response, content - - def edit_field_extraction( - self, - splunk, - regex, - source_key, - transform_name, - appcontext="search", - request_type="GET", - acl=None, - ): - LOGGER.info("Creates edits field extractions") - ext_url = 
const.TestConstants["EDIT_FIELD_EXTRACTION"].format(appcontext) - - if request_type == "POST": - ext_args = { - "REGEX": regex, - "SOURCE_KEY": source_key, - "name": transform_name, - } - - if request_type == "DELETE" or request_type == "GET": - ext_url = ext_url + "/" + transform_name - ext_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, request_type, ext_url, ext_args, splunk.username, splunk.password - ) - - if acl != None: - ext_acl_url = ext_url + "/" + transform_name + "/" + "acl" - resp, cont = self.make_http_request( - splunk, request_type, ext_acl_url, acl, splunk.username, splunk.password - ) - - return response, content - - def edit_field_extraction_uxt( - self, - splunk, - regex, - source_key, - transform_name, - usercontext="admin", - appcontext="search", - request_type="GET", - acl=None, - ): - LOGGER.info("Creates edits field extractions") - ext_url = const.TestConstants["EDIT_FIELD_EXTRACTION_USRCXT"].format( - usercontext, appcontext - ) - - if request_type == "POST": - ext_args = { - "REGEX": regex, - "SOURCE_KEY": source_key, - "name": transform_name, - } - - if request_type == "DELETE" or request_type == "GET": - ext_url = ext_url + "/" + transform_name - ext_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, request_type, ext_url, ext_args, splunk.username, splunk.password - ) - - if acl != None: - ext_acl_url = ext_url + "/" + transform_name + "/" + "acl" - resp, cont = self.make_http_request( - splunk, request_type, ext_acl_url, acl, splunk.username, splunk.password - ) - - return response, content - - def edit_dashboard( - self, - splunk, - dashboard_name, - dashboard_xml, - appcontext, - request_type="GET", - acl=None, - ): - LOGGER.info("Creates a dashboard") - dashboard_url = const.TestConstants["EDIT_DASHBOARD"].format(appcontext) - - if request_type == "POST": - dashboard_args = {"name": dashboard_name, "eai:data": dashboard_xml} - - if request_type == 
"DELETE" or request_type == "GET": - dashboard_url = dashboard_url + "/" + dashboard_name - dashboard_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, - request_type, - dashboard_url, - dashboard_args, - splunk.username, - splunk.password, - ) - - if acl != None: - dashboard_acl_url = dashboard_url + "/" + dashboard_name - response, content = self.make_http_request( - splunk, - request_type, - dashboard_acl_url, - acl, - splunk.username, - splunk.password, - ) - - return response, content - - def edit_lookup( - self, - splunk, - lookupfilepath, - lookupfilename, - lookupname, - appcontext="search", - request_type="GET", - acl=None, - ): - - LOGGER.info("upload a lookup file") - if acl == "sharing=global": - lookup_url = "/servicesNS/nobody/{0}/data/lookup-table-files".format( - appcontext - ) - else: - lookup_url = const.TestConstants["UPLOAD_LOOKUP_FILE"].format(appcontext) - lookup_path = os.path.join( - splunk.splunk_home, "var", "run", "splunk", "lookup_tmp" - ) - lookup_args = { - "eai:data": lookup_path + os.sep + lookupfilename, - "name": lookupfilename, - } - cmd = 'cmd python -c "import os; os.makedirs(\\"{0}\\")"'.format(lookup_path) - (code, stdout, stderr) = splunk.execute(cmd) - # copy lookup file to lookup_tmp folder in $splunk_home/var/run/splunk - splunk._file_utils.send(lookupfilepath, lookup_path) - request_type = "POST" - response, content = self.make_http_request( - splunk, - request_type, - lookup_url, - lookup_args, - splunk.username, - splunk.password, - ) - LOGGER.info("Create a lookup using the lookup file") - lookup_url = const.TestConstants["CREATE_TABLE_LOOKUP"].format(appcontext) - lookup_args = {"name": lookupname, "filename": lookupfilename} - request_type = request_type - response, content = self.make_http_request( - splunk, - request_type, - lookup_url, - lookup_args, - splunk.username, - splunk.password, - ) - - return response, content - - def edit_lookup_file( - self, - splunk, - 
ssh_user="", - ssh_pwd="", - lookupfilepath="", - lookupfilename="", - lookupname="", - request_type="GET", - appcontext="search", - ): - - lookup_url = const.TestConstants["UPLOAD_LOOKUP_FILE"].format(appcontext) - - if request_type == "POST": - LOGGER.info("upload a lookup file") - lookup_path = os.path.join( - splunk.splunk_home, "var", "run", "splunk", "lookup_tmp" - ) - lookup_path_local = os.path.join(lookupfilepath, lookupfilename) - splunk.connection.execute("mkdir " + lookup_path) - splunk._file_utils.send(lookup_path_local, lookup_path) - LOGGER.info("Create a lookup using the lookup file") - lookup_args = { - "eai:data": os.path.join(lookup_path, lookupfilename), - "name": lookupname, - } - - if request_type == "DELETE" or request_type == "GET": - lookup_url = lookup_url + "/" + lookupname - lookup_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, - request_type, - lookup_url, - lookup_args, - splunk.username, - splunk.password, - ) - return response, content - - def edit_macros( - self, - splunk, - appcontext, - macro_name, - macro_definition, - request_type="GET", - acl=None, - ): - LOGGER.info("Create edit a macro") - macro_url = const.TestConstants["EDIT_MACRO"].format(appcontext) - - if request_type == "POST": - macro_args = {"name": macro_name, "definition": macro_definition} - - if acl != None: - macro_acl_url = macro_url + "/" + macro_name - response, content = self.make_http_request( - splunk, - request_type, - macro_acl_url, - acl, - splunk.username, - splunk.password, - ) - - if request_type == "DELETE" or request_type == "GET": - macro_url = macro_url + "/" + macro_name - macro_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, - request_type, - macro_url, - macro_args, - splunk.username, - splunk.password, - ) - - return response, content - - def embed_report( - self, - splunk, - reportname, - appcontext="search", - arguments={"output_mode": "json"}, - 
request_type="GET", - acl=None, - ): - LOGGER.info("Create embedded report") - request_url = const.TestConstants["EMBED_REPORT"].format(appcontext, reportname) - request_args = {"output_mode": "json"} - response, content = self.make_http_request( - splunk, - request_type, - request_url, - request_args, - splunk.username, - splunk.password, - ) - - return response, content - - def edit_datamodel( - self, - splunk, - appcontext, - dm_name, - dm_description, - request_type="GET", - acl=None, - splunk_user="admin", - splunk_pwd="changed", - ): - LOGGER.info("Edit data model") - - dm_url = const.TestConstants["EDIT_DATAMODEL"].format(appcontext) - - if request_type == "POST": - dm_args = {"name": dm_name} - - if acl != None: - dm_acl_url = dm_url + "/" + dm_name - response, content = self.make_http_request( - splunk, request_type, dm_url, dm_args, splunk_user, splunk_pwd - ) - - if request_type == "DELETE" or request_type == "GET": - dm_url = dm_url + "/" + dm_name - dm_args = {"output_mode": "json"} - - response, content = self.make_http_request( - splunk, request_type, dm_url, dm_args, splunk_user, splunk_pwd - ) - - return response, content - - def check_geobin(self, nightlysplunk, statsfunc, geobin): - query = ( - "search index=geo checkin.geolong>=%s checkin.geolong<%s checkin.geolat>=%s checkin.geolat<%s | stats %s" - % ( - geobin["_geo_bounds_west"], - geobin["_geo_bounds_east"], - geobin["_geo_bounds_south"], - geobin["_geo_bounds_north"], - statsfunc, - ) - ) - job = nightlysplunk.jobs().create(query) - job.wait() - result = job.get_results()[0] - for key in result: - self.logger.info(result) - assert result[key] == geobin[key] - - def make_http_request( - self, - splunk, - request_type, - request_url, - request_args, - splunk_user="admin", - splunk_pwd="changed", - ): - """ - This is a REST helper that will generate a http request - using request_type - GET/POST/... 
- request_url and request_args - """ - restconn = splunk.create_logged_in_connector( - contype=Connector.REST, username=splunk_user, password=splunk_pwd - ) - try: - response, content = restconn.make_request( - request_type, request_url, request_args - ) - return response, content - - except urllib.error.HTTPError as err: - print( - "Http error code is ({0}): {1} : {2}".format( - err.code, err.errno, err.strerror - ) - ) - - def get_fired_alerts(self, splunk, saved_search_name): - LOGGER.info("Creating edit a saved search") - splunk_user = splunk.username - splunk_pwd = splunk.password - req_args = {"output_mode": "json"} - req_url = const.TestConstants["FIRED_ALERT_DETAILS"].format(saved_search_name) - response, content = self.make_http_request( - splunk, "GET", req_url, req_args, splunk_user, splunk_pwd - ) - parsedresponse = json.loads(content) - alertlisting = [] - for job in parsedresponse["entry"]: - alertlisting.append(job["content"]) - return alertlisting diff --git a/pytest_splunk_addon/helmut/util/string_unicode_convert.py b/pytest_splunk_addon/helmut/util/string_unicode_convert.py deleted file mode 100644 index 0f3edac9a..000000000 --- a/pytest_splunk_addon/helmut/util/string_unicode_convert.py +++ /dev/null @@ -1,31 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -def normalize_to_unicode(value): - """ - string convert to unicode - """ - if hasattr(value, "decode") and not isinstance(value, str): - return value.decode("utf-8") - return value - - -def normalize_to_str(value): - """ - unicode convert to string - """ - if hasattr(value, "encode") and isinstance(value, str): - return value.encode("utf-8") - return value diff --git a/pytest_splunk_addon/helmut/util/ymlparser.py b/pytest_splunk_addon/helmut/util/ymlparser.py deleted file mode 100644 index 472270465..000000000 --- a/pytest_splunk_addon/helmut/util/ymlparser.py +++ /dev/null @@ -1,102 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import os -from builtins import object -from builtins import str - -import yaml -from pytest_splunk_addon.helmut.util.hosts import Host, Hosts - -LOGGER = logging.getLogger("..util.ymlparser") -YAML_HOLDER = {} - - -class YMLParser(object): - """ - Returns the entire contents of the yml file as a dictionary - """ - - @classmethod - def extract_key_values(self, yaml_file): - if os.path.isabs(yaml_file) == False: - path = os.getcwd() - while ( - path.endswith("new_test") == False - and path.endswith("new_test" + os.sep) == False - ): - path = os.path.abspath(os.path.join(path, os.pardir)) - # if the input yaml_file path is not absolute, then we expect that - # the path is relative to new_test/config/. 
so new_test/config/ - # + yaml_file path - yaml_file = path + os.sep + "config" + os.sep + yaml_file - if os.path.isfile(yaml_file) == False: - msg = "Invalid yaml_file path:" + yaml_file - LOGGER.warn(msg) - raise Exception(msg) - - if not yaml_file in YAML_HOLDER: - LOGGER.info("Trying to Open yml file: " + yaml_file) - file_object = open(yaml_file) - LOGGER.info("Successfully opened yml file: " + yaml_file) - yaml_dict = yaml.load(file_object) - for item in yaml_dict: - LOGGER.info( - "item in yaml file: {key}: {value}".format( - key=item, value=str(yaml_dict[item]) - ) - ) - YAML_HOLDER[yaml_file] = yaml_dict - return YAML_HOLDER[yaml_file] - - """ - Returns all the values of a given key in the yml file - """ - - @classmethod - def get_values(self, key, yaml_file): - dict = self.extract_key_values(yaml_file) - if key in dict: - return dict[key] - else: - return None - - """ - Returns the hosts that are in the yml file as a Hosts class. - """ - - @classmethod - def get_hosts(self, yaml_file): - if yaml_file is None: - return None - config = self.extract_key_values(yaml_file) - if not "hosts" in config: # if no hosts in the yml file - return None - host_list = config["hosts"] - hosts = Hosts() - for host in host_list: - hosts.add_host( - Host( - host_name=host, - ssh_user=config["ssh_user"], - ssh_password=config["ssh_password"], - splunk_home=config["splunk_home"], - ssh_domain=config["ssh_domain"], - ssh_port=config.get("ssh_port", 22), - ssh_identity=config.get("ssh_identity"), - ) - ) - return hosts diff --git a/pytest_splunk_addon/helmut_lib/SearchUtil.py b/pytest_splunk_addon/helmut_lib/SearchUtil.py deleted file mode 100644 index 21eac8681..000000000 --- a/pytest_splunk_addon/helmut_lib/SearchUtil.py +++ /dev/null @@ -1,1219 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import print_function - -from future import standard_library - -standard_library.install_aliases() -from builtins import zip -from builtins import str -from builtins import map -from builtins import object -import os -import re -import sys -import time -import string -import difflib -import subprocess -import csv -import io - -try: - import py -except ImportError: - # We're assuming here that pytest is unavailable - print("Pytest unavailable, running tests in dev context") - -TIMEOUT = 120 - - -class SearchUtilException(Exception): - def __init__(self, message): - self.message = message - - def __str__(self): - return repr(self.message) - - -class SearchUtil(object): - def __init__(self, jobs, logger): - """ - Constructor of the SearchUtil object. - """ - self.logger = logger - self.jobs = jobs - - def failTest(self, message): - """ - Fail the test appropriately, QA uses pytest. 
Dev gets a generic message - """ - if "pytest" in list(sys.modules.keys()): - py.test.fail(message) - else: - raise SearchUtilException(message) - - def checkQueryContainsRegex( - self, - query, - field, - regex, - interval=15, - retries=4, - number_results=100, - max_time=60, - ): - tryNum = 0 - r = re.compile(regex) - while tryNum <= retries: - job = self.jobs.create( - query, auto_finalize_ec=number_results, max_time=max_time - ) - job.wait() - results = job.get_results() - for result_no, result in enumerate(results.as_list): - if result_no > number_results: - self.logger.debug( - "could not find re: %s in first %d results", - regex, - number_results, - ) - return False - elif r.match(str(result[field])): - self.logger.debug( - "result['%s']='%s' matches re: %s", - field, - str(result[field]), - regex, - ) - return True - tryNum += 1 - time.sleep(interval) - self.logger.debug("could not find re: %s", regex) - return False - - def checkQueryCount(self, query, targetCount, interval=15, retries=4, max_time=120): - self.logger.debug("query is %s", query) - tryNum = 0 - while tryNum <= retries: - job = self.jobs.create(query, max_time=max_time) - job.wait(max_time) - result_count = job.get_result_count() - if result_count == targetCount: - return True - else: - self.logger.info( - "Count of results is not as expected, it is %d. 
Expected %d", - result_count, - targetCount, - ) - tryNum += 1 - time.sleep(interval) - - return False - - def checkQueryCountIsGreaterThanZero( - self, query, interval=15, retries=4, max_time=120 - ): - self.logger.debug("query is %s", query) - tryNum = 0 - while tryNum <= retries: - job = self.jobs.create(query, auto_finalize_ec=200, max_time=max_time) - job.wait(max_time) - result_count = len(job.get_results()) - if result_count > 0: - self.logger.debug("Count of results is > 0, it is:%d", result_count) - return True - else: - self.logger.debug("Count of results is 0") - tryNum += 1 - time.sleep(interval) - return False - - def deleteEventsFromIndex(self, index_name="*", max_wait_time=120): - """ - Hides events belonging to specified index from SPL Search using ``| delete`` command. - - Args: - index_name: Name of the index to delete events from. - max_wait_time: Amount of time job can wait to finish. - """ - query = f"search index={index_name} | delete" - self.logger.debug("query is %s", query) - try: - job = self.jobs.create(query) - job.wait(max_wait_time) - self.logger.info("Successfully deleted old events") - - except Exception as e: - self.logger.debug("CAREFUL - Could not delete old events!") - self.logger.debug(e) - - def checkQueryCountIsZero(self, query, max_time=120): - self.logger.debug("query is %s", query) - tryNum = 0 - - job = self.jobs.create(query, auto_finalize_ec=200, max_time=max_time) - job.wait(max_time) - result_count = len(job.get_results()) - - if result_count == 0: - self.logger.debug("Count of results is 0") - return True, None - else: - self.logger.debug("Count of results is > 0, it is:%d", result_count) - return False, job.get_results() - - def get_search_results(self, query, max_time=120): - """ - Execute a search query - Args: - query (str): query string for Splunk Search - max_time: Amount of time job can wait to finish. 
- Returns: - events that match the query - """ - - self.logger.debug("query is %s", query) - try: - job = self.jobs.create(query, auto_finalize_ec=120, max_time=max_time) - job.wait(max_time) - return job.get_results() - except Exception as e: - self.logger.debug("Errors when executing search!!!") - self.logger.debug(e) - - def checkQueryFields( - self, - query, - expected, # can be list, set, tuple, or string - expectedMinRow=1, - interval=15, - retries=4, - namespace="SA-ThreatIntelligence", - ): - - """Execute a query and check for a matching set (not necessarily - complete) of output fields, and secondarily for a minimum - number of results. - """ - - tryNum = 0 - status = False - - if not isinstance(expected, set): - expected = set(expected) - - while tryNum <= retries and not status: - - job = self.jobs.create(query, auto_finalize_ec=120, max_time=120) - job.wait(TIMEOUT) - results = job.get_results() - messages = job.get_messages() - - if len(job.get_results()) > 0: - fields = list(results[0].keys()) - if expected.issubset(fields): - self.wrapLogOutput( - msg="All expected fields found in result:", - actual=",".join(fields), - expected=",".join(expected), - errors="", - ) - status = True - else: - self.wrapLogOutput( - msg="Expected fields missing from result:", - actual=",".join(fields), - expected=",".join(expected), - errors=",".join(expected.difference(fields)), - ) - else: - self.wrapLogOutput( - msg="Zero results from search:", - actual="", - expected=",".join(expected), - errors="\n".join(messages), - ) - - if not status: - tryNum += 1 - time.sleep(interval) - - return status - - def wrapLogOutput(self, msg, actual, expected, errors, level="debug"): - """Simple wrapper method for showing expected and actual output - in the debug log. Pass in level to adjust level from default (debug) - to error or warning. 
- """ - - errOutput = string.Template( - """${msg} - ===ACTUAL===== - ${actual} - ===EXPECTED=== - ${expected} - ===ERRORS===== - ${errors}\n""" - ) - - output = errOutput.substitute( - {"msg": msg, "actual": actual, "expected": expected, "errors": errors} - ) - - # This is solely so the definition of errOutput above can look good - # while making the debug output readable. - if level == "debug": - self.logger.debug( - "\n".join(map(str.strip, list(map(str, output.splitlines())))) - ) - elif level == "warning": - self.logger.warning( - "\n".join(map(str.strip, list(map(str, output.splitlines())))) - ) - elif level == "error": - self.logger.error( - "\n".join(map(str.strip, list(map(str, output.splitlines())))) - ) - else: - # Whatever, if level specified badly just print as debug - self.logger.debug( - "\n".join(map(str.strip, list(map(str, output.splitlines())))) - ) - - def compareContentIgnoreOrder(self, actual, expected): - """Compare two string sequences, generating a unified diff.""" - # Strip EOL to avoid a common error. Note that one or more - # inputs may be an empty list so check length first. 
- self.wrapLogOutput( - msg="Before Strip actual", - actual="", - expected="", - errors=actual, - ) - - self.wrapLogOutput( - msg="Before Strip expected", - actual="", - expected="", - errors=expected, - ) - - if len(actual) > 0: - actual[-1] = actual[-1].rstrip("\n") - - if len(expected) > 0: - expected[-1] = expected[-1].rstrip("\n") - - actual[-1] = actual[-1] + "\n" - expected[-1] = expected[-1] + "\n" - - self.wrapLogOutput( - msg="After Strip actual", - actual="", - expected="", - errors=actual, - ) - - self.wrapLogOutput( - msg="After Strip expected", - actual="", - expected="", - errors=expected, - ) - - status = True - # Compare the lines - for line in expected: - if line not in actual: - self.wrapLogOutput( - msg="The following line is not in the output from the command", - actual="", - expected="", - errors=line, - ) - status = False - - # Make sure the number of lines is the expected - if len(expected) != len(actual): - self.wrapLogOutput( - msg="The line counts differ; the canon file and the output are different", - actual="", - expected="", - errors=line, - ) - status = False - - if status is False: - # Log the difference - result = difflib.unified_diff(actual, expected) - - # result is a generator, so obtain the mismatched lines - mismatches = [] - for line in result: - mismatches.append(line) - - if len(mismatches) > 0: - self.wrapLogOutput( - msg="Script output did not match expected", - actual="", - expected="", - errors="\n".join(mismatches), - ) - - return status - - def checkExactQueryContent( - self, - query, - expected, - namespace="SA-ThreatIntelligence", - interval=15, - retries=4, - reformat=False, - ): - - """Check for exact content in a specific search result. - Script will issue failure IF no results are obtained - ( len(result) == 0 ) OR if the expected text does not exist - in the raw result text. - - If reformat is True, expected can be one of the following: - 1. a csv file name in ./data/ ; the csv can be exported by splunk web - 2. 
a list of list whose first row is header like a csv - - This function is for use only by tests which do NOT - require passing input to Splunk on STDIN. - """ - - tryNum = 0 - status = False - - path_to_output = os.path.join(os.getcwd(), "data") - try: - output = open(os.path.join(path_to_output, expected), "r") - except IOError: - self.failTest("Expected output file not found.") - except AttributeError: - if not reformat: - raise - # generate csv from expected (list of list) - output = io.StringIO() - csv.writer(output).writerows(expected) - output.seek(0) - - while tryNum <= retries and not status: - job = self.jobs.create(query, auto_finalize_ec=10, max_time=60) - job.wait(TIMEOUT) - results = job.get_results() - messages = job.get_messages() - - if len(results) > 0: - # Careful when using _raw as it is a splunk.search.RawEvent, - # not a string. Casting to a string converts it to a - # lxml.etree._ElementStringResult which behaves like a string. - if reformat: - # reformat both actual and expected so that csv file - # exported by splunk web is acceptable - actual = [ - "%s\n" % [str(field) for field in row] - for row in [list(results[0].keys())] - + [list(row.values()) for row in results] - ] - expected = [ - "%s\n" % [str(field) for field in row] - for row in csv.reader(output) - ] - else: - actual = [",".join(list(results[0].keys())) + "\n"] - for result in results: - actual.append(",".join(map(str, list(result.values()))) + "\n") - expected = output.readlines(True) - status = self.compareContent(actual, expected) - else: - self.wrapLogOutput( - msg="Zero results from search:", - actual="", - expected=expected, - errors="\n".join(messages), - ) - - if not status: - tryNum += 1 - time.sleep(interval) - - output.close() - return status - - def checkGapSearch( - self, - query, - warningGapSizeLimit, - warningGapCountLimit, - interval=1, - retries=0, - namespace="splunk_for_vmware", - ): - - """ - Execute a gap detection search to check that a certain number or 
less of - gaps exist. Start by checking for gaps in a warning range. Issue Error for gaps longer - than the warning range. Or if the number of gaps in the warning range is greater than a - set threshold. - INPUTS: - query - gap detection search - warningGapSizeLimit - max size for a gap to be considered a warning (in seconds) - warningGapCountLimit - number warning sized gaps allowed before error is raised - interval - wait period (in seconds) if no data returned before retrying (default 1) - retries - number of times a gap search is rerun if it returns no results (default 0) - namespace - the app namespace in which to run the search (default splunk_for_vmware) - OUTPUTS: - status - boolean true for no errors, false for errors - LOGGING OUTPUT: - Logs the number of warning gaps. - Logs the number of error gaps. - GAP DETECTION SEARCHES: - A gap detection search is any search in which the result rows are gaps with a field delta - that is valued as their duration in seconds. - Typical Structure: - search [Event Gathering Search Command] - | streamstats first(_time) as prev_endtime window=1 current=f global=false by [Identifier] - | eval delta = (prev_endtime - _time) - | search delta>[Expected time between events of the same identifier] - | fields delta - """ - - tryNum = 0 - warningCount = 0 - errorCount = 0 - status = True - - while tryNum <= retries and status: - - job = self.jobs.create(query, auto_finalize_ec=10, max_time=60) - job.wait(TIMEOUT) - results = job.get_results() - messages = job.get_messages() - - self.logger.debug( - 'Ran Gap Detection search="%s" warningGapCountLimit=%s and warningGapSizeLimit=%s', - query, - str(warningGapCountLimit), - str(warningGapSizeLimit), - ) - - if len(job.get_results()) > 0: - warningCount = 0 - errorCount = 0 - for result in results: - if float(str(result["delta"])) <= warningGapSizeLimit: - # This is a warning sized gap. 
just report the warning and increase the warningCount - warningCount += 1 - self.wrapLogOutput( - msg="Warning Sized Gap detected with detection search", - actual="gapSize=" + str(result["delta"]), - expected="no gaps", - errors="None", - level="warning", - ) - else: - # The gap is bigger than the warning limit, flag error, set status to false. - errorCount += 1 - self.wrapLogOutput( - msg="Error Sized Gap detected with detection search", - actual="gapSize=" + str(result["delta"]), - expected="no gaps, or at least gaps shorter than " - + str(warningGapSizeLimit), - errors="Error sized gap detected.", - level="error", - ) - status = False - - if warningCount >= warningGapCountLimit: - # Too many Warning sized gaps, this test failed. - status = False - self.wrapLogOutput( - msg="Too many warning sized gaps detected with detection search", - actual="warningGapCount=" + str(warningCount), - expected="warningGapCountLimit=" + str(warningGapCountLimit), - errors="Too many warning gaps detected.", - level="error", - ) - elif warningCount > 0: - self.wrapLogOutput( - msg="Safe number of warning sized gaps detected with detection search", - actual="warningGapCount=" + str(warningCount), - expected="warningGapCountLimit=" + str(warningGapCountLimit), - errors="", - level="warning", - ) - else: - self.wrapLogOutput( - msg="No warning sized gaps detected with detection search", - actual="warningGapCount=" + str(warningCount), - expected="warningGapCountLimit=" + str(warningGapCountLimit), - errors="", - level="debug", - ) - - else: - self.wrapLogOutput( - msg="PASS: Zero results from gap detection search.", - actual="", - expected="", - errors="\n".join(messages), - ) - - if status: - tryNum += 1 - time.sleep(interval) - - self.wrapLogOutput( - msg="Error-Sized Gap Detection Results for search", - actual="errorGapCount=" + str(errorCount), - expected="0", - errors="", - level="debug", - ) - - return status - - def checkFieldAgainstCanon( - self, query, field, canon, 
interval=30, retries=4, namespace="splunk_for_vmware" - ): - """ - Execute a search that returns results containing a particular field. Then - check that every value of that field from every result is included in the - canon. Also check that every value in canon is represented in results. - Duplicates in the results set of values that exist in the canon are a pass. - Please note neither set can be empty and still pass (result nor canon) - INPUTS: - query - search - field - name of the field you are checking (string) - canon - set of values that should be contained in the results for the field (set) - interval - wait period (in seconds) if no data returned before retrying (default 30) - retries - number of times a gap search is rerun if it returns no results (default 4) - namespace - the app namespace in which to run the search (default splunk_for_vmware) - OUTPUTS: - status - boolean true for no errors, false for errors - LOGGING OUTPUT: - Logs the values in canon and not in results. - Logs the values in results and not in canon. 
- """ - status = False - tryNum = 0 - - self.logger.debug('Running canon test with canon="%s"', str(canon)) - - while tryNum <= retries and not status: - job = self.jobs.create(query, auto_finalize_ec=10, max_time=60) - job.wait(TIMEOUT) - results = job.get_results() - - self.logger.debug('Ran canon test search="%s"', query) - - if len(job.get_results()) > 0: - # Build Result Set - actual = set() - for result in results: - actual.add(str(result[field])) - - if canon == actual: - self.wrapLogOutput( - "PASS: results match canon exactly", - str(actual), - str(canon), - "None.", - "debug", - ) - status = True - else: - self.wrapLogOutput( - "FAIL: actual and canon do not match", - str(actual), - str(canon), - 'error="results do not match canon"', - "error", - ) - self.logger.debug( - "Results in canon and not in actual: %s", - str(canon.difference(actual)), - ) - self.logger.debug( - "Results in actual and not in canon: %s", - str(actual.difference(canon)), - ) - else: - self.logger.debug( - "No results from canon test search retrying after wait interval... (unless max tries exceeded)" - ) - - if not status: - tryNum += 1 - time.sleep(interval) - - return status - - def checkQueryErrorMessage( - self, query, expected, namespace="SA-ThreatIntelligence" - ): - - """Check for specific error text from a search. - Unlike checkQueryContent(), it is typically not necessary - to repeat a test we expect to fail, so we do not retry. - """ - status = False - job = self.jobs.create(query, max_time=60) - job.wait(TIMEOUT) - messages = job.get_messages() - if len(messages) > 0: - errors = messages.get("error", None) - if errors is not None: - matches = [error for error in errors if expected in error] - if any(matches): - # Expected error message found: PASS - self.wrapLogOutput( - msg="Expected error text found:", - actual="\n".join(matches), - expected=expected, - errors="N/A", - ) - status = True - else: - # No matches, but error messages exist: FAIL. 
- pass - else: - # No error messages exist: FAIL. - pass - - if not status: - self.wrapLogOutput( - msg="Expected error text NOT found:", - actual="\n".join(map(str, messages)), - expected=expected, - errors="N/A", - ) - - return status - - def checkQueryFieldValues( - self, - query, - expected_values, # can be list, set, tuple, or string - expectedMinRow=0, - interval=15, - retries=4, - namespace="SA-ThreatIntelligence", - ): - - """Execute a query and check for a matching set (not necessarily - complete) of output fields, and secondarily for a minimum - number of results. - """ - - tryNum = 0 - status = False - - while tryNum <= retries and not status: - - job = self.jobs.create(query, auto_finalize_ec=10, max_time=60) - job.wait(TIMEOUT) - result_count = len(job.get_results()) - results = job.get_results() - messages = job.get_messages() - - if len(job.get_results()) > 0: - # we need to cast to str before int because it's a ResultField - # which can't be cast directly to str... - values = str(list(results[expectedMinRow - 1].values())) - print(values) - print(values.__class__.__name__) - if expected_values in values: - self.wrapLogOutput( - msg="All expected values found in result:", - actual=values, - expected=expected_values, - errors="", - ) - status = True - else: - self.wrapLogOutput( - msg="Expected field values missing from result:", - actual=values, - expected=expected_values, - errors="", - ) - else: - self.wrapLogOutput( - msg="Zero results from search:", - actual="", - expected=",".join(expected_values), - errors="\n".join(messages), - ) - - if not status: - tryNum += 1 - time.sleep(interval) - - return status - - def checkQueryContent( - self, - query, - expected, - expectedRow, - interval=15, - retries=4, - namespace="SA-ThreatIntelligence", - ): - - """Check for specific content in a specific search result row. 
- Script will issue failure IF no results are obtained - ( len(result) == 0 ) OR if the expected text does not exist - in the raw result text. - """ - - tryNum = 0 - status = False - - while tryNum <= retries and not status: - job = self.jobs.create(query, auto_finalize_ec=10, max_time=60) - job.wait(TIMEOUT) - results = job.get_results() - messages = job.get_messages() - - # TODO: modify this to handle checking any specific row of the output. - if len(results) > 0 and expectedRow <= len(results): - # Careful when using _raw as it is a splunk.search.RawEvent, - # not a string. Casting to a string converts it to a - # lxml.etree._ElementStringResult which behaves like a string. - raw = str(results[0].get("_raw", None)) - if raw is not None: - if expected in raw: - self.wrapLogOutput( - msg="Expected text found in result:", - actual=raw, - expected=expected, - errors="", - ) - status = True - else: - self.wrapLogOutput( - msg="Expected text NOT found in result:", - actual=raw, - expected=expected, - errors="", - ) - else: - self.wrapLogOutput( - msg="Empty raw data from search:", - actual="", - expected=expected, - errors="\n".join(messages), - ) - else: - self.wrapLogOutput( - msg="Zero results from search:", - actual="", - expected=expected, - errors="\n".join(messages), - ) - - if not status: - tryNum += 1 - time.sleep(interval) - - return status - - def compareContent(self, actual, expected): - """Compare two string sequences, generating a unified diff.""" - # Strip EOL to avoid a common error. Note that one or more - # inputs may be an empty list so check length first. - if len(actual) > 0: - actual[-1] = actual[-1].rstrip("\n") - - if len(expected) > 0: - expected[-1] = expected[-1].rstrip("\n") - - # Note that readlines() RETAINS trailing end-of-line characters. - # The True argument to splitlines() ensures that the actual output - # from the command also retains end of line characters. 
- result = difflib.unified_diff(actual, expected) - - # result is a generator, so obtain the mismatched lines - mismatches = [] - status = True - for line in result: - mismatches.append(line) - - if len(mismatches) > 0: - self.wrapLogOutput( - msg="Script output did not match expected", - actual="", - expected="", - errors="\n".join(mismatches), - ) - status = False - else: - self.wrapLogOutput( - msg="Script output matches.", - actual="", - expected="", - errors="No errors.", - ) - return status - - def getRealtimeNotableSearchResults( - self, searchName, interval=15, retries=4, minimumNumberEvents=1 - ): - self.logger.debug("Retry count: %d", retries) - - tryNum = 0 - searchQuery = "search `notable(" + searchName + ")`" - searchResults = [] - while tryNum <= retries: - self.logger.debug("tryNum Value: %d", tryNum) - job = self.jobs.create(searchQuery, max_time=60) - job.wait(TIMEOUT) - searchResults = job.get_results() - self.logger.debug("Results Count: %d", len(searchResults)) - self.logger.debug("Results : %s", searchResults) - if len(searchResults) >= minimumNumberEvents: - return searchResults - else: - self.logger.debug("Retries: %d", tryNum) - tryNum += 1 - time.sleep(interval) - return searchResults - - def gen_table(self, table): - """Return search query string that generates a table. 
- The columns are sorted by field name - - Parameters: - table = a list of dictionary - """ - - def _rows(t): - return " | ".join("append [stats count | %s]" % _row(r) for r in t) - - def _row(r): - return " | ".join("eval %s=%s" % i for i in list(r.items())) - - return "%s | fields - count" % _rows(table) - - def compareContentRegex(self, actual, expectedRx): - """Compare string to a regex.""" - match = expectedRx.search(actual) - - if not match: - self.wrapLogOutput( - msg="Script output did not match expected", - actual="", - expected="", - errors="", - ) - status = False - else: - self.wrapLogOutput( - msg="Script output matches.", - actual="", - expected="", - errors="No errors.", - ) - status = True - return status - - def checkQueryFieldValueIsGreaterThanZero( - self, - query, - field_name, # can be list, set, tuple, or string - expectedMinRow=0, - interval=15, - retries=4, - namespace="SA-ThreatIntelligence", - ): - - """Execute a query and check for a matching set (not necessarily - complete) of output fields, and secondarily for a minimum - number of results. - """ - - tryNum = 0 - status = False - - while tryNum <= retries and not status: - - job = self.jobs.create(query, max_time=60) - job.wait(240) - results = job.get_results() - messages = job.get_messages() - - if len(results) > 0: - # we need to cast to str before int because it's a ResultField - # which can't be cast directly to str... 
- keys = list(map(str, list(results[0].keys()))) - print(keys) - values = list(map(str, list(results[0].values()))) - print(values) - dictionary = dict(list(zip(keys, values))) - print(dictionary) - if int(dictionary[field_name]) > 0: - self.wrapLogOutput( - msg="Expected field value is greater than 0 ", - actual=values, - expected=int(dictionary[field_name]), - errors="", - ) - status = True - else: - self.wrapLogOutput( - msg="Expected field values missing from result:", - actual=values, - expected=int(dictionary[field_name]), - errors="", - ) - else: - self.wrapLogOutput( - msg="Zero results from search:", - actual="", - expected="greater than 0", - errors="\n".join(messages), - ) - - if not status: - tryNum += 1 - time.sleep(interval) - - return status - - def getFieldValuesDict(self, query, interval=15, retries=4): - - """Execute a query and check for a matching set (not necessarily - complete) of output fields, and secondarily for a minimum - number of results. - """ - - tryNum = 0 - status = False - - while tryNum <= retries and not status: - - job = self.jobs.create(query, max_time=60) - job.wait(240) - result_count = len(job.get_results()) - results = job.get_results() - messages = job.get_messages() - - if result_count > 0: - # we need to cast to str before int because it's a ResultField - # which can't be cast directly to str... - keys = list(map(str, list(results[0].keys()))) - print(keys) - values = list(map(str, list(results[0].values()))) - print(values) - dictionary = dict(list(zip(keys, values))) - print(dictionary) - return dictionary - else: - self.wrapLogOutput( - msg="Zero results from search:", - actual="", - expected="greater than 0", - errors="\n".join(messages), - ) - - if not status: - tryNum += 1 - time.sleep(interval) - - return status - - def getFieldValuesList(self, query, interval=15, retries=4): - """ - Get list of results from the query. Where each result will be - a dictionary. 
The search job will retry at given interval if - no results found. - - Args: - query (str): query to search on Splunk instance - interval (int): at what interval each retry should be made - retries (int): number of retries to make if no results found - """ - - tryNum = 0 - status = False - - while tryNum <= retries and not status: - job = self.jobs.create(query, max_time=60) - job.wait(240) - results = job.get_results(offset=0, count=4000) - result_count = len(results) - messages = job.get_messages() - - if result_count > 0: - for each_result in results: - keys = list(map(str, list(each_result.keys()))) - values = list(map(str, list(each_result.values()))) - yield dict(list(zip(keys, values))) - break - else: - self.wrapLogOutput( - msg="Zero results from search:", - actual="", - expected="greater than 0", - errors="\n".join(messages), - ) - - if not status: - tryNum += 1 - time.sleep(interval) - - return status - - def checkRemoteSearch(self, query, starts_with=None, max_time=120): - self.logger.debug("query is %s", query) - job = self.jobs.create(query, auto_finalize_ec=200, max_time=max_time) - job.wait(max_time) - if starts_with: - if job.get_remote_search().startswith(starts_with): - self.logger.debug("Remote search starts with :%s", starts_with) - return True - else: - self.logger.debug("starts_with is None ") - - return False - - def smbServiceCheck(self): - - # Skip test if running on Windows. - if sys.platform == "win32": - py.test.skip("nmblookup test on Windows not yet implemented.") - - # Skip if nmbd daemon is not running. 
- ps_task = subprocess.Popen(["ps", "-e"], stdout=subprocess.PIPE) - stdout, stderr = ps_task.communicate() - if "nmbd" not in stdout: - py.test.skip("NetBIOS services (nmbd) not running on host") - - def checkQueryFieldAllValuesContainsRegex( - self, - query, - field, - regex, - interval=15, - retries=4, - number_results=100, - max_time=60, - ): - tryNum = 0 - r = re.compile(regex) - match_found = False - while tryNum <= retries: - job = self.jobs.create( - query, auto_finalize_ec=number_results, max_time=max_time - ) - job.wait() - results = job.get_results() - - for result_no, result in enumerate(results.as_list): - if r.match(str(result[field])): - self.logger.debug( - "result['%s']='%s' matches re: %s", - field, - str(result[field]), - regex, - ) - match_found = True - else: - self.logger.debug( - "result['%s']='%s' does not match re: %s for %d row", - field, - str(result[field]), - regex, - result_no, - ) - match_found = False - return False - if result_no > number_results: - self.logger.debug( - "checked for re: %s in first %d results", regex, number_results - ) - return match_found - tryNum += 1 - time.sleep(interval) - return match_found - - def checkQueryAllFieldAllValuesContainsRegex( - self, - query, - field_regex_json, - interval=15, - retries=4, - number_results=100, - max_time=60, - ): - tryNum = 0 - match_found = False - field_names = list(field_regex_json.keys()) - while tryNum <= retries: - job = self.jobs.create( - query, auto_finalize_ec=number_results, max_time=max_time - ) - job.wait() - results = job.get_results() - - for result_no, result in enumerate(results.as_list): - for field in field_names: - regex = field_regex_json[field] - r = re.compile(regex) - self.logger.debug("Looking for field %s in result set", field) - if field in list(result.keys()): - if r.match(str(result[field])): - self.logger.debug( - "result['%s']='%s' matches re: %s", - field, - str(result[field]), - regex, - ) - match_found = True - else: - self.logger.debug( - 
"result['%s']='%s' does not match re: %s for %d row", - field, - str(result[field]), - regex, - result_no, - ) - match_found = False - return False - if result_no > number_results: - self.logger.debug( - "checked for re: %s in first %d results", - regex, - number_results, - ) - return match_found - else: - self.logger.debug( - "field %s not found in result set fields", - list(result.keys()), - ) - return False - tryNum += 1 - time.sleep(interval) - return match_found diff --git a/pytest_splunk_addon/helmut_lib/__init__.py b/pytest_splunk_addon/helmut_lib/__init__.py deleted file mode 100644 index 139dda2b0..000000000 --- a/pytest_splunk_addon/helmut_lib/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# -# Copyright 2021 Splunk Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -*- coding: utf-8 -*- diff --git a/pytest_splunk_addon/splunk.py b/pytest_splunk_addon/splunk.py index 84ebf7f24..320233b58 100644 --- a/pytest_splunk_addon/splunk.py +++ b/pytest_splunk_addon/splunk.py @@ -13,13 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# -*- coding: utf-8 -*- -""" -Module usage: -- helmut : To connect to a Splunk instance. source: splunk-sdk -- helmut_lib: Provides various Utility functions to search on Splunk. 
Source: splunk-sdk -""" - import logging import os import shutil @@ -29,9 +22,9 @@ import pytest import requests import splunklib.client as client -from .helmut.manager.jobs import Jobs -from .helmut.splunk.cloud import CloudSplunk -from .helmut_lib.SearchUtil import SearchUtil +from splunksplwrapper.manager.jobs import Jobs +from splunksplwrapper.splunk.cloud import CloudSplunk +from splunksplwrapper.SearchUtil import SearchUtil from .standard_lib.event_ingestors import IngestorHelper from .standard_lib.CIM_Models.datamodel_definition import datamodels import configparser @@ -358,7 +351,7 @@ def splunk_search_util(splunk, request): This is a simple connection to Splunk via the SplunkSDK Returns: - helmut_lib.SearchUtil.SearchUtil: The SearchUtil object + splunksplwrapper.SearchUtil.SearchUtil: The SearchUtil object """ LOGGER.info("Initializing SearchUtil for the Splunk instace.") cloud_splunk = CloudSplunk( diff --git a/pytest_splunk_addon/standard_lib/fields_tests/test_templates.py b/pytest_splunk_addon/standard_lib/fields_tests/test_templates.py index 8d792db92..5adf58ac1 100644 --- a/pytest_splunk_addon/standard_lib/fields_tests/test_templates.py +++ b/pytest_splunk_addon/standard_lib/fields_tests/test_templates.py @@ -318,7 +318,7 @@ def test_tags( and also checks that a tag is not assigned to the event if disabled. Args: - splunk_search_util (helmut_lib.SearchUtil.SearchUtil): + splunk_search_util (splunksplwrapper.SearchUtil.SearchUtil): object that helps to search on Splunk. splunk_searchtime_fields_tags (fixture): pytest parameters to test. record_property (fixture): pytest fixture to document facts of test cases. @@ -384,7 +384,7 @@ def test_datamodels( and also checks if there is no additional wrongly assigned datamodel. Args: - splunk_search_util (helmut_lib.SearchUtil.SearchUtil): + splunk_search_util (splunksplwrapper.SearchUtil.SearchUtil): object that helps to search on Splunk. 
splunk_ingest_data (fixture): Unused but required to ensure data was ingested before running test splunk_setup (fixture): Unused but required to ensure that test environment was set up before running test diff --git a/pytest_splunk_addon/tools/cim_field_report.py b/pytest_splunk_addon/tools/cim_field_report.py index 656b699a8..b44787d15 100644 --- a/pytest_splunk_addon/tools/cim_field_report.py +++ b/pytest_splunk_addon/tools/cim_field_report.py @@ -23,8 +23,8 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) -from pytest_splunk_addon.helmut.manager.jobs import Jobs -from pytest_splunk_addon.helmut.splunk.cloud import CloudSplunk +from splunksplwrapper.manager.jobs import Jobs +from splunksplwrapper.splunk.cloud import CloudSplunk from pytest_splunk_addon.standard_lib.addon_parser import AddonParser from splunklib import binding @@ -145,7 +145,7 @@ def collect_job_results(job, acc, fn): Parameters ---------- - job : pytest_splunk_addon.helmut.manager.jobs.job + job : splunksplwrapper.manager.jobs.job Finished job ready to collect results acc : any An accumulator object that collects job results @@ -205,7 +205,7 @@ def get_punct_by_eventtype(jobs, eventtypes, config): Parameters ---------- - jobs : pytest_splunk_addon.helmut.manager.jobs.Jobs + jobs : splunksplwrapper.manager.jobs.Jobs Jobs object capable to create a new splunk search job eventtypes : list List of splunk eventtypes names taken from TA configurations @@ -246,7 +246,7 @@ def get_field_names(jobs, eventtypes, config): Parameters ---------- - jobs : pytest_splunk_addon.helmut.manager.jobs.Jobs + jobs : splunksplwrapper.manager.jobs.Jobs Jobs object capable to create a new splunk search job eventtypes : list List of splunk eventtypes names taken from TA configurations @@ -311,7 +311,7 @@ def get_fieldsummary(jobs, punct_by_eventtype, config): Parameters ---------- - jobs : pytest_splunk_addon.helmut.manager.jobs.Jobs + jobs : splunksplwrapper.manager.jobs.Jobs Jobs object capable 
to create a new splunk search job punct_by_eventtype : list List of tuples of 2 elements, representing collected unique pairs of eventtype+punct @@ -362,7 +362,7 @@ def get_fieldsreport(jobs, eventtypes, fields, config): Parameters ---------- - jobs : pytest_splunk_addon.helmut.manager.jobs.Jobs + jobs : splunksplwrapper.manager.jobs.Jobs Jobs object capable to create a new splunk search job eventtypes : list List of splunk eventtypes names taken from TA configurations @@ -445,7 +445,7 @@ def build_report(jobs, eventtypes, config): Parameters ---------- - jobs : pytest_splunk_addon.helmut.manager.jobs.Jobs + jobs : splunksplwrapper.manager.jobs.Jobs Jobs object capable to create a new splunk search job eventtypes : list List of splunk eventtypes names taken from TA configurations From 4b4f7cfda368927c9fea366a5819030d4922a48a Mon Sep 17 00:00:00 2001 From: Artem Rys Date: Thu, 16 Feb 2023 16:19:40 +0100 Subject: [PATCH 2/2] ci: fix the typo in the pipeline --- .github/workflows/build-test-release.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/build-test-release.yml b/.github/workflows/build-test-release.yml index 655314064..d7a4bb27e 100644 --- a/.github/workflows/build-test-release.yml +++ b/.github/workflows/build-test-release.yml @@ -182,12 +182,11 @@ jobs: - test-splunk-doc - test-splunk-unit - review_secrets - - review-dog-misspell runs-on: ubuntu-latest strategy: fail-fast: false matrix: - splunk-version: ["8.1", "9.0"] + splunk-version: ["8.1", "8.2", "9.0"] test-marker: [ "splunk_connection_docker", "splunk_app_fiction",