From 72d5fe1b4979b26e622073bfd0f8b16767304035 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Fri, 6 May 2022 12:24:36 +0530 Subject: [PATCH 01/60] removing python 2.7 from CI and local testing --- .github/workflows/test.yml | 3 +-- Makefile | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 42713a68..31d43c8e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,6 @@ jobs: matrix: os: - ubuntu-latest - python: [ 2.7, 3.7 ] splunk-version: - "8.0" - "latest" @@ -35,7 +34,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python }} + python-version: 3.7 - name: Install tox run: pip install tox - name: Test Execution diff --git a/Makefile b/Makefile index 2810c6ae..0a3f5804 100644 --- a/Makefile +++ b/Makefile @@ -34,7 +34,7 @@ docs: .PHONY: test test: @echo "$(ATTN_COLOR)==> test $(NO_COLOR)" - @tox -e py27,py37 + @tox -e py37 .PHONY: test_specific test_specific: From e3a09e76af053283179a8f5601c3dec077cb17fc Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Fri, 6 May 2022 12:28:21 +0530 Subject: [PATCH 02/60] code changes based on Pylint and 2to3 tool suggestions --- tests/modularinput/modularinput_testlib.py | 6 +- tests/modularinput/test_event.py | 5 +- tests/modularinput/test_input_definition.py | 5 +- tests/modularinput/test_scheme.py | 16 +- tests/modularinput/test_script.py | 13 +- .../test_validation_definition.py | 6 +- tests/searchcommands/__init__.py | 4 - tests/searchcommands/chunked_data_stream.py | 8 +- tests/searchcommands/test_builtin_options.py | 32 ++- .../test_configuration_settings.py | 40 ++-- tests/searchcommands/test_decorators.py | 51 ++--- .../searchcommands/test_generator_command.py | 5 +- tests/searchcommands/test_internals_v1.py | 11 +- tests/searchcommands/test_internals_v2.py | 32 +-- .../test_multibyte_processing.py | 6 +- .../searchcommands/test_reporting_command.py | 4 +- tests/searchcommands/test_search_command.py | 204 ++++++++---------- .../searchcommands/test_searchcommands_app.py | 63 ++---- .../searchcommands/test_streaming_command.py | 23 +- tests/searchcommands/test_validators.py | 17 +- tests/test_all.py | 8 +- tests/test_app.py | 25 +-- tests/test_binding.py | 187 ++++++++-------- tests/test_collection.py | 87 +++----- tests/test_conf.py | 11 +- tests/test_data.py | 40 ++-- tests/test_event_type.py | 25 +-- tests/test_examples.py | 51 ++--- tests/test_fired_alert.py | 23 +- tests/test_index.py | 42 ++-- tests/test_input.py | 29 +-- tests/test_job.py | 37 ++-- tests/test_kvstore_batch.py | 21 +- tests/test_kvstore_conf.py | 16 +- tests/test_kvstore_data.py | 21 +- tests/test_logger.py | 11 +- tests/test_message.py | 14 +- tests/test_modular_input.py | 13 +- tests/test_modular_input_kinds.py | 25 +-- tests/test_results.py | 11 +- tests/test_role.py | 16 +- tests/test_saved_search.py | 31 ++- tests/test_service.py | 10 +- tests/test_storage_passwords.py | 19 +- tests/test_user.py | 17 +- tests/test_utils.py | 8 +- tests/testlib.py | 16 +- 47 files changed, 549 insertions(+), 816 deletions(-) diff --git a/tests/modularinput/modularinput_testlib.py b/tests/modularinput/modularinput_testlib.py index 81930173..d4846a40 100644 --- a/tests/modularinput/modularinput_testlib.py +++ b/tests/modularinput/modularinput_testlib.py @@ -15,11 +15,7 @@ # under the License. 
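The matrix entry and the `py27` tox environment are the only gates this commit removes, so nothing yet stops a Python 2 interpreter from importing the package. A hedged sketch of the kind of runtime guard projects usually pair with a CI drop like this; the `(3, 7)` floor mirrors the `py37` environment kept in the Makefile, and the message text is illustrative only:

```python
import sys

# Illustrative companion guard, not part of this patch: fail fast on
# interpreters older than the one CI still exercises (py37 above).
if sys.version_info < (3, 7):
    raise RuntimeError("This library requires Python 3.7 or later")
```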
# Utility file for unit tests, import common functions and modules -from __future__ import absolute_import -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest import sys, os import io diff --git a/tests/modularinput/test_event.py b/tests/modularinput/test_event.py index 86565603..278abb81 100644 --- a/tests/modularinput/test_event.py +++ b/tests/modularinput/test_event.py @@ -14,13 +14,12 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import import sys import pytest -from tests.modularinput.modularinput_testlib import unittest, xml_compare, data_open +from tests.modularinput.modularinput_testlib import xml_compare, data_open from splunklib.modularinput.event import Event, ET from splunklib.modularinput.event_writer import EventWriter @@ -99,7 +98,7 @@ def test_writing_events_on_event_writer(capsys): first_out_part = captured.out with data_open("data/stream_with_one_event.xml") as data: - found = ET.fromstring("%s" % first_out_part) + found = ET.fromstring(f"{first_out_part}") expected = ET.parse(data).getroot() assert xml_compare(expected, found) diff --git a/tests/modularinput/test_input_definition.py b/tests/modularinput/test_input_definition.py index d0f59a04..520eafbc 100644 --- a/tests/modularinput/test_input_definition.py +++ b/tests/modularinput/test_input_definition.py @@ -14,10 +14,10 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import from tests.modularinput.modularinput_testlib import unittest, data_open from splunklib.modularinput.input_definition import InputDefinition + class InputDefinitionTestCase(unittest.TestCase): def test_parse_inputdef_with_zero_inputs(self): @@ -72,5 +72,6 @@ def test_attempt_to_parse_malformed_input_definition_will_throw_exception(self): with self.assertRaises(ValueError): found = InputDefinition.parse(data_open("data/conf_with_invalid_inputs.xml")) + if __name__ == "__main__": - unittest.main() \ No newline at end of file + unittest.main() diff --git a/tests/modularinput/test_scheme.py b/tests/modularinput/test_scheme.py index e1b3463a..e38d81a5 100644 --- a/tests/modularinput/test_scheme.py +++ b/tests/modularinput/test_scheme.py @@ -13,15 +13,11 @@ # License for the specific language governing permissions and limitations # under the License. 
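The `unittest2` fallback deleted in the testlib hunk above recurs throughout this patch series. For reference, a minimal before/after of the pattern; on any Python 3 interpreter both spellings import the same module, since `unittest2` was only ever a backport for Python 2.6:

```python
# Removed form: guard against Python 2.6, where the stdlib unittest lacked
# features that the unittest2 backport supplied.
try:
    import unittest2 as unittest
except ImportError:
    import unittest

# Retained form: on Python 3.7+ the standard module is always sufficient.
import unittest
```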
-from __future__ import absolute_import +import xml.etree.ElementTree as ET from tests.modularinput.modularinput_testlib import unittest, xml_compare, data_open from splunklib.modularinput.scheme import Scheme from splunklib.modularinput.argument import Argument -try: - import xml.etree.cElementTree as ET -except ImportError: - import xml.etree.ElementTree as ET class SchemeTest(unittest.TestCase): def test_generate_xml_from_scheme_with_default_values(self): @@ -40,7 +36,7 @@ def test_generate_xml_from_scheme(self): some arguments added matches what we expect.""" scheme = Scheme("abcd") - scheme.description = u"쎼 and 쎶 and <&> für" + scheme.description = "쎼 and 쎶 and <&> für" scheme.streaming_mode = Scheme.streaming_mode_simple scheme.use_external_validation = "false" scheme.use_single_instance = "true" @@ -50,7 +46,7 @@ def test_generate_xml_from_scheme(self): arg2 = Argument( name="arg2", - description=u"쎼 and 쎶 and <&> für", + description="쎼 and 쎶 and <&> für", validation="is_pos_int('some_name')", data_type=Argument.data_type_number, required_on_edit=True, @@ -69,7 +65,7 @@ def test_generate_xml_from_scheme_with_arg_title(self): some arguments added matches what we expect. Also sets the title on an argument.""" scheme = Scheme("abcd") - scheme.description = u"쎼 and 쎶 and <&> für" + scheme.description = "쎼 and 쎶 and <&> für" scheme.streaming_mode = Scheme.streaming_mode_simple scheme.use_external_validation = "false" scheme.use_single_instance = "true" @@ -79,7 +75,7 @@ def test_generate_xml_from_scheme_with_arg_title(self): arg2 = Argument( name="arg2", - description=u"쎼 and 쎶 and <&> für", + description="쎼 and 쎶 and <&> für", validation="is_pos_int('some_name')", data_type=Argument.data_type_number, required_on_edit=True, @@ -113,7 +109,7 @@ def test_generate_xml_from_argument(self): argument = Argument( name="some_name", - description=u"쎼 and 쎶 and <&> für", + description="쎼 and 쎶 and <&> für", validation="is_pos_int('some_name')", data_type=Argument.data_type_boolean, required_on_edit="true", diff --git a/tests/modularinput/test_script.py b/tests/modularinput/test_script.py index b15885dc..48be8826 100644 --- a/tests/modularinput/test_script.py +++ b/tests/modularinput/test_script.py @@ -1,16 +1,13 @@ import sys +import io +import xml.etree.ElementTree as ET from splunklib.client import Service from splunklib.modularinput import Script, EventWriter, Scheme, Argument, Event -import io from splunklib.modularinput.utils import xml_compare from tests.modularinput.modularinput_testlib import data_open -try: - import xml.etree.cElementTree as ET -except ImportError: - import xml.etree.ElementTree as ET TEST_SCRIPT_PATH = "__IGNORED_SCRIPT_PATH__" @@ -51,7 +48,7 @@ def test_scheme_properly_generated_by_script(capsys): class NewScript(Script): def get_scheme(self): scheme = Scheme("abcd") - scheme.description = u"\uC3BC and \uC3B6 and <&> f\u00FCr" + scheme.description = "\uC3BC and \uC3B6 and <&> f\u00FCr" scheme.streaming_mode = scheme.streaming_mode_simple scheme.use_external_validation = False scheme.use_single_instance = True @@ -60,7 +57,7 @@ def get_scheme(self): scheme.add_argument(arg1) arg2 = Argument("arg2") - arg2.description = u"\uC3BC and \uC3B6 and <&> f\u00FCr" + arg2.description = "\uC3BC and \uC3B6 and <&> f\u00FCr" arg2.data_type = Argument.data_type_number arg2.required_on_create = True arg2.required_on_edit = True @@ -208,7 +205,7 @@ def test_service_property(capsys): # Override abstract methods class NewScript(Script): def __init__(self): - super(NewScript, 
self).__init__()
+        super().__init__()
         self.authority_uri = None
 
     def get_scheme(self):
diff --git a/tests/modularinput/test_validation_definition.py b/tests/modularinput/test_validation_definition.py
index c8046f3b..43871c51 100644
--- a/tests/modularinput/test_validation_definition.py
+++ b/tests/modularinput/test_validation_definition.py
@@ -14,10 +14,11 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from __future__ import absolute_import
+
 from tests.modularinput.modularinput_testlib import unittest, data_open
 from splunklib.modularinput.validation_definition import ValidationDefinition
 
+
 class ValidationDefinitionTestCase(unittest.TestCase):
     def test_validation_definition_parse(self):
         """Check that parsing produces expected result"""
@@ -42,5 +43,6 @@ def test_validation_definition_parse(self):
 
         self.assertEqual(expected, found)
 
+
 if __name__ == "__main__":
-    unittest.main()
\ No newline at end of file
+    unittest.main()
diff --git a/tests/searchcommands/__init__.py b/tests/searchcommands/__init__.py
index 2f282889..0f260b58 100644
--- a/tests/searchcommands/__init__.py
+++ b/tests/searchcommands/__init__.py
@@ -15,10 +15,6 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from __future__ import absolute_import, division, print_function, unicode_literals
-
-from sys import version_info as python_version
-
 from os import path
 import logging
diff --git a/tests/searchcommands/chunked_data_stream.py b/tests/searchcommands/chunked_data_stream.py
index ae5363ef..29a21a1b 100644
--- a/tests/searchcommands/chunked_data_stream.py
+++ b/tests/searchcommands/chunked_data_stream.py
@@ -7,7 +7,7 @@
 from splunklib import six
 
 
-class Chunk(object):
+class Chunk:
     def __init__(self, version, meta, data):
         self.version = six.ensure_str(version)
         self.meta = json.loads(meta)
@@ -21,9 +21,6 @@ def __init__(self, chunk_stream):
         self.chunk_stream = chunk_stream
 
-    def __next__(self):
-        return self.next()
-
-    def next(self):
+    def __next__(self):
         try:
             return self.chunk_stream.read_chunk()
         except EOFError:
@@ -91,7 +91,7 @@ def _build_data_csv(data):
 
     headers = set()
     for datum in data:
-        headers.update(datum.keys())
+        headers.update(list(datum.keys()))
 
     writer = csv.DictWriter(csvout, headers, dialect=splunklib.searchcommands.internals.CsvDialect)
     writer.writeheader()
diff --git a/tests/searchcommands/test_builtin_options.py b/tests/searchcommands/test_builtin_options.py
index e5c2dd8d..24073d9e 100644
--- a/tests/searchcommands/test_builtin_options.py
+++ b/tests/searchcommands/test_builtin_options.py
@@ -15,19 +15,15 @@
 # License for the specific language governing permissions and limitations
 # under the License.
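For reference, the iterator shape the `chunked_data_stream.py` hunk above converges on: Python 3 spells the advance method `__next__`, the Python 2 `next` alias is dropped, and call sites use the builtin `next()`. A minimal sketch; `read_chunk` and the `EOFError` convention are taken from the surrounding file, while the class name here is illustrative:

```python
class ChunkIterator:
    """Yields chunks until the underlying stream is exhausted."""

    def __init__(self, chunk_stream):
        self.chunk_stream = chunk_stream

    def __iter__(self):
        return self

    def __next__(self):
        # Single Python 3 protocol method; next(iterator) dispatches here.
        try:
            return self.chunk_stream.read_chunk()
        except EOFError:
            raise StopIteration
```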
-from __future__ import absolute_import, division, print_function, unicode_literals - -from splunklib.six.moves import cStringIO as StringIO -try: - from unittest2 import main, TestCase -except ImportError: - from unittest import main, TestCase import os import sys import logging import pytest +from unittest import main, TestCase +from splunklib.six.moves import cStringIO as StringIO + from splunklib.searchcommands import environment from splunklib.searchcommands.decorators import Configuration @@ -117,18 +113,18 @@ def test_logging_configuration(self): except ValueError: pass except BaseException as e: - self.fail('Expected ValueError, but {} was raised'.format(type(e))) + self.fail(f'Expected ValueError, but {type(e)} was raised') else: - self.fail('Expected ValueError, but logging_configuration={}'.format(command.logging_configuration)) + self.fail(f'Expected ValueError, but logging_configuration={command.logging_configuration}') try: command.logging_configuration = os.path.join(package_directory, 'non-existent.logging.conf') except ValueError: pass except BaseException as e: - self.fail('Expected ValueError, but {} was raised'.format(type(e))) + self.fail(f'Expected ValueError, but {type(e)} was raised') else: - self.fail('Expected ValueError, but logging_configuration={}'.format(command.logging_configuration)) + self.fail(f'Expected ValueError, but logging_configuration={command.logging_configuration}') def test_logging_level(self): @@ -146,7 +142,7 @@ def test_logging_level(self): self.assertEqual(warning, command.logging_level) for level in level_names(): - if type(level) is int: + if isinstance(level, int): command.logging_level = level level_name = logging.getLevelName(level) self.assertEqual(command.logging_level, warning if level_name == notset else level_name) @@ -171,9 +167,9 @@ def test_logging_level(self): except ValueError: pass except BaseException as e: - self.fail('Expected ValueError, but {} was raised'.format(type(e))) + self.fail(f'Expected ValueError, but {type(e)} was raised') else: - self.fail('Expected ValueError, but logging_level={}'.format(command.logging_level)) + self.fail(f'Expected ValueError, but logging_level={command.logging_level}') self.assertEqual(command.logging_level, current_value) @@ -211,13 +207,9 @@ def _test_boolean_option(self, option): except ValueError: pass except BaseException as error: - self.fail('Expected ValueError when setting {}={}, but {} was raised'.format( - option.name, repr(value), type(error))) + self.fail(f'Expected ValueError when setting {option.name}={repr(value)}, but {type(error)} was raised') else: - self.fail('Expected ValueError, but {}={} was accepted.'.format( - option.name, repr(option.fget(command)))) - - return + self.fail(f'Expected ValueError, but {option.name}={repr(option.fget(command))} was accepted.') if __name__ == "__main__": diff --git a/tests/searchcommands/test_configuration_settings.py b/tests/searchcommands/test_configuration_settings.py index dd07b57f..bf810edf 100644 --- a/tests/searchcommands/test_configuration_settings.py +++ b/tests/searchcommands/test_configuration_settings.py @@ -24,20 +24,20 @@ # * If a value is not set in code, the value specified in commands.conf is enforced # * If a value is set in code, it overrides the value specified in commands.conf -from __future__ import absolute_import, division, print_function, unicode_literals -from splunklib.searchcommands.decorators import Configuration from unittest import main, TestCase +import pytest +from splunklib.searchcommands.decorators 
import Configuration from splunklib import six -import pytest + @pytest.mark.smoke class TestConfigurationSettings(TestCase): def test_generating_command(self): - from splunklib.searchcommands import Configuration, GeneratingCommand + from splunklib.searchcommands import GeneratingCommand @Configuration() class TestCommand(GeneratingCommand): @@ -48,7 +48,7 @@ def generate(self): command._protocol_version = 1 self.assertTrue( - [(name, value) for name, value in six.iteritems(command.configuration)], + list(six.iteritems(command.configuration)), [('generating', True)]) self.assertIs(command.configuration.generates_timeorder, None) @@ -66,12 +66,12 @@ def generate(self): except AttributeError: pass except Exception as error: - self.fail('Expected AttributeError, not {}: {}'.format(type(error).__name__, error)) + self.fail(f'Expected AttributeError, not {type(error).__name__}: {error}') else: self.fail('Expected AttributeError') self.assertEqual( - [(name, value) for name, value in six.iteritems(command.configuration)], + list(six.iteritems(command.configuration)), [('generates_timeorder', True), ('generating', True), ('local', True), ('retainsevents', True), ('streaming', True)]) @@ -79,7 +79,7 @@ def generate(self): command._protocol_version = 2 self.assertEqual( - [(name, value) for name, value in six.iteritems(command.configuration)], + list(six.iteritems(command.configuration)), [('generating', True), ('type', 'stateful')]) self.assertIs(command.configuration.distributed, False) @@ -93,19 +93,17 @@ def generate(self): except AttributeError: pass except Exception as error: - self.fail('Expected AttributeError, not {}: {}'.format(type(error).__name__, error)) + self.fail(f'Expected AttributeError, not {type(error).__name__}: {error}') else: self.fail('Expected AttributeError') self.assertEqual( - [(name, value) for name, value in six.iteritems(command.configuration)], + list(six.iteritems(command.configuration)), [('generating', True), ('type', 'streaming')]) - return - def test_streaming_command(self): - from splunklib.searchcommands import Configuration, StreamingCommand + from splunklib.searchcommands import StreamingCommand @Configuration() class TestCommand(StreamingCommand): @@ -117,7 +115,7 @@ def stream(self, records): command._protocol_version = 1 self.assertEqual( - [(name, value) for name, value in six.iteritems(command.configuration)], + list(six.iteritems(command.configuration)), [('streaming', True)]) self.assertIs(command.configuration.clear_required_fields, None) @@ -136,19 +134,20 @@ def stream(self, records): except AttributeError: pass except Exception as error: - self.fail('Expected AttributeError, not {}: {}'.format(type(error).__name__, error)) + self.fail(f'Expected AttributeError, not {type(error).__name__}: {error}') else: self.fail('Expected AttributeError') self.assertEqual( - [(name, value) for name, value in six.iteritems(command.configuration)], - [('clear_required_fields', True), ('local', True), ('overrides_timeorder', True), ('required_fields', ['field_1', 'field_2', 'field_3']), ('streaming', True)]) + list(six.iteritems(command.configuration)), + [('clear_required_fields', True), ('local', True), ('overrides_timeorder', True), + ('required_fields', ['field_1', 'field_2', 'field_3']), ('streaming', True)]) command = TestCommand() command._protocol_version = 2 self.assertEqual( - [(name, value) for name, value in six.iteritems(command.configuration)], + list(six.iteritems(command.configuration)), [('type', 'streaming')]) 
self.assertIs(command.configuration.distributed, True) @@ -162,15 +161,14 @@ def stream(self, records): except AttributeError: pass except Exception as error: - self.fail('Expected AttributeError, not {}: {}'.format(type(error).__name__, error)) + self.fail(f'Expected AttributeError, not {type(error).__name__}: {error}') else: self.fail('Expected AttributeError') self.assertEqual( - [(name, value) for name, value in six.iteritems(command.configuration)], + list(six.iteritems(command.configuration)), [('required_fields', ['field_1', 'field_2', 'field_3']), ('type', 'stateful')]) - return if __name__ == "__main__": main() diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index dd65aa0a..2441959e 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -15,12 +15,8 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import, division, print_function, unicode_literals -try: - from unittest2 import main, TestCase -except ImportError: - from unittest import main, TestCase +from unittest import main, TestCase import sys from io import TextIOWrapper @@ -121,7 +117,7 @@ class TestSearchCommand(SearchCommand): **Syntax:** **integer=**** **Description:** An integer value''', require=True, validate=validators.Integer()) - + float = Option( doc=''' **Syntax:** **float=**** @@ -268,7 +264,7 @@ def fix_up(cls, command_class): (True, False), (None, 'anything other than a bool')), ('streaming_preop', - (u'some unicode string', b'some byte string'), + ('some unicode string', b'some byte string'), (None, 0xdead)), ('type', # TODO: Do we need to validate byte versions of these strings? @@ -299,7 +295,6 @@ def fix_up(cls, command_class): self.assertIn(backing_field_name, settings_instance.__dict__), self.assertEqual(getattr(settings_instance, name), value) self.assertEqual(settings_instance.__dict__[backing_field_name], value) - pass for value in error_values: try: @@ -313,11 +308,9 @@ def fix_up(cls, command_class): settings_instance = settings_class(command=None) self.assertRaises(ValueError, setattr, settings_instance, name, value) - return - def test_new_configuration_setting(self): - class Test(object): + class Test(): generating = ConfigurationSetting() @ConfigurationSetting(name='required_fields') @@ -401,47 +394,46 @@ def test_option(self): self.assertEqual( validator.format(option.value), validator.format(validator.__call__(legal_value)), - "{}={}".format(option.name, legal_value)) + f"{option.name}={legal_value}") try: option.value = illegal_value except ValueError: pass except BaseException as error: - self.assertFalse('Expected ValueError for {}={}, not this {}: {}'.format( - option.name, illegal_value, type(error).__name__, error)) + self.assertFalse(f'Expected ValueError for {option.name}={illegal_value}, not this {type(error).__name__}: {error}') else: - self.assertFalse('Expected ValueError for {}={}, not a pass.'.format(option.name, illegal_value)) + self.assertFalse(f'Expected ValueError for {option.name}={illegal_value}, not a pass.') expected = { - u'foo': False, + 'foo': False, 'boolean': False, - 'code': u'foo == \"bar\"', + 'code': 'foo == \"bar\"', 'duration': 89999, - 'fieldname': u'some.field_name', + 'fieldname': 'some.field_name', 'file': six.text_type(repr(__file__)), 'integer': 100, 'float': 99.9, 'logging_configuration': environment.logging_configuration, - 'logging_level': u'WARNING', + 'logging_level': 'WARNING', 
'map': 'foo', - 'match': u'123-45-6789', - 'optionname': u'some_option_name', + 'match': '123-45-6789', + 'optionname': 'some_option_name', 'record': False, - 'regularexpression': u'\\s+', + 'regularexpression': '\\s+', 'required_boolean': False, - 'required_code': u'foo == \"bar\"', + 'required_code': 'foo == \"bar\"', 'required_duration': 89999, - 'required_fieldname': u'some.field_name', + 'required_fieldname': 'some.field_name', 'required_file': six.text_type(repr(__file__)), 'required_integer': 100, 'required_float': 99.9, 'required_map': 'foo', - 'required_match': u'123-45-6789', - 'required_optionname': u'some_option_name', - 'required_regularexpression': u'\\s+', - 'required_set': u'bar', - 'set': u'bar', + 'required_match': '123-45-6789', + 'required_optionname': 'some_option_name', + 'required_regularexpression': '\\s+', + 'required_set': 'bar', + 'set': 'bar', 'show_configuration': False, } @@ -477,7 +469,6 @@ def test_option(self): observed = six.text_type(command.options) self.assertEqual(observed, expected) - return if __name__ == "__main__": diff --git a/tests/searchcommands/test_generator_command.py b/tests/searchcommands/test_generator_command.py index 63ae3ac8..7a3320d2 100644 --- a/tests/searchcommands/test_generator_command.py +++ b/tests/searchcommands/test_generator_command.py @@ -1,9 +1,8 @@ import io import time -from . import chunked_data_stream as chunky - from splunklib.searchcommands import Configuration, GeneratingCommand +from . import chunked_data_stream as chunky def test_simple_generator(): @@ -24,7 +23,7 @@ def generate(self): ds = chunky.ChunkedDataStream(out_stream) is_first_chunk = True finished_seen = False - expected = set(map(lambda i: str(i), range(1, 10))) + expected = set([str(i) for i in range(1, 10)]) seen = set() for chunk in ds: if is_first_chunk: diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py index eb85d040..0b6c512a 100755 --- a/tests/searchcommands/test_internals_v1.py +++ b/tests/searchcommands/test_internals_v1.py @@ -14,7 +14,6 @@ # License for the specific language governing permissions and limitations # under the License. 
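The `expected` rewrite in `test_generator_command.py` above swaps `set(map(lambda ...))` for `set([...])`; a set comprehension says the same thing without the intermediate list. All three spellings are equivalent:

```python
# Three equivalent ways to build the expected-values set from the test above.
expected_py2 = set(map(lambda i: str(i), range(1, 10)))  # removed form
expected_patch = set([str(i) for i in range(1, 10)])     # form the patch uses
expected_idiomatic = {str(i) for i in range(1, 10)}      # set comprehension
assert expected_py2 == expected_patch == expected_idiomatic
```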
-from __future__ import absolute_import, division, print_function, unicode_literals from splunklib.searchcommands.internals import CommandLineParser, InputHeader, RecordWriterV1 from splunklib.searchcommands.decorators import Configuration, Option @@ -145,19 +144,19 @@ def fix_up(cls, command_class): pass r'"Hello World!"' ] - for string, expected_value in izip(strings, expected_values): + for string, expected_value in zip(strings, expected_values): command = TestCommandLineParserCommand() argv = ['text', '=', string] CommandLineParser.parse(command, argv) self.assertEqual(command.text, expected_value) - for string, expected_value in izip(strings, expected_values): + for string, expected_value in zip(strings, expected_values): command = TestCommandLineParserCommand() argv = [string] CommandLineParser.parse(command, argv) self.assertEqual(command.fieldnames[0], expected_value) - for string, expected_value in izip(strings, expected_values): + for string, expected_value in zip(strings, expected_values): command = TestCommandLineParserCommand() argv = ['text', '=', string] + strings CommandLineParser.parse(command, argv) @@ -176,7 +175,6 @@ def fix_up(cls, command_class): pass argv = [string] self.assertRaises(SyntaxError, CommandLineParser.parse, command, argv) - return def test_command_line_parser_unquote(self): parser = CommandLineParser @@ -308,8 +306,6 @@ def test_input_header(self): self.assertEqual(sorted(input_header.keys()), sorted(collection.keys())) self.assertEqual(sorted(input_header.values()), sorted(collection.values())) - return - def test_messages_header(self): @Configuration() @@ -346,7 +342,6 @@ def fix_up(cls, command_class): pass '\r\n') self.assertEqual(output_buffer.getvalue().decode('utf-8'), expected) - return _package_path = os.path.dirname(__file__) diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index c221cc53..7a6d0e9e 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -15,7 +15,6 @@ # License for the specific language governing permissions and limitations # under the License. 
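The `izip` to `zip` swaps above are drop-in replacements because Python 3's builtin `zip()` is already lazy, matching the `itertools.izip` semantics these tests relied on. A small demonstration with throwaway data:

```python
strings = ['a', 'b', 'c']
expected_values = [1, 2, 3]

pairs = zip(strings, expected_values)  # lazy, like Python 2's itertools.izip
assert next(pairs) == ('a', 1)         # consumes one pair at a time
assert list(pairs) == [('b', 2), ('c', 3)]
```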
-from __future__ import absolute_import, division, print_function, unicode_literals
 
 from splunklib.searchcommands.internals import MetadataDecoder, MetadataEncoder, Recorder, RecordWriterV2
 from splunklib.searchcommands import SearchMetric
@@ -98,7 +97,7 @@ def random_list(population, *args):
 
 
 def random_unicode():
-    return ''.join(imap(lambda x: six.unichr(x), random.sample(range(MAX_NARROW_UNICODE), random.randint(0, max_length))))
+    return ''.join([chr(x) for x in random.sample(list(range(MAX_NARROW_UNICODE)), random.randint(0, max_length))])
 
 # endregion
@@ -118,14 +117,9 @@ def test_object_view(self):
 
         json_output = encoder.encode(view)
         self.assertEqual(self._json_input, json_output)
-        return
 
     def test_recorder(self):
 
-        if (python_version[0] == 2 and python_version[1] < 7):
-            print("Skipping test since we're on {1}".format("".join(python_version)))
-            pass
-
         # Grab an input/output recording, the results of a prior countmatches run
 
         recording = os.path.join(self._package_path, 'recordings', 'scpv2', 'Splunk-6.3', 'countmatches.')
@@ -172,8 +166,6 @@ def test_recorder(self):
         ifile._recording.close()
         os.remove(ifile._recording.name)
 
-        return
-
     def test_record_writer_with_random_data(self, save_recording=False):
 
         # Confirmed: [minint, maxint) covers the full range of values that xrange allows
@@ -192,7 +184,7 @@ def test_record_writer_with_random_data(self, save_recording=False):
 
         for serial_number in range(0, 31):
             values = [serial_number, time(), random_bytes(), random_dict(), random_integers(), random_unicode()]
-            record = OrderedDict(izip(fieldnames, values))
+            record = OrderedDict(list(zip(fieldnames, values)))
             #try:
             write_record(record)
             #except Exception as error:
@@ -236,8 +228,8 @@ def test_record_writer_with_random_data(self, save_recording=False):
         self.assertListEqual(writer._inspector['messages'], messages)
 
         self.assertDictEqual(
-            dict(ifilter(lambda k_v: k_v[0].startswith('metric.'), six.iteritems(writer._inspector))),
-            dict(imap(lambda k_v1: ('metric.' + k_v1[0], k_v1[1]), six.iteritems(metrics))))
+            dict([k_v for k_v in six.iteritems(writer._inspector) if k_v[0].startswith('metric.')]),
+            dict([('metric.' + k_v1[0], k_v1[1]) for k_v1 in six.iteritems(metrics)]))
 
         writer.flush(finished=True)
@@ -267,18 +259,15 @@ def test_record_writer_with_random_data(self, save_recording=False):
 
         # P2 [ ] TODO: Verify that RecordWriter gives consumers the ability to finish early by calling
        # RecordWriter.flush(finish=True).
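The `ifilter`/`imap` rewrites above wrap list comprehensions in `dict(...)`; dict comprehensions express the same filtering and key-prefixing directly, and on Python 3 `six.iteritems(d)` is equivalent to plain `d.items()`. A sketch with made-up sample values standing in for `writer._inspector` and `metrics`:

```python
# Sample values are illustrative only.
inspector = {'metric.elapsed': 12, 'messages': []}
metrics = {'elapsed': 12}

filtered = {k: v for k, v in inspector.items() if k.startswith('metric.')}
prefixed = {'metric.' + k: v for k, v in metrics.items()}
assert filtered == prefixed == {'metric.elapsed': 12}
```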
- return - def _compare_chunks(self, chunks_1, chunks_2): self.assertEqual(len(chunks_1), len(chunks_2)) n = 0 - for chunk_1, chunk_2 in izip(chunks_1, chunks_2): + for chunk_1, chunk_2 in zip(chunks_1, chunks_2): self.assertDictEqual( chunk_1.metadata, chunk_2.metadata, 'Chunk {0}: metadata error: "{1}" != "{2}"'.format(n, chunk_1.metadata, chunk_2.metadata)) self.assertMultiLineEqual(chunk_1.body, chunk_2.body, 'Chunk {0}: data error'.format(n)) n += 1 - return def _load_chunks(self, ifile): import re @@ -335,7 +324,7 @@ def _load_chunks(self, ifile): _recordings_path = os.path.join(_package_path, 'recordings', 'scpv2', 'Splunk-6.3') -class TestRecorder(object): +class TestRecorder(): def __init__(self, test_case): @@ -348,7 +337,6 @@ def _not_implemented(self): raise NotImplementedError('class {} is not in playback or record mode'.format(self.__class__.__name__)) self.get = self.next_part = self.stop = MethodType(_not_implemented, self, self.__class__) - return @property def output(self): @@ -377,7 +365,6 @@ def stop(self): self._test_case.assertEqual(test_data['results'], self._output.getvalue()) self.stop = MethodType(stop, self, self.__class__) - return def record(self, path): @@ -412,7 +399,6 @@ def stop(self): pickle.dump(test, f) self.stop = MethodType(stop, self, self.__class__) - return def recorded(method): @@ -424,7 +410,7 @@ def _record(*args, **kwargs): return _record -class Test(object): +class Test(): def __init__(self, fieldnames, data_generators): @@ -473,11 +459,9 @@ def _run(self): names = self.fieldnames for self._serial_number in range(0, 31): - record = OrderedDict(izip(names, self.row)) + record = OrderedDict(list(zip(names, self.row))) write_record(record) - return - # test = Test(['random_bytes', 'random_unicode'], [random_bytes, random_unicode]) # test.record() diff --git a/tests/searchcommands/test_multibyte_processing.py b/tests/searchcommands/test_multibyte_processing.py index 4d6127fe..bf940263 100644 --- a/tests/searchcommands/test_multibyte_processing.py +++ b/tests/searchcommands/test_multibyte_processing.py @@ -25,15 +25,13 @@ def get_input_file(name): def test_multibyte_chunked(): data = gzip.open(get_input_file("multibyte_input")) - if not six.PY2: - data = io.TextIOWrapper(data) + data = io.TextIOWrapper(data) cmd = build_test_command() cmd._process_protocol_v2(sys.argv, data, sys.stdout) def test_v1_searchcommand(): data = gzip.open(get_input_file("v1_search_input")) - if not six.PY2: - data = io.TextIOWrapper(data) + data = io.TextIOWrapper(data) cmd = build_test_command() cmd._process_protocol_v1(["test_script.py", "__EXECUTE__"], data, sys.stdout) diff --git a/tests/searchcommands/test_reporting_command.py b/tests/searchcommands/test_reporting_command.py index e5add818..2111447d 100644 --- a/tests/searchcommands/test_reporting_command.py +++ b/tests/searchcommands/test_reporting_command.py @@ -1,6 +1,6 @@ import io -import splunklib.searchcommands as searchcommands +from splunklib import searchcommands from . 
import chunked_data_stream as chunky @@ -15,7 +15,7 @@ def reduce(self, records): cmd = TestReportingCommand() ifile = io.BytesIO() - data = list() + data = [] for i in range(0, 10): data.append({"value": str(i)}) ifile.write(chunky.build_getinfo_chunk()) diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py index baa8edb7..b203cc76 100755 --- a/tests/searchcommands/test_search_command.py +++ b/tests/searchcommands/test_search_command.py @@ -15,7 +15,6 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import, division, print_function, unicode_literals from splunklib import six from splunklib.searchcommands import Configuration, StreamingCommand @@ -43,8 +42,7 @@ def build_command_input(getinfo_metadata, execute_metadata, execute_body): ifile = BytesIO(six.ensure_binary(input)) - if not six.PY2: - ifile = TextIOWrapper(ifile) + ifile = TextIOWrapper(ifile) return ifile @@ -57,10 +55,7 @@ class TestCommand(SearchCommand): def echo(self, records): for record in records: if record.get('action') == 'raise_exception': - if six.PY2: - raise StandardError(self) - else: - raise Exception(self) + raise Exception(self) yield record def _execute(self, ifile, process): @@ -108,7 +103,6 @@ def stream(self, records): value = self.search_results_info if action == 'get_search_results_info' else None yield {'_serial': serial_number, 'data': value} serial_number += 1 - return @pytest.mark.smoke class TestSearchCommand(TestCase): @@ -151,14 +145,7 @@ def test_process_scpv1(self): self.assertEqual(str(command.configuration), '') - if six.PY2: - expected = ("[(u'clear_required_fields', None, [1]), (u'distributed', None, [2]), (u'generates_timeorder', None, [1]), " - "(u'generating', None, [1, 2]), (u'maxinputs', None, [2]), (u'overrides_timeorder', None, [1]), " - "(u'required_fields', None, [1, 2]), (u'requires_preop', None, [1]), (u'retainsevents', None, [1]), " - "(u'run_in_preview', None, [2]), (u'streaming', None, [1]), (u'streaming_preop', None, [1, 2]), " - "(u'type', None, [2])]") - else: - expected = ("[('clear_required_fields', None, [1]), ('distributed', None, [2]), ('generates_timeorder', None, [1]), " + expected = ("[('clear_required_fields', None, [1]), ('distributed', None, [2]), ('generates_timeorder', None, [1]), " "('generating', None, [1, 2]), ('maxinputs', None, [2]), ('overrides_timeorder', None, [1]), " "('required_fields', None, [1, 2]), ('requires_preop', None, [1]), ('retainsevents', None, [1]), " "('run_in_preview', None, [2]), ('streaming', None, [1]), ('streaming_preop', None, [1, 2]), " @@ -201,24 +188,12 @@ def test_process_scpv1(self): configuration.run_in_preview = True configuration.type = 'streaming' - if six.PY2: - expected = ('clear_required_fields="True", generates_timeorder="True", generating="True", overrides_timeorder="True", ' - 'required_fields="[u\'foo\', u\'bar\']", requires_preop="True", retainsevents="True", streaming="True", ' - 'streaming_preop="some streaming command"') - else: - expected = ('clear_required_fields="True", generates_timeorder="True", generating="True", overrides_timeorder="True", ' + expected = ('clear_required_fields="True", generates_timeorder="True", generating="True", overrides_timeorder="True", ' 'required_fields="[\'foo\', \'bar\']", requires_preop="True", retainsevents="True", streaming="True", ' 'streaming_preop="some streaming command"') self.assertEqual(str(command.configuration), expected) - if six.PY2: - 
expected = ("[(u'clear_required_fields', True, [1]), (u'distributed', True, [2]), (u'generates_timeorder', True, [1]), " - "(u'generating', True, [1, 2]), (u'maxinputs', 50000, [2]), (u'overrides_timeorder', True, [1]), " - "(u'required_fields', [u'foo', u'bar'], [1, 2]), (u'requires_preop', True, [1]), " - "(u'retainsevents', True, [1]), (u'run_in_preview', True, [2]), (u'streaming', True, [1]), " - "(u'streaming_preop', u'some streaming command', [1, 2]), (u'type', u'streaming', [2])]") - else: - expected = ("[('clear_required_fields', True, [1]), ('distributed', True, [2]), ('generates_timeorder', True, [1]), " + expected = ("[('clear_required_fields', True, [1]), ('distributed', True, [2]), ('generates_timeorder', True, [1]), " "('generating', True, [1, 2]), ('maxinputs', 50000, [2]), ('overrides_timeorder', True, [1]), " "('required_fields', ['foo', 'bar'], [1, 2]), ('requires_preop', True, [1]), " "('retainsevents', True, [1]), ('run_in_preview', True, [2]), ('streaming', True, [1]), " @@ -236,7 +211,7 @@ def test_process_scpv1(self): result.seek(0) reader = csv.reader(codecs.iterdecode(result, 'UTF-8')) self.assertEqual([], next(reader)) - observed = dict(izip(next(reader), next(reader))) + observed = dict(list(zip(next(reader), next(reader)))) self.assertRaises(StopIteration, lambda: next(reader)) expected = { @@ -383,8 +358,6 @@ def test_process_scpv1(self): self.assertIsNone(command.search_results_info) self.assertIsNone(command.service) - return - def test_process_scpv2(self): # SearchCommand.process should @@ -528,83 +501,83 @@ def test_process_scpv2(self): self.maxDiff = None self.assertDictEqual(command.search_results_info.__dict__, { - u'is_summary_index': 0, - u'bs_thread_count': 1, - u'rt_backfill': 0, - u'rtspan': '', - u'search_StartTime': 1433261392.934936, - u'read_raw': 1, - u'root_sid': '', - u'field_rendering': '', - u'query_finished': 1, - u'optional_fields_json': {}, - u'group_list': '', - u'remoteServers': '', - u'rt_latest': '', - u'remote_log_download_mode': 'disabled', - u'reduce_search': '', - u'request_finalization': 0, - u'auth_token': 'UQZSgWwE2f9oIKrj1QG^kVhW^T_cR4H5Z65bPtMhwlHytS5jFrFYyH^dGzjTusDjVTgoBNeR7bvIzctHF7DrLJ1ANevgDOWEWRvABNj6d_k0koqxw9Io', - u'indexed_realtime': 0, - u'ppc_bs': '$SPLUNK_HOME/etc', - u'drop_count': 0, - u'datamodel_map': '', - u'search_can_be_event_type': 0, - u'search_StartUp_Spent': 0, - u'realtime': 0, - u'splunkd_uri': 'https://127.0.0.1:8089', - u'columnOrder': '', - u'kv_store_settings': 'hosts;127.0.0.1:8191\\;;local;127.0.0.1:8191;read_preference;958513E3-8716-4ABF-9559-DA0C9678437F;replica_set_name;958513E3-8716-4ABF-9559-DA0C9678437F;status;ready;', - u'label': '', - u'summary_maxtimespan': '', - u'indexed_realtime_offset': 0, - u'sid': 1433261392.159, - u'msg': [], - u'internal_only': 0, - u'summary_id': '', - u'orig_search_head': '', - u'ppc_app': 'chunked_searchcommands', - u'countMap': { - u'invocations.dispatch.writeStatus': u'1', - u'duration.dispatch.writeStatus': u'2', - u'duration.startup.handoff': u'79', - u'duration.startup.configuration': u'34', - u'invocations.startup.handoff': u'1', - u'invocations.startup.configuration': u'1'}, - u'is_shc_mode': 0, - u'shp_id': '958513E3-8716-4ABF-9559-DA0C9678437F', - u'timestamp': 1433261392.936374, u'is_remote_sorted': 0, - u'remote_search': '', - u'splunkd_protocol': 'https', - u'site': '', - u'maxevents': 0, - u'keySet': '', - u'summary_stopped': 0, - u'search_metrics': { - u'ConsideredEvents': 0, - u'ConsideredBuckets': 0, - u'TotalSlicesInBuckets': 0, - 
u'EliminatedBuckets': 0, - u'DecompressedSlices': 0}, - u'summary_mode': 'all', u'now': 1433261392.0, - u'splunkd_port': 8089, u'is_saved_search': 0, - u'rtoptions': '', - u'search': '| inputlookup random_data max=50000 | sum total=total value1 record=t | export add_timestamp=f add_offset=t format=csv segmentation=raw', - u'bundle_version': 0, - u'generation_id': 0, - u'bs_thread_id': 0, - u'is_batch_mode': 0, - u'scan_count': 0, - u'rt_earliest': '', - u'default_group': '*', - u'tstats_reduce': '', - u'kv_store_additional_settings': 'hosts_guids;958513E3-8716-4ABF-9559-DA0C9678437F\\;;', - u'enable_event_stream': 0, - u'is_remote': 0, - u'is_scheduled': 0, - u'sample_ratio': 1, - u'ppc_user': 'admin', - u'sample_seed': 0}) + 'is_summary_index': 0, + 'bs_thread_count': 1, + 'rt_backfill': 0, + 'rtspan': '', + 'search_StartTime': 1433261392.934936, + 'read_raw': 1, + 'root_sid': '', + 'field_rendering': '', + 'query_finished': 1, + 'optional_fields_json': {}, + 'group_list': '', + 'remoteServers': '', + 'rt_latest': '', + 'remote_log_download_mode': 'disabled', + 'reduce_search': '', + 'request_finalization': 0, + 'auth_token': 'UQZSgWwE2f9oIKrj1QG^kVhW^T_cR4H5Z65bPtMhwlHytS5jFrFYyH^dGzjTusDjVTgoBNeR7bvIzctHF7DrLJ1ANevgDOWEWRvABNj6d_k0koqxw9Io', + 'indexed_realtime': 0, + 'ppc_bs': '$SPLUNK_HOME/etc', + 'drop_count': 0, + 'datamodel_map': '', + 'search_can_be_event_type': 0, + 'search_StartUp_Spent': 0, + 'realtime': 0, + 'splunkd_uri': 'https://127.0.0.1:8089', + 'columnOrder': '', + 'kv_store_settings': 'hosts;127.0.0.1:8191\\;;local;127.0.0.1:8191;read_preference;958513E3-8716-4ABF-9559-DA0C9678437F;replica_set_name;958513E3-8716-4ABF-9559-DA0C9678437F;status;ready;', + 'label': '', + 'summary_maxtimespan': '', + 'indexed_realtime_offset': 0, + 'sid': 1433261392.159, + 'msg': [], + 'internal_only': 0, + 'summary_id': '', + 'orig_search_head': '', + 'ppc_app': 'chunked_searchcommands', + 'countMap': { + 'invocations.dispatch.writeStatus': '1', + 'duration.dispatch.writeStatus': '2', + 'duration.startup.handoff': '79', + 'duration.startup.configuration': '34', + 'invocations.startup.handoff': '1', + 'invocations.startup.configuration': '1'}, + 'is_shc_mode': 0, + 'shp_id': '958513E3-8716-4ABF-9559-DA0C9678437F', + 'timestamp': 1433261392.936374, 'is_remote_sorted': 0, + 'remote_search': '', + 'splunkd_protocol': 'https', + 'site': '', + 'maxevents': 0, + 'keySet': '', + 'summary_stopped': 0, + 'search_metrics': { + 'ConsideredEvents': 0, + 'ConsideredBuckets': 0, + 'TotalSlicesInBuckets': 0, + 'EliminatedBuckets': 0, + 'DecompressedSlices': 0}, + 'summary_mode': 'all', 'now': 1433261392.0, + 'splunkd_port': 8089, 'is_saved_search': 0, + 'rtoptions': '', + 'search': '| inputlookup random_data max=50000 | sum total=total value1 record=t | export add_timestamp=f add_offset=t format=csv segmentation=raw', + 'bundle_version': 0, + 'generation_id': 0, + 'bs_thread_id': 0, + 'is_batch_mode': 0, + 'scan_count': 0, + 'rt_earliest': '', + 'default_group': '*', + 'tstats_reduce': '', + 'kv_store_additional_settings': 'hosts_guids;958513E3-8716-4ABF-9559-DA0C9678437F\\;;', + 'enable_event_stream': 0, + 'is_remote': 0, + 'is_scheduled': 0, + 'sample_ratio': 1, + 'ppc_user': 'admin', + 'sample_seed': 0}) self.assertIsInstance(command.service, Service) @@ -706,13 +679,7 @@ def test_process_scpv2(self): finished = r'\"finished\":true' - if six.PY2: - inspector = \ - r'\"inspector\":\{\"messages\":\[\[\"ERROR\",\"StandardError at \\\".+\\\", line \d+ : test ' \ - r'logging_configuration=\\\".+\\\" 
logging_level=\\\"WARNING\\\" record=\\\"f\\\" ' \ - r'required_option_1=\\\"value_1\\\" required_option_2=\\\"value_2\\\" show_configuration=\\\"f\\\"\"\]\]\}' - else: - inspector = \ + inspector = \ r'\"inspector\":\{\"messages\":\[\[\"ERROR\",\"Exception at \\\".+\\\", line \d+ : test ' \ r'logging_configuration=\\\".+\\\" logging_level=\\\"WARNING\\\" record=\\\"f\\\" ' \ r'required_option_1=\\\"value_1\\\" required_option_2=\\\"value_2\\\" show_configuration=\\\"f\\\"\"\]\]\}' @@ -781,8 +748,7 @@ def test_process_scpv2(self): '{"finished":true}' ) - self.assertEquals(result.getvalue().decode("UTF-8"), expected) - return + self.assertEqual(result.getvalue().decode("UTF-8"), expected) _package_directory = os.path.dirname(os.path.abspath(__file__)) diff --git a/tests/searchcommands/test_searchcommands_app.py b/tests/searchcommands/test_searchcommands_app.py index faf14abd..38f83836 100755 --- a/tests/searchcommands/test_searchcommands_app.py +++ b/tests/searchcommands/test_searchcommands_app.py @@ -24,8 +24,6 @@ # self.metadata, self.search_results_info, and self.service. Such mocks might be based on archived dispatch directories. -from __future__ import absolute_import, division, print_function, unicode_literals - from collections import namedtuple from splunklib.six.moves import cStringIO as StringIO from datetime import datetime @@ -37,10 +35,7 @@ from subprocess import PIPE, Popen from splunklib import six -try: - from unittest2 import main, skipUnless, TestCase -except ImportError: - from unittest import main, skipUnless, TestCase +from unittest import main, skipUnless, TestCase import gzip import json @@ -57,6 +52,7 @@ import pytest + def pypy(): try: process = Popen(['pypy', '--version'], stderr=PIPE, stdout=PIPE) @@ -67,7 +63,7 @@ def pypy(): return process.returncode == 0 -class Recording(object): +class Recording: def __init__(self, path): @@ -77,7 +73,7 @@ def __init__(self, path): if os.path.exists(self._dispatch_dir): with io.open(os.path.join(self._dispatch_dir, 'request.csv')) as ifile: reader = csv.reader(ifile) - for name, value in izip(next(reader), next(reader)): + for name, value in zip(next(reader), next(reader)): if name == 'search': self._search = value break @@ -124,32 +120,30 @@ def search(self): return self._search -class Recordings(object): +class Recordings: def __init__(self, name, action, phase, protocol_version): - basedir = Recordings._prefix + six.text_type(protocol_version) if not os.path.isdir(basedir): - raise ValueError('Directory "{}" containing recordings for protocol version {} does not exist'.format( - protocol_version, basedir)) + raise ValueError( + f'Directory "{protocol_version}" containing recordings for protocol version {basedir} does not exist') self._basedir = basedir - self._name = '.'.join(ifilter(lambda part: part is not None, (name, action, phase))) + self._name = '.'.join([part for part in (name, action, phase) if part is not None]) def __iter__(self): - basedir = self._basedir name = self._name - iterator = imap( - lambda directory: Recording(os.path.join(basedir, directory, name)), ifilter( - lambda filename: os.path.isdir(os.path.join(basedir, filename)), os.listdir(basedir))) + iterator = [Recording(os.path.join(basedir, directory, name)) for directory in + [filename for filename in os.listdir(basedir) if os.path.isdir(os.path.join(basedir, filename))]] return iterator _prefix = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'recordings', 'scpv') + @pytest.mark.smoke class TestSearchCommandsApp(TestCase): app_root = 
os.path.join(project_root, 'examples', 'searchcommands_app', 'build', 'searchcommands_app') @@ -160,7 +154,8 @@ def setUp(self): self.skipTest("You must build the searchcommands_app by running " + build_command) TestCase.setUp(self) - @pytest.mark.skipif(six.PY3, reason="Python 2 does not treat Unicode as words for regex, so Python 3 has broken fixtures") + @pytest.mark.skipif(six.PY3, + reason="Python 2 does not treat Unicode as words for regex, so Python 3 has broken fixtures") def test_countmatches_as_unit(self): expected, output, errors, exit_status = self._run_command('countmatches', action='getinfo', protocol=1) self.assertEqual(0, exit_status, msg=six.text_type(errors)) @@ -178,8 +173,6 @@ def test_countmatches_as_unit(self): self.assertEqual('', errors, msg=six.text_type(errors)) self._compare_chunks(expected, output) - return - def test_generatehello_as_unit(self): expected, output, errors, exit_status = self._run_command('generatehello', action='getinfo', protocol=1) @@ -197,8 +190,6 @@ def test_generatehello_as_unit(self): self.assertEqual('', errors, msg=six.text_type(errors)) self._compare_chunks(expected, output, time_sensitive=False) - return - def test_sum_as_unit(self): expected, output, errors, exit_status = self._run_command('sum', action='getinfo', phase='reduce', protocol=1) @@ -231,22 +222,20 @@ def test_sum_as_unit(self): self.assertEqual('', errors, msg=six.text_type(errors)) self._compare_chunks(expected, output) - return - def assertInfoEqual(self, output, expected): reader = csv.reader(StringIO(output)) self.assertEqual([], next(reader)) fields = next(reader) values = next(reader) - self.assertRaises(StopIteration, reader.next) - output = dict(izip(fields, values)) + self.assertRaises(StopIteration, reader.__next__) + output = dict(list(zip(fields, values))) reader = csv.reader(StringIO(expected)) self.assertEqual([], next(reader)) fields = next(reader) values = next(reader) - self.assertRaises(StopIteration, reader.next) - expected = dict(izip(fields, values)) + self.assertRaises(StopIteration, reader.__next__) + expected = dict(list(zip(fields, values))) self.assertDictEqual(expected, output) @@ -265,15 +254,13 @@ def _compare_chunks(self, expected, output, time_sensitive=True): self.assertEqual(len(chunks_1), len(chunks_2)) n = 0 - for chunk_1, chunk_2 in izip(chunks_1, chunks_2): + for chunk_1, chunk_2 in zip(chunks_1, chunks_2): self.assertDictEqual( chunk_1.metadata, chunk_2.metadata, - 'Chunk {0}: metadata error: "{1}" != "{2}"'.format(n, chunk_1.metadata, chunk_2.metadata)) + f'Chunk {n}: metadata error: "{chunk_1.metadata}" != "{chunk_2.metadata}"') compare_csv_files(chunk_1.body, chunk_2.body) n += 1 - return - def _compare_csv_files_time_insensitive(self, expected, output): skip_first_row = expected[0:2] == '\r\n' @@ -305,11 +292,6 @@ def _compare_csv_files_time_insensitive(self, expected, output): line_number += 1 - if six.PY2: - self.assertRaises(StopIteration, output.next) - - return - def _compare_csv_files_time_sensitive(self, expected, output): self.assertEqual(len(expected), len(output)) @@ -332,18 +314,13 @@ def _compare_csv_files_time_sensitive(self, expected, output): line_number, expected_row, output_row)) line_number += 1 - if six.PY2: - self.assertRaises(StopIteration, output.next) - - return - def _get_search_command_path(self, name): path = os.path.join( project_root, 'examples', 'searchcommands_app', 'build', 'searchcommands_app', 'bin', name + '.py') self.assertTrue(os.path.isfile(path)) return path - def _load_chunks(self, ifile): + 
def _load_chunks(self, ifile):
         import re
 
         pattern = re.compile(r'chunked 1.0,(?P<metadata_length>\d+),(?P<body_length>\d+)(\n)?')
diff --git a/tests/searchcommands/test_streaming_command.py b/tests/searchcommands/test_streaming_command.py
index ffe6a737..61c89947 100644
--- a/tests/searchcommands/test_streaming_command.py
+++ b/tests/searchcommands/test_streaming_command.py
@@ -16,7 +16,7 @@ def stream(self, records):
     cmd = TestStreamingCommand()
     ifile = io.BytesIO()
     ifile.write(chunky.build_getinfo_chunk())
-    data = list()
+    data = []
     for i in range(0, 10):
         data.append({"in_index": str(i)})
     ifile.write(chunky.build_data_chunk(data, finished=True))
@@ -44,7 +44,7 @@ def stream(self, records):
     cmd = TestStreamingCommand()
     ifile = io.BytesIO()
     ifile.write(chunky.build_getinfo_chunk())
-    data = list()
+    data = []
     for i in range(0, 10):
         data.append({"in_index": str(i)})
     ifile.write(chunky.build_data_chunk(data, finished=True))
@@ -53,14 +53,14 @@ def stream(self, records):
     cmd._process_protocol_v2([], ifile, ofile)
     ofile.seek(0)
     output_iter = chunky.ChunkedDataStream(ofile).__iter__()
-    output_iter.next()
-    output_records = [i for i in output_iter.next().data]
+    next(output_iter)
+    output_records = list(next(output_iter).data)
 
     # Assert that count of records having "odd_field" is 0
-    assert len(list(filter(lambda r: "odd_field" in r, output_records))) == 0
+    assert len(list([r for r in output_records if "odd_field" in r])) == 0
 
     # Assert that count of records having "even_field" is 10
-    assert len(list(filter(lambda r: "even_field" in r, output_records))) == 10
+    assert len(list([r for r in output_records if "even_field" in r])) == 10
 
 
 def test_field_preservation_positive():
@@ -78,7 +78,7 @@ def stream(self, records):
     cmd = TestStreamingCommand()
     ifile = io.BytesIO()
     ifile.write(chunky.build_getinfo_chunk())
-    data = list()
+    data = []
     for i in range(0, 10):
         data.append({"in_index": str(i)})
     ifile.write(chunky.build_data_chunk(data, finished=True))
@@ -87,11 +87,12 @@ def stream(self, records):
     cmd._process_protocol_v2([], ifile, ofile)
     ofile.seek(0)
     output_iter = chunky.ChunkedDataStream(ofile).__iter__()
-    output_iter.next()
-    output_records = [i for i in output_iter.next().data]
+    next(output_iter)
+    output_records = list(next(output_iter).data)
 
     # Assert that count of records having "odd_field" is 10
-    assert len(list(filter(lambda r: "odd_field" in r, output_records))) == 10
+    assert len(list([r for r in output_records if "odd_field" in r])) == 10
 
     # Assert that count of records having "even_field" is 10
-    assert len(list(filter(lambda r: "even_field" in r, output_records))) == 10
+    assert len(list([r for r in output_records if "even_field" in r])) == 10
+
diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py
index cc524b30..e3cbb278 100755
--- a/tests/searchcommands/test_validators.py
+++ b/tests/searchcommands/test_validators.py
@@ -15,8 +15,6 @@
 # License for the specific language governing permissions and limitations
 # under the License.
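The `output_iter.next()` to `next(output_iter)` conversions above are the call-site half of the iterator migration: Python 3 renamed the method to `__next__`, and the builtin `next()` is the portable way to advance any iterator. For reference:

```python
it = iter([10, 20, 30])
assert next(it) == 10        # dispatches to it.__next__()
assert it.__next__() == 20   # legal, but next(it) is the idiomatic spelling
assert next(it, None) == 30  # next() also accepts a default for exhaustion
assert next(it, None) is None
```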
-from __future__ import absolute_import, division, print_function, unicode_literals - from splunklib.searchcommands import validators from random import randint from unittest import main, TestCase @@ -58,8 +56,6 @@ def test_boolean(self): self.assertIsNone(validator.__call__(None)) self.assertRaises(ValueError, validator.__call__, 'anything-else') - return - def test_duration(self): # Duration validator should parse and format time intervals of the form @@ -97,8 +93,6 @@ def test_duration(self): self.assertRaises(ValueError, validator, '00:00:60') self.assertRaises(ValueError, validator, '00:60:00') - return - def test_fieldname(self): pass @@ -140,8 +134,6 @@ def test_file(self): if os.path.exists(full_path): os.unlink(full_path) - return - def test_integer(self): # Point of interest: @@ -168,10 +160,7 @@ def test(integer): for s in str(integer), six.text_type(integer): value = validator.__call__(s) self.assertEqual(value, integer) - if six.PY2: - self.assertIsInstance(value, long) - else: - self.assertIsInstance(value, int) + self.assertIsInstance(value, int) self.assertEqual(validator.format(integer), six.text_type(integer)) test(2 * minsize) @@ -203,8 +192,6 @@ def test(integer): self.assertEqual(validator.__call__(maxsize), maxsize) self.assertRaises(ValueError, validator.__call__, minsize - 1) self.assertRaises(ValueError, validator.__call__, maxsize + 1) - - return def test_float(self): # Float validator test @@ -261,8 +248,6 @@ def test(float_val): self.assertRaises(ValueError, validator.__call__, minsize - 1) self.assertRaises(ValueError, validator.__call__, maxsize + 1) - return - def test_list(self): validator = validators.List() diff --git a/tests/test_all.py b/tests/test_all.py index 7789f8fd..e7421797 100755 --- a/tests/test_all.py +++ b/tests/test_all.py @@ -16,15 +16,11 @@ """Runs all the Splunk SDK for Python unit tests.""" -from __future__ import absolute_import import os -try: - import unittest2 as unittest # We must be sure to get unittest2--not unittest--on Python 2.6 -except ImportError: - import unittest +import unittest os.chdir(os.path.dirname(os.path.abspath(__file__))) suite = unittest.defaultTestLoader.discover('.') if __name__ == '__main__': - unittest.TextTestRunner().run(suite) \ No newline at end of file + unittest.TextTestRunner().run(suite) diff --git a/tests/test_app.py b/tests/test_app.py index 3dbc4cff..39b68a08 100755 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -14,11 +14,9 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import -from tests import testlib import logging - -import splunklib.client as client +from tests import testlib +from splunklib import client class TestApp(testlib.SDKTestCase): @@ -26,7 +24,7 @@ class TestApp(testlib.SDKTestCase): app_name = None def setUp(self): - super(TestApp, self).setUp() + super().setUp() if self.app is None: for app in self.service.apps: if app.name.startswith('delete-me'): @@ -37,18 +35,17 @@ def setUp(self): # than entities like indexes, this is okay. self.app_name = testlib.tmpname() self.app = self.service.apps.create(self.app_name) - logging.debug("Creating app %s", self.app_name) - else: - logging.debug("App %s already exists. Skipping creation.", self.app_name) + logging.debug(f"Creating app {self.app_name}") + logging.debug(f"App {self.app_name} already exists. 
Skipping creation.") if self.service.restart_required: self.service.restart(120) - return def tearDown(self): - super(TestApp, self).tearDown() + super().tearDown() # The rest of this will leave Splunk in a state requiring a restart. # It doesn't actually matter, though. self.service = client.connect(**self.opts.kwargs) + app_name = '' for app in self.service.apps: app_name = app.name if app_name.startswith('delete-me'): @@ -90,7 +87,7 @@ def test_delete(self): self.assertTrue(name in self.service.apps) self.service.apps.delete(name) self.assertFalse(name in self.service.apps) - self.clear_restart_message() # We don't actually have to restart here. + self.clear_restart_message() # We don't actually have to restart here. def test_package(self): p = self.app.package() @@ -103,9 +100,7 @@ def test_updateInfo(self): p = self.app.updateInfo() self.assertTrue(p is not None) + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest unittest.main() diff --git a/tests/test_binding.py b/tests/test_binding.py index c101b19c..9f8b4029 100755 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -15,14 +15,12 @@ # under the License. -from __future__ import absolute_import from io import BytesIO from threading import Thread from splunklib.six.moves import BaseHTTPServer from splunklib.six.moves.urllib.request import Request, urlopen from splunklib.six.moves.urllib.error import HTTPError -import splunklib.six as six from xml.etree.ElementTree import XML import json @@ -30,13 +28,12 @@ from tests import testlib import unittest import socket -import sys import ssl -import splunklib.six.moves.http_cookies +import splunklib.six.moves.http_cookies -import splunklib.binding as binding +from splunklib import binding from splunklib.binding import HTTPError, AuthenticationError, UrlEncoded -import splunklib.data as data +from splunklib import data from splunklib import six import pytest @@ -65,14 +62,17 @@ def load(response): return data.load(response.body.read()) + class BindingTestCase(unittest.TestCase): context = None + def setUp(self): logging.info("%s", self.__class__.__name__) self.opts = testlib.parse([], {}, ".env") self.context = binding.connect(**self.opts.kwargs) logging.debug("Connected to splunkd.") + class TestResponseReader(BindingTestCase): def test_empty(self): response = binding.ResponseReader(BytesIO(b"")) @@ -124,9 +124,6 @@ def test_readinto_bytearray(self): self.assertTrue(response.empty) def test_readinto_memoryview(self): - import sys - if sys.version_info < (2, 7, 0): - return # memoryview is new to Python 2.7 txt = b"Checking readinto works as expected" response = binding.ResponseReader(BytesIO(txt)) arr = bytearray(10) @@ -142,7 +139,6 @@ def test_readinto_memoryview(self): self.assertTrue(response.empty) - class TestUrlEncoded(BindingTestCase): def test_idempotent(self): a = UrlEncoded('abc') @@ -173,6 +169,7 @@ def test_chars(self): def test_repr(self): self.assertEqual(repr(UrlEncoded('% %')), "UrlEncoded('% %')") + class TestAuthority(unittest.TestCase): def test_authority_default(self): self.assertEqual(binding._authority(), @@ -198,6 +195,7 @@ def test_all_fields(self): port="471"), "http://splunk.utopia.net:471") + class TestUserManipulation(BindingTestCase): def setUp(self): BindingTestCase.setUp(self) @@ -278,12 +276,12 @@ class TestSocket(BindingTestCase): def test_socket(self): socket = self.context.connect() socket.write(("POST %s HTTP/1.1\r\n" % \ - 
self.context._abspath("some/path/to/post/to")).encode('utf-8')) + self.context._abspath("some/path/to/post/to")).encode('utf-8')) socket.write(("Host: %s:%s\r\n" % \ - (self.context.host, self.context.port)).encode('utf-8')) + (self.context.host, self.context.port)).encode('utf-8')) socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) socket.write(("Authorization: %s\r\n" % \ - self.context.token).encode('utf-8')) + self.context.token).encode('utf-8')) socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) socket.write("\r\n".encode('utf-8')) socket.close() @@ -308,6 +306,7 @@ def test_socket_gethostbyname(self): self.context.host = socket.gethostbyname(self.context.host) self.assertTrue(self.context.connect()) + class TestUnicodeConnect(BindingTestCase): def test_unicode_connect(self): opts = self.opts.kwargs.copy() @@ -317,6 +316,7 @@ def test_unicode_connect(self): response = context.get("/services") self.assertEqual(response.status, 200) + @pytest.mark.smoke class TestAutologin(BindingTestCase): def test_with_autologin(self): @@ -332,6 +332,7 @@ def test_without_autologin(self): self.assertRaises(AuthenticationError, self.context.get, "/services") + class TestAbspath(BindingTestCase): def setUp(self): BindingTestCase.setUp(self) @@ -339,7 +340,6 @@ def setUp(self): if 'app' in self.kwargs: del self.kwargs['app'] if 'owner' in self.kwargs: del self.kwargs['owner'] - def test_default(self): path = self.context._abspath("foo", owner=None, app=None) self.assertTrue(isinstance(path, UrlEncoded)) @@ -371,12 +371,12 @@ def test_sharing_app(self): self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") def test_sharing_global(self): - path = self.context._abspath("foo", owner="me", app="MyApp",sharing="global") + path = self.context._abspath("foo", owner="me", app="MyApp", sharing="global") self.assertTrue(isinstance(path, UrlEncoded)) self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") def test_sharing_system(self): - path = self.context._abspath("foo bar", owner="me", app="MyApp",sharing="system") + path = self.context._abspath("foo bar", owner="me", app="MyApp", sharing="system") self.assertTrue(isinstance(path, UrlEncoded)) self.assertEqual(path, "/servicesNS/nobody/system/foo%20bar") @@ -444,6 +444,7 @@ def test_context_with_owner_as_email(self): self.assertEqual(path, "/servicesNS/me%40me.com/system/foo") self.assertEqual(path, UrlEncoded("/servicesNS/me@me.com/system/foo")) + # An urllib2 based HTTP request handler, used to test the binding layers # support for pluggable request handlers. 
def urllib2_handler(url, message, **kwargs): @@ -452,13 +453,9 @@ def urllib2_handler(url, message, **kwargs): headers = dict(message.get('headers', [])) req = Request(url, data, headers) try: - # If running Python 2.7.9+, disable SSL certificate validation - if sys.version_info >= (2, 7, 9): - response = urlopen(req, context=ssl._create_unverified_context()) - else: - response = urlopen(req) + response = urlopen(req, context=ssl._create_unverified_context()) except HTTPError as response: - pass # Propagate HTTP errors via the returned response message + pass # Propagate HTTP errors via the returned response message return { 'status': response.code, 'reason': response.msg, @@ -466,6 +463,7 @@ def urllib2_handler(url, message, **kwargs): 'body': BytesIO(response.read()) } + def isatom(body): """Answers if the given response body looks like ATOM.""" root = XML(body) @@ -475,6 +473,7 @@ def isatom(body): root.find(XNAME_ID) is not None and \ root.find(XNAME_TITLE) is not None + class TestPluggableHTTP(testlib.SDKTestCase): # Verify pluggable HTTP reqeust handlers. def test_handlers(self): @@ -491,6 +490,7 @@ def test_handlers(self): body = context.get(path).body.read() self.assertTrue(isatom(body)) + @pytest.mark.smoke class TestLogout(BindingTestCase): def test_logout(self): @@ -516,7 +516,7 @@ def setUp(self): self.context = binding.connect(**self.opts.kwargs) # Skip these tests if running below Splunk 6.2, cookie-auth didn't exist before - import splunklib.client as client + from splunklib import client service = client.Service(**self.opts.kwargs) # TODO: Workaround the fact that skipTest is not defined by unittest2.TestCase service.login() @@ -603,14 +603,14 @@ def test_login_with_multiple_cookies(self): except AuthenticationError as ae: self.assertEqual(str(ae), "Login failed.") # Bring in a valid cookie now - for key, value in self.context.get_cookies().items(): + for key, value in list(self.context.get_cookies().items()): new_context.get_cookies()[key] = value self.assertEqual(len(new_context.get_cookies()), 2) self.assertTrue('bad' in list(new_context.get_cookies().keys())) self.assertTrue('cookie' in list(new_context.get_cookies().values())) - for k, v in self.context.get_cookies().items(): + for k, v in list(self.context.get_cookies().items()): self.assertEqual(new_context.get_cookies()[k], v) self.assertEqual(new_context.get("apps/local").status, 200) @@ -631,71 +631,71 @@ def test_login_fails_without_cookie_or_token(self): class TestNamespace(unittest.TestCase): def test_namespace(self): tests = [ - ({ }, - { 'sharing': None, 'owner': None, 'app': None }), + ({}, + {'sharing': None, 'owner': None, 'app': None}), - ({ 'owner': "Bob" }, - { 'sharing': None, 'owner': "Bob", 'app': None }), + ({'owner': "Bob"}, + {'sharing': None, 'owner': "Bob", 'app': None}), - ({ 'app': "search" }, - { 'sharing': None, 'owner': None, 'app': "search" }), + ({'app': "search"}, + {'sharing': None, 'owner': None, 'app': "search"}), - ({ 'owner': "Bob", 'app': "search" }, - { 'sharing': None, 'owner': "Bob", 'app': "search" }), + ({'owner': "Bob", 'app': "search"}, + {'sharing': None, 'owner': "Bob", 'app': "search"}), - ({ 'sharing': "user", 'owner': "Bob@bob.com" }, - { 'sharing': "user", 'owner': "Bob@bob.com", 'app': None }), + ({'sharing': "user", 'owner': "Bob@bob.com"}, + {'sharing': "user", 'owner': "Bob@bob.com", 'app': None}), - ({ 'sharing': "user" }, - { 'sharing': "user", 'owner': None, 'app': None }), + ({'sharing': "user"}, + {'sharing': "user", 'owner': None, 'app': None}), - ({ 
'sharing': "user", 'owner': "Bob" }, - { 'sharing': "user", 'owner': "Bob", 'app': None }), + ({'sharing': "user", 'owner': "Bob"}, + {'sharing': "user", 'owner': "Bob", 'app': None}), - ({ 'sharing': "user", 'app': "search" }, - { 'sharing': "user", 'owner': None, 'app': "search" }), + ({'sharing': "user", 'app': "search"}, + {'sharing': "user", 'owner': None, 'app': "search"}), - ({ 'sharing': "user", 'owner': "Bob", 'app': "search" }, - { 'sharing': "user", 'owner': "Bob", 'app': "search" }), + ({'sharing': "user", 'owner': "Bob", 'app': "search"}, + {'sharing': "user", 'owner': "Bob", 'app': "search"}), - ({ 'sharing': "app" }, - { 'sharing': "app", 'owner': "nobody", 'app': None }), + ({'sharing': "app"}, + {'sharing': "app", 'owner': "nobody", 'app': None}), - ({ 'sharing': "app", 'owner': "Bob" }, - { 'sharing': "app", 'owner': "nobody", 'app': None }), + ({'sharing': "app", 'owner': "Bob"}, + {'sharing': "app", 'owner': "nobody", 'app': None}), - ({ 'sharing': "app", 'app': "search" }, - { 'sharing': "app", 'owner': "nobody", 'app': "search" }), + ({'sharing': "app", 'app': "search"}, + {'sharing': "app", 'owner': "nobody", 'app': "search"}), - ({ 'sharing': "app", 'owner': "Bob", 'app': "search" }, - { 'sharing': "app", 'owner': "nobody", 'app': "search" }), + ({'sharing': "app", 'owner': "Bob", 'app': "search"}, + {'sharing': "app", 'owner': "nobody", 'app': "search"}), - ({ 'sharing': "global" }, - { 'sharing': "global", 'owner': "nobody", 'app': None }), + ({'sharing': "global"}, + {'sharing': "global", 'owner': "nobody", 'app': None}), - ({ 'sharing': "global", 'owner': "Bob" }, - { 'sharing': "global", 'owner': "nobody", 'app': None }), + ({'sharing': "global", 'owner': "Bob"}, + {'sharing': "global", 'owner': "nobody", 'app': None}), - ({ 'sharing': "global", 'app': "search" }, - { 'sharing': "global", 'owner': "nobody", 'app': "search" }), + ({'sharing': "global", 'app': "search"}, + {'sharing': "global", 'owner': "nobody", 'app': "search"}), - ({ 'sharing': "global", 'owner': "Bob", 'app': "search" }, - { 'sharing': "global", 'owner': "nobody", 'app': "search" }), + ({'sharing': "global", 'owner': "Bob", 'app': "search"}, + {'sharing': "global", 'owner': "nobody", 'app': "search"}), - ({ 'sharing': "system" }, - { 'sharing': "system", 'owner': "nobody", 'app': "system" }), + ({'sharing': "system"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), - ({ 'sharing': "system", 'owner': "Bob" }, - { 'sharing': "system", 'owner': "nobody", 'app': "system" }), + ({'sharing': "system", 'owner': "Bob"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), - ({ 'sharing': "system", 'app': "search" }, - { 'sharing': "system", 'owner': "nobody", 'app': "system" }), + ({'sharing': "system", 'app': "search"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), - ({ 'sharing': "system", 'owner': "Bob", 'app': "search" }, - { 'sharing': "system", 'owner': "nobody", 'app': "system" }), + ({'sharing': "system", 'owner': "Bob", 'app': "search"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), - ({ 'sharing': 'user', 'owner': '-', 'app': '-'}, - { 'sharing': 'user', 'owner': '-', 'app': '-'})] + ({'sharing': 'user', 'owner': '-', 'app': '-'}, + {'sharing': 'user', 'owner': '-', 'app': '-'})] for kwargs, expected in tests: namespace = binding.namespace(**kwargs) @@ -705,6 +705,7 @@ def test_namespace(self): def test_namespace_fails(self): self.assertRaises(ValueError, binding.namespace, sharing="gobble") + @pytest.mark.smoke class 
TestBasicAuthentication(unittest.TestCase): def setUp(self): @@ -715,13 +716,13 @@ def setUp(self): opts["password"] = self.opts.kwargs["password"] self.context = binding.connect(**opts) - import splunklib.client as client + from splunklib import client service = client.Service(**opts) if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: def assertIsNotNone(self, obj, msg=None): - if obj is None: - raise self.failureException(msg or '%r is not None' % obj) + if obj is None: + raise self.failureException(msg or '%r is not None' % obj) def test_basic_in_auth_headers(self): self.assertIsNotNone(self.context._auth_headers) @@ -732,6 +733,7 @@ def test_basic_in_auth_headers(self): self.assertEqual(self.context._auth_headers[0][1][:6], "Basic ") self.assertEqual(self.context.get("/services").status, 200) + @pytest.mark.smoke class TestTokenAuthentication(BindingTestCase): def test_preexisting_token(self): @@ -747,12 +749,12 @@ def test_preexisting_token(self): socket = newContext.connect() socket.write(("POST %s HTTP/1.1\r\n" % \ - self.context._abspath("some/path/to/post/to")).encode('utf-8')) + self.context._abspath("some/path/to/post/to")).encode('utf-8')) socket.write(("Host: %s:%s\r\n" % \ - (self.context.host, self.context.port)).encode('utf-8')) + (self.context.host, self.context.port)).encode('utf-8')) socket.write(("Accept-Encoding: identity\r\n").encode('utf-8')) socket.write(("Authorization: %s\r\n" % \ - self.context.token).encode('utf-8')) + self.context.token).encode('utf-8')) socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) socket.write(("\r\n").encode('utf-8')) socket.close() @@ -774,18 +776,17 @@ def test_preexisting_token_sans_splunk(self): self.assertEqual(response.status, 200) socket = newContext.connect() - socket.write(("POST %s HTTP/1.1\r\n" %\ - self.context._abspath("some/path/to/post/to")).encode('utf-8')) - socket.write(("Host: %s:%s\r\n" %\ - (self.context.host, self.context.port)).encode('utf-8')) + socket.write(("POST %s HTTP/1.1\r\n" % \ + self.context._abspath("some/path/to/post/to")).encode('utf-8')) + socket.write(("Host: %s:%s\r\n" % \ + (self.context.host, self.context.port)).encode('utf-8')) socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write(("Authorization: %s\r\n" %\ - self.context.token).encode('utf-8')) + socket.write(("Authorization: %s\r\n" % \ + self.context.token).encode('utf-8')) socket.write(("X-Splunk-Input-Mode: Streaming\r\n").encode('utf-8')) socket.write(("\r\n").encode('utf-8')) socket.close() - def test_connect_with_preexisting_token_sans_user_and_pass(self): token = self.context.token opts = self.opts.kwargs.copy() @@ -799,12 +800,12 @@ def test_connect_with_preexisting_token_sans_user_and_pass(self): socket = newContext.connect() socket.write(("POST %s HTTP/1.1\r\n" % \ - self.context._abspath("some/path/to/post/to")).encode('utf-8')) + self.context._abspath("some/path/to/post/to")).encode('utf-8')) socket.write(("Host: %s:%s\r\n" % \ - (self.context.host, self.context.port)).encode('utf-8')) + (self.context.host, self.context.port)).encode('utf-8')) socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) socket.write(("Authorization: %s\r\n" % \ - self.context.token).encode('utf-8')) + self.context.token).encode('utf-8')) socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) socket.write("\r\n".encode('utf-8')) socket.close() @@ -820,6 +821,7 @@ def handler(url, message, **kwargs): "status": 200, "headers": [], }) + ctx = binding.Context(handler=handler) 
ctx.post("foo/bar", owner="testowner", app="testapp", body={"testkey": "testvalue"}) @@ -831,6 +833,7 @@ def handler(url, message, **kwargs): "status": 200, "headers": [], }) + ctx = binding.Context(handler=handler) ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp", body={"testkey": "testvalue"}) @@ -842,6 +845,7 @@ def handler(url, message, **kwargs): "status": 200, "headers": [], }) + ctx = binding.Context(handler=handler) ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp") @@ -853,12 +857,13 @@ def wrapped(handler_self): handler_self.send_response(response_code) handler_self.end_headers() handler_self.wfile.write(body) + return wrapped -class MockServer(object): +class MockServer: def __init__(self, port=9093, **handlers): - methods = {"do_" + k: _wrap_handler(v) for (k, v) in handlers.items()} + methods = {"do_" + k: _wrap_handler(v) for (k, v) in list(handlers.items())} def init(handler_self, socket, address, server): BaseHTTPServer.BaseHTTPRequestHandler.__init__(handler_self, socket, address, server) @@ -875,6 +880,7 @@ def log(*args): # To silence server access logs def run(): self._svr.handle_request() + self._thread = Thread(target=run) self._thread.daemon = True @@ -907,7 +913,8 @@ def check_response(handler): assert json.loads(body)["baz"] == "baf" with MockServer(POST=check_response): - ctx = binding.connect(port=9093, scheme='http', token="waffle", headers=[("Content-Type", "application/json")]) + ctx = binding.connect(port=9093, scheme='http', token="waffle", + headers=[("Content-Type", "application/json")]) ctx.post("/", foo="bar", body='{"baz": "baf"}') def test_post_with_body_dict(self): @@ -923,8 +930,4 @@ def check_response(handler): if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest unittest.main() diff --git a/tests/test_collection.py b/tests/test_collection.py index 0fd9a1c3..8d99b05d 100755 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -14,13 +14,12 @@ # License for the specific language governing permissions and limitations # under the License. 
-from __future__ import absolute_import from tests import testlib import logging from contextlib import contextmanager -import splunklib.client as client +from splunklib import client from splunklib.six.moves import range collections = [ @@ -41,9 +40,9 @@ class CollectionTestCase(testlib.SDKTestCase): def setUp(self): - super(CollectionTestCase, self).setUp() + super().setUp() if self.service.splunk_version[0] >= 5 and 'modular_input_kinds' not in collections: - collections.append('modular_input_kinds') # Not supported before Splunk 5.0 + collections.append('modular_input_kinds') # Not supported before Splunk 5.0 else: logging.info("Skipping modular_input_kinds; not supported by Splunk %s" % \ '.'.join(str(x) for x in self.service.splunk_version)) @@ -69,59 +68,51 @@ def test_metadata(self): found_fields_keys = set(metadata.fields.keys()) self.assertTrue(found_access_keys >= expected_access_keys, msg='metadata.access is missing keys on ' + \ - '%s (found: %s, expected: %s)' % \ - (coll, found_access_keys, - expected_access_keys)) + f'{coll} (found: {found_access_keys}, expected: {expected_access_keys})') self.assertTrue(found_fields_keys >= expected_fields_keys, msg='metadata.fields is missing keys on ' + \ - '%s (found: %s, expected: %s)' % \ - (coll, found_fields_keys, - expected_fields_keys)) + f'{coll} (found: {found_fields_keys}, expected: {expected_fields_keys})') def test_list(self): for coll_name in collections: coll = getattr(self.service, coll_name) expected = [ent.name for ent in coll.list(count=10, sort_mode="auto")] if len(expected) == 0: - logging.debug("No entities in collection %s; skipping test.", coll_name) + logging.debug(f"No entities in collection {coll_name}; skipping test.", coll_name) found = [ent.name for ent in coll.list()][:10] self.assertEqual(expected, found, - msg='on %s (expected: %s, found: %s)' % \ - (coll_name, expected, found)) + msg=f'on {coll_name} (expected {expected}, found {found})') def test_list_with_count(self): N = 5 for coll_name in collections: coll = getattr(self.service, coll_name) - expected = [ent.name for ent in coll.list(count=N+5)][:N] - N = len(expected) # in case there are v2") self.assertEqual(result, - {'e1': {'a1': 'v1', 'e2': {'$text': 'v2', 'a1': 'v1'}}}) + {'e1': {'a1': 'v1', 'e2': {'$text': 'v2', 'a1': 'v1'}}}) def test_real(self): """Test some real Splunk response examples.""" @@ -120,12 +119,8 @@ def test_invalid(self): if sys.version_info[1] >= 7: self.assertRaises(et.ParseError, data.load, "") else: - if six.PY2: - from xml.parsers.expat import ExpatError - self.assertRaises(ExpatError, data.load, "") - else: - from xml.etree.ElementTree import ParseError - self.assertRaises(ParseError, data.load, "") + from xml.etree.ElementTree import ParseError + self.assertRaises(ParseError, data.load, "") self.assertRaises(KeyError, data.load, "a") @@ -166,8 +161,8 @@ def test_dict(self): """) - self.assertEqual(result, - {'content': {'n1': {'n1n1': "n1v1"}, 'n2': {'n2n1': "n2v1"}}}) + self.assertEqual(result, + {'content': {'n1': {'n1n1': "n1v1"}, 'n2': {'n2n1': "n2v1"}}}) result = data.load(""" @@ -179,8 +174,8 @@ def test_dict(self): """) - self.assertEqual(result, - {'content': {'n1': ['1', '2', '3', '4']}}) + self.assertEqual(result, + {'content': {'n1': ['1', '2', '3', '4']}}) def test_list(self): result = data.load("""""") @@ -222,8 +217,8 @@ def test_list(self): v4 """) - self.assertEqual(result, - {'content': [{'n1':"v1"}, {'n2':"v2"}, {'n3':"v3"}, {'n4':"v4"}]}) + self.assertEqual(result, + {'content': [{'n1': "v1"}, {'n2': 
"v2"}, {'n3': "v3"}, {'n4': "v4"}]}) result = data.load(""" @@ -233,7 +228,7 @@ def test_list(self): """) self.assertEqual(result, - {'build': '101089', 'cpu_arch': 'i386', 'isFree': '0'}) + {'build': '101089', 'cpu_arch': 'i386', 'isFree': '0'}) def test_record(self): d = data.record() @@ -244,17 +239,14 @@ def test_record(self): 'bar.zrp.peem': 9}) self.assertEqual(d['foo'], 5) self.assertEqual(d['bar.baz'], 6) - self.assertEqual(d['bar'], {'baz': 6, 'qux': 7, 'zrp': {'meep': 8, 'peem':9}}) + self.assertEqual(d['bar'], {'baz': 6, 'qux': 7, 'zrp': {'meep': 8, 'peem': 9}}) self.assertEqual(d.foo, 5) self.assertEqual(d.bar.baz, 6) - self.assertEqual(d.bar, {'baz': 6, 'qux': 7, 'zrp': {'meep': 8, 'peem':9}}) + self.assertEqual(d.bar, {'baz': 6, 'qux': 7, 'zrp': {'meep': 8, 'peem': 9}}) self.assertRaises(KeyError, d.__getitem__, 'boris') if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest - unittest.main() + import unittest + unittest.main() diff --git a/tests/test_event_type.py b/tests/test_event_type.py index 5ae2c7ec..9e495977 100755 --- a/tests/test_event_type.py +++ b/tests/test_event_type.py @@ -14,17 +14,15 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import from tests import testlib -import logging -import splunklib.client as client class TestRead(testlib.SDKTestCase): def test_read(self): for event_type in self.service.event_types.list(count=1): self.check_entity(event_type) + class TestCreate(testlib.SDKTestCase): def test_create(self): self.event_type_name = testlib.tmpname() @@ -42,22 +40,23 @@ def test_create(self): self.assertEqual(self.event_type_name, event_type.name) def tearDown(self): - super(TestCreate, self).setUp() + super().setUp() try: self.service.event_types.delete(self.event_type_name) except KeyError: pass + class TestEventType(testlib.SDKTestCase): def setUp(self): - super(TestEventType, self).setUp() + super().setUp() self.event_type_name = testlib.tmpname() self.event_type = self.service.event_types.create( self.event_type_name, search="index=_internal *") def tearDown(self): - super(TestEventType, self).setUp() + super().setUp() try: self.service.event_types.delete(self.event_type_name) except KeyError: @@ -69,16 +68,13 @@ def test_delete(self): self.assertFalse(self.event_type_name in self.service.event_types) def test_update(self): - kwargs = {} - kwargs['search'] = "index=_audit *" - kwargs['description'] = "An audit event" - kwargs['priority'] = '3' + kwargs = {'search': "index=_audit *", 'description': "An audit event", 'priority': '3'} self.event_type.update(**kwargs) self.event_type.refresh() self.assertEqual(self.event_type['search'], kwargs['search']) self.assertEqual(self.event_type['description'], kwargs['description']) self.assertEqual(self.event_type['priority'], kwargs['priority']) - + def test_enable_disable(self): self.assertEqual(self.event_type['disabled'], '0') self.event_type.disable() @@ -88,9 +84,8 @@ def test_enable_disable(self): self.event_type.refresh() self.assertEqual(self.event_type['disabled'], '0') + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_examples.py b/tests/test_examples.py index e2057ffb..304b6bad 100755 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -14,40 +14,33 @@ # License for the specific language governing permissions and limitations # under the 
License. -from __future__ import absolute_import import os from subprocess import PIPE, Popen -import time -import sys - -import io -try: - import unittest -except ImportError: - import unittest2 as unittest +import sys import pytest from tests import testlib -import splunklib.client as client +from splunklib import client from splunklib import six DIR_PATH = os.path.dirname(os.path.realpath(__file__)) EXAMPLES_PATH = os.path.join(DIR_PATH, '..', 'examples') + def check_multiline(testcase, first, second, message=None): """Assert that two multi-line strings are equal.""" testcase.assertTrue(isinstance(first, six.string_types), - 'First argument is not a string') + 'First argument is not a string') testcase.assertTrue(isinstance(second, six.string_types), - 'Second argument is not a string') + 'Second argument is not a string') # Unix-ize Windows EOL first = first.replace("\r", "") second = second.replace("\r", "") if first != second: - testcase.fail("Multiline strings are not equal: %s" % message) + testcase.fail(f"Multiline strings are not equal: {message}") # Run the given python script and return its exit code. @@ -72,11 +65,11 @@ class ExamplesTestCase(testlib.SDKTestCase): def check_commands(self, *args): for arg in args: result = run(arg) - self.assertEqual(result, 0, '"{0}" run failed with result code {1}'.format(arg, result)) + self.assertEqual(result, 0, f'"{arg}" run failed with result code {result}') self.service.login() # Because a Splunk restart invalidates our session def setUp(self): - super(ExamplesTestCase, self).setUp() + super().setUp() # Ignore result, it might already exist run("index.py create sdk-tests") @@ -104,7 +97,7 @@ def test_conf(self): conf = self.service.confs['server'] if 'SDK-STANZA' in conf: conf.delete("SDK-STANZA") - except Exception as e: + except Exception: pass try: @@ -154,7 +147,7 @@ def test_handlers(self): # Assumes that tiny-proxy.py is in the same directory as the sample - #This test seems to be flaky + # This test seems to be flaky # if six.PY2: # Needs to be fixed PY3 # process = start("handlers/tiny-proxy.py -p 8080", stderr=PIPE) # try: @@ -178,7 +171,6 @@ def test_index(self): "index.py disable sdk-tests", "index.py enable sdk-tests", "index.py clean sdk-tests") - return def test_info(self): self.check_commands( @@ -221,10 +213,11 @@ def test_saved_search(self): "saved_search/saved_search.py", ["saved_search/saved_search.py", "--help"], ["saved_search/saved_search.py", "list-all"], - ["saved_search/saved_search.py", "--operation", "create", "--name", temp_name, "--search", "search * | head 5"], + ["saved_search/saved_search.py", "--operation", "create", "--name", temp_name, "--search", + "search * | head 5"], ["saved_search/saved_search.py", "list", "--name", temp_name], ["saved_search/saved_search.py", "list", "--operation", "delete", "--name", temp_name], - ["saved_search/saved_search.py", "list", "--name", "Errors in the last 24 hours"] + ["saved_search/saved_search.py", "list", "--name", "Errors in the last 24 hours"] ) def test_search(self): @@ -257,7 +250,7 @@ def test_upload(self): file_to_upload = os.path.expandvars(os.environ.get("INPUT_EXAMPLE_UPLOAD", "./upload.py")) self.check_commands( "upload.py --help", - "upload.py --index=sdk-tests %s" % file_to_upload) + f"upload.py --index=sdk-tests {file_to_upload}") # The following tests are for the Analytics example def test_analytics(self): @@ -268,7 +261,7 @@ def test_analytics(self): # Create a tracker tracker = analytics.input.AnalyticsTracker( - "sdk-test", self.opts.kwargs, index 
= "sdk-test") + "sdk-test", self.opts.kwargs, index="sdk-test") service = client.connect(**self.opts.kwargs) @@ -284,7 +277,7 @@ def test_analytics(self): # Now, we create a retriever to retrieve the events retriever = analytics.output.AnalyticsRetriever( - "sdk-test", self.opts.kwargs, index = "sdk-test") + "sdk-test", self.opts.kwargs, index="sdk-test") # Assert applications applications = retriever.applications() @@ -308,7 +301,7 @@ def test_analytics(self): for prop in properties: name = prop["name"] count = prop["count"] - self.assertTrue(name in list(expected_properties.keys())) + self.assertTrue(name in list(expected_properties)) self.assertEqual(count, expected_properties[name]) # Assert property values @@ -321,12 +314,12 @@ def test_analytics(self): for value in values: name = value["name"] count = value["count"] - self.assertTrue(name in list(expected_property_values.keys())) + self.assertTrue(name in list(expected_property_values)) self.assertEqual(count, expected_property_values[name]) # Assert event over time over_time = retriever.events_over_time( - time_range = analytics.output.TimeRange.MONTH) + time_range=analytics.output.TimeRange.MONTH) self.assertEqual(len(over_time), 1) self.assertEqual(len(over_time["test_event"]), 1) self.assertEqual(over_time["test_event"][0]["count"], 2) @@ -334,10 +327,8 @@ def test_analytics(self): # Now that we're done, we'll clean the index index.clean() + if __name__ == "__main__": + import unittest os.chdir("../examples") - try: - import unittest2 as unittest - except ImportError: - import unittest unittest.main() diff --git a/tests/test_fired_alert.py b/tests/test_fired_alert.py index 2480d415..fb185dbe 100755 --- a/tests/test_fired_alert.py +++ b/tests/test_fired_alert.py @@ -14,22 +14,19 @@ # License for the specific language governing permissions and limitations # under the License. 
-from __future__ import absolute_import from tests import testlib -import logging -import splunklib.client as client class FiredAlertTestCase(testlib.SDKTestCase): def setUp(self): - super(FiredAlertTestCase, self).setUp() + super().setUp() self.index_name = testlib.tmpname() self.assertFalse(self.index_name in self.service.indexes) self.index = self.service.indexes.create(self.index_name) saved_searches = self.service.saved_searches self.saved_search_name = testlib.tmpname() self.assertFalse(self.saved_search_name in saved_searches) - query = "search index=%s" % self.index_name + query = f"search index={self.index_name}" kwargs = {'alert_type': 'always', 'alert.severity': "3", 'alert.suppress': "0", @@ -43,7 +40,7 @@ def setUp(self): query, **kwargs) def tearDown(self): - super(FiredAlertTestCase, self).tearDown() + super().tearDown() if self.service.splunk_version >= (5,): self.service.indexes.delete(self.index_name) for saved_search in self.service.saved_searches: @@ -57,7 +54,7 @@ def test_new_search_is_empty(self): self.assertEqual(len(self.saved_search.history()), 0) self.assertEqual(len(self.saved_search.fired_alerts), 0) self.assertFalse(self.saved_search_name in self.service.fired_alerts) - + def test_alerts_on_events(self): self.assertEqual(self.saved_search.alert_count, 0) self.assertEqual(len(self.saved_search.fired_alerts), 0) @@ -71,14 +68,17 @@ def test_alerts_on_events(self): self.index.refresh() self.index.submit('This is a test ' + testlib.tmpname(), sourcetype='sdk_use', host='boris') + def f(): self.index.refresh() - return int(self.index['totalEventCount']) == eventCount+1 + return int(self.index['totalEventCount']) == eventCount + 1 + self.assertEventuallyTrue(f, timeout=50) def g(): self.saved_search.refresh() return self.saved_search.alert_count == 1 + self.assertEventuallyTrue(g, timeout=200) alerts = self.saved_search.fired_alerts @@ -90,9 +90,8 @@ def test_read(self): for alert in alert_group.alerts: alert.content + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_index.py b/tests/test_index.py index 9e2a5329..fb876496 100755 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -14,30 +14,23 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import -from __future__ import print_function -from tests import testlib import logging -import os import time -import splunklib.client as client -try: - import unittest -except ImportError: - import unittest2 as unittest - import pytest +from tests import testlib +from splunklib import client + class IndexTest(testlib.SDKTestCase): def setUp(self): - super(IndexTest, self).setUp() + super().setUp() self.index_name = testlib.tmpname() self.index = self.service.indexes.create(self.index_name) self.assertEventuallyTrue(lambda: self.index.refresh()['disabled'] == '0') def tearDown(self): - super(IndexTest, self).tearDown() + super().tearDown() # We can't delete an index with the REST API before Splunk # 5.0. In 4.x, we just have to leave them lying around until # someone cares to go clean them up. 
Unique naming prevents @@ -92,14 +85,14 @@ def test_disable_enable(self): # self.assertEqual(self.index['totalEventCount'], '0') def test_prefresh(self): - self.assertEqual(self.index['disabled'], '0') # Index is prefreshed + self.assertEqual(self.index['disabled'], '0') # Index is prefreshed def test_submit(self): event_count = int(self.index['totalEventCount']) self.assertEqual(self.index['sync'], '0') self.assertEqual(self.index['disabled'], '0') self.index.submit("Hello again!", sourcetype="Boris", host="meep") - self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count+1, timeout=50) + self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count + 1, timeout=50) def test_submit_namespaced(self): s = client.connect(**{ @@ -114,14 +107,14 @@ def test_submit_namespaced(self): self.assertEqual(i['sync'], '0') self.assertEqual(i['disabled'], '0') i.submit("Hello again namespaced!", sourcetype="Boris", host="meep") - self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count+1, timeout=50) + self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count + 1, timeout=50) def test_submit_via_attach(self): event_count = int(self.index['totalEventCount']) cn = self.index.attach() cn.send(b"Hello Boris!\r\n") cn.close() - self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count+1, timeout=60) + self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count + 1, timeout=60) def test_submit_via_attach_using_token_header(self): # Remove the prefix from the token @@ -133,14 +126,14 @@ def test_submit_via_attach_using_token_header(self): cn = i.attach() cn.send(b"Hello Boris 5!\r\n") cn.close() - self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count+1, timeout=60) + self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count + 1, timeout=60) def test_submit_via_attached_socket(self): event_count = int(self.index['totalEventCount']) f = self.index.attached_socket with f() as sock: sock.send(b'Hello world!\r\n') - self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count+1, timeout=60) + self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count + 1, timeout=60) def test_submit_via_attach_with_cookie_header(self): # Skip this test if running below Splunk 6.2, cookie-auth didn't exist before @@ -156,7 +149,7 @@ def test_submit_via_attach_with_cookie_header(self): cn = service.indexes[self.index_name].attach() cn.send(b"Hello Boris!\r\n") cn.close() - self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count+1, timeout=60) + self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count + 1, timeout=60) def test_submit_via_attach_with_multiple_cookie_headers(self): # Skip this test if running below Splunk 6.2, cookie-auth didn't exist before @@ -171,7 +164,7 @@ def test_submit_via_attach_with_multiple_cookie_headers(self): cn = service.indexes[self.index_name].attach() cn.send(b"Hello Boris!\r\n") cn.close() - self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count+1, timeout=60) + self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count + 1, timeout=60) @pytest.mark.app def test_upload(self): @@ -181,11 +174,10 @@ def test_upload(self): path = self.pathInApp("file_to_upload", ["log.txt"]) self.index.upload(path) - self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count+4, timeout=60) + self.assertEventuallyTrue(lambda: self.totalEventCount() == event_count + 4, timeout=60) + if __name__ == "__main__": - try: 
- import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_input.py b/tests/test_input.py index c7d48dc3..0fa23f33 100755 --- a/tests/test_input.py +++ b/tests/test_input.py @@ -13,22 +13,13 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import -from __future__ import print_function - +import logging +import pytest from splunklib.binding import HTTPError from tests import testlib -import logging -from splunklib import six -try: - import unittest -except ImportError: - import unittest2 as unittest - -import splunklib.client as client +from splunklib import six, client -import pytest def highest_port(service, base_port, *kinds): @@ -42,7 +33,7 @@ def highest_port(service, base_port, *kinds): class TestTcpInputNameHandling(testlib.SDKTestCase): def setUp(self): - super(TestTcpInputNameHandling, self).setUp() + super().setUp() self.base_port = highest_port(self.service, 10000, 'tcp', 'splunktcp', 'udp') + 1 def tearDown(self): @@ -50,7 +41,7 @@ def tearDown(self): port = int(input.name.split(':')[-1]) if port >= self.base_port: input.delete() - super(TestTcpInputNameHandling, self).tearDown() + super().tearDown() def create_tcp_input(self, base_port, kind, **options): port = base_port @@ -149,7 +140,6 @@ def test_read_invalid_input(self): self.assertTrue("HTTP 404 Not Found" in str(he)) def test_inputs_list_on_one_kind_with_count(self): - N = 10 expected = [x.name for x in self.service.inputs.list('monitor')[:10]] found = [x.name for x in self.service.inputs.list('monitor', count=10)] self.assertEqual(expected, found) @@ -192,7 +182,7 @@ def test_oneshot_on_nonexistant_file(self): class TestInput(testlib.SDKTestCase): def setUp(self): - super(TestInput, self).setUp() + super().setUp() inputs = self.service.inputs unrestricted_port = str(highest_port(self.service, 10000, 'tcp', 'splunktcp', 'udp')+1) restricted_port = str(highest_port(self.service, int(unrestricted_port)+1, 'tcp', 'splunktcp')+1) @@ -209,7 +199,7 @@ def setUp(self): inputs.create(restricted_port, 'tcp', restrictToHost='boris') def tearDown(self): - super(TestInput, self).tearDown() + super().tearDown() for entity in six.itervalues(self._test_entities): try: self.service.inputs.delete( @@ -299,8 +289,5 @@ def test_delete(self): if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest unittest.main() diff --git a/tests/test_job.py b/tests/test_job.py index 19ec8900..3757192d 100755 --- a/tests/test_job.py +++ b/tests/test_job.py @@ -14,9 +14,6 @@ # License for the specific language governing permissions and limitations # under the License. 
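
The other pervasive change, visible in test_index.py and test_input.py above, is Python 3's zero-argument super(), which resolves the class and instance from the enclosing scope. A self-contained sketch with hypothetical test classes:

    import unittest

    class BaseCase(unittest.TestCase):
        def setUp(self):
            self.created = []

    class PortCase(BaseCase):
        def setUp(self):
            # Zero-argument form of super(PortCase, self).setUp().
            super().setUp()
            self.created.append('tcp:10000')

        def test_setup_chain(self):
            self.assertEqual(self.created, ['tcp:10000'])

    if __name__ == '__main__':
        unittest.main()
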
-from __future__ import absolute_import -from __future__ import print_function - from io import BytesIO from time import sleep @@ -24,13 +21,10 @@ from tests import testlib -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest -import splunklib.client as client -import splunklib.results as results +from splunklib import client +from splunklib import results from splunklib.binding import _log_duration, HTTPError @@ -84,8 +78,8 @@ def test_export(self): self.assertTrue(len(nonmessages) <= 3) def test_export_docstring_sample(self): - import splunklib.client as client - import splunklib.results as results + from splunklib import client + from splunklib import results service = self.service # cheat rr = results.JSONResultsReader(service.jobs.export("search * | head 5", output_mode='json')) for result in rr: @@ -98,7 +92,7 @@ def test_export_docstring_sample(self): assert rr.is_preview == False def test_results_docstring_sample(self): - import splunklib.results as results + from splunklib import results service = self.service # cheat job = service.jobs.create("search * | head 5") while not job.is_done(): @@ -114,8 +108,8 @@ def test_results_docstring_sample(self): assert rr.is_preview == False def test_preview_docstring_sample(self): - import splunklib.client as client - import splunklib.results as results + from splunklib import client + from splunklib import results service = self.service # cheat job = service.jobs.create("search * | head 5") rr = results.JSONResultsReader(job.preview(output_mode='json')) @@ -132,8 +126,8 @@ def test_preview_docstring_sample(self): pass #print "Job is finished. Results are final." def test_oneshot_docstring_sample(self): - import splunklib.client as client - import splunklib.results as results + from splunklib import client + from splunklib import results service = self.service # cheat rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5", output_mode='json')) for result in rr: @@ -188,7 +182,6 @@ def check_job(self, job): 'statusBuckets', 'ttl'] for key in keys: self.assertTrue(key in job.content) - return def test_read_jobs(self): jobs = self.service.jobs @@ -212,11 +205,11 @@ def test_get_job(self): class TestJobWithDelayedDone(testlib.SDKTestCase): def setUp(self): - super(TestJobWithDelayedDone, self).setUp() + super().setUp() self.job = None def tearDown(self): - super(TestJobWithDelayedDone, self).tearDown() + super().tearDown() if self.job is not None: self.job.cancel() self.assertEventuallyTrue(lambda: self.job.sid not in self.service.jobs) @@ -243,7 +236,6 @@ def is_preview_enabled(): return self.job.content['isPreviewEnabled'] == '1' self.assertEventuallyTrue(is_preview_enabled) - return @pytest.mark.app def test_setpriority(self): @@ -279,12 +271,11 @@ def f(): return int(self.job.content['priority']) == new_priority self.assertEventuallyTrue(f, timeout=sleep_duration + 5) - return class TestJob(testlib.SDKTestCase): def setUp(self): - super(TestJob, self).setUp() + super().setUp() self.query = "search index=_internal | head 3" self.job = self.service.jobs.create( query=self.query, @@ -292,7 +283,7 @@ def setUp(self): latest_time="now") def tearDown(self): - super(TestJob, self).tearDown() + super().tearDown() self.job.cancel() @_log_duration diff --git a/tests/test_kvstore_batch.py b/tests/test_kvstore_batch.py index d32b665e..b32ee4d7 100755 --- a/tests/test_kvstore_batch.py +++ b/tests/test_kvstore_batch.py @@ -14,19 +14,16 @@ # License for the specific language governing permissions and 
limitations # under the License. -from __future__ import absolute_import from tests import testlib from splunklib.six.moves import range -try: - import unittest -except ImportError: - import unittest2 as unittest -import splunklib.client as client + +from splunklib import client + class KVStoreBatchTestCase(testlib.SDKTestCase): def setUp(self): - super(KVStoreBatchTestCase, self).setUp() - #self.service.namespace['owner'] = 'nobody' + super().setUp() + # self.service.namespace['owner'] = 'nobody' self.service.namespace['app'] = 'search' confs = self.service.kvstore if ('test' in confs): @@ -69,15 +66,13 @@ def test_insert_find_update_data(self): self.assertEqual(testData[x][0]['data'], '#' + str(x + 1)) self.assertEqual(testData[x][0]['num'], x + 1) - def tearDown(self): confs = self.service.kvstore if ('test' in confs): confs['test'].delete() + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_kvstore_conf.py b/tests/test_kvstore_conf.py index a2453728..eba8996f 100755 --- a/tests/test_kvstore_conf.py +++ b/tests/test_kvstore_conf.py @@ -14,17 +14,12 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import from tests import testlib -try: - import unittest -except ImportError: - import unittest2 as unittest -import splunklib.client as client +from splunklib import client class KVStoreConfTestCase(testlib.SDKTestCase): def setUp(self): - super(KVStoreConfTestCase, self).setUp() + super().setUp() #self.service.namespace['owner'] = 'nobody' self.service.namespace['app'] = 'search' self.confs = self.service.kvstore @@ -40,7 +35,7 @@ def test_create_delete_collection(self): self.confs.create('test') self.assertTrue('test' in self.confs) self.confs['test'].delete() - self.assertTrue(not 'test' in self.confs) + self.assertTrue('test' not in self.confs) def test_update_collection(self): self.confs.create('test') @@ -93,8 +88,5 @@ def tearDown(self): self.confs['test'].delete() if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest unittest.main() diff --git a/tests/test_kvstore_data.py b/tests/test_kvstore_data.py index 6ddeae68..7e7a147a 100755 --- a/tests/test_kvstore_data.py +++ b/tests/test_kvstore_data.py @@ -14,20 +14,17 @@ # License for the specific language governing permissions and limitations # under the License. 
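
The KV store tests above create a collection named test, write documents into it, and drop it. A condensed sketch of that flow, assuming placeholder connection details and the .data accessor splunklib uses to expose a collection's documents:

    import json

    from splunklib import client

    # Connection details are placeholders.
    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")
    service.namespace['app'] = 'search'

    kvstore = service.kvstore
    if 'test' in kvstore:
        kvstore['test'].delete()
    kvstore.create('test')

    col = kvstore['test'].data
    key = col.insert(json.dumps({'foo': 'bar'}))['_key']
    assert col.query_by_id(key)['foo'] == 'bar'
    kvstore['test'].delete()
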
-from __future__ import absolute_import import json from tests import testlib from splunklib.six.moves import range -try: - import unittest -except ImportError: - import unittest2 as unittest -import splunklib.client as client + +from splunklib import client + class KVStoreDataTestCase(testlib.SDKTestCase): def setUp(self): - super(KVStoreDataTestCase, self).setUp() - #self.service.namespace['owner'] = 'nobody' + super().setUp() + # self.service.namespace['owner'] = 'nobody' self.service.namespace['app'] = 'search' self.confs = self.service.kvstore if ('test' in self.confs): @@ -74,7 +71,6 @@ def test_query_data(self): data = self.col.query(limit=2, skip=9) self.assertEqual(len(data), 1) - def test_invalid_insert_update(self): self.assertRaises(client.HTTPError, lambda: self.col.insert('NOT VALID DATA')) id = self.col.insert(json.dumps({'foo': 'bar'}))['_key'] @@ -96,9 +92,8 @@ def tearDown(self): if ('test' in self.confs): self.confs['test'].delete() + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_logger.py b/tests/test_logger.py index 7e9f5c8e..0541d79a 100755 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -14,13 +14,13 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import from tests import testlib -import splunklib.client as client +from splunklib import client LEVELS = ["INFO", "WARN", "ERROR", "DEBUG", "CRIT"] + class LoggerTestCase(testlib.SDKTestCase): def check_logger(self, logger): self.check_entity(logger) @@ -44,9 +44,8 @@ def test_crud(self): logger.refresh() self.assertEqual(self.service.loggers['AuditLogger']['level'], saved) + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_message.py b/tests/test_message.py index cd76c783..0c94402e 100755 --- a/tests/test_message.py +++ b/tests/test_message.py @@ -14,10 +14,10 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import from tests import testlib -import splunklib.client as client +from splunklib import client + class MessageTest(testlib.SDKTestCase): def setUp(self): @@ -31,6 +31,7 @@ def tearDown(self): testlib.SDKTestCase.tearDown(self) self.service.messages.delete(self.message_name) + class TestCreateDelete(testlib.SDKTestCase): def test_create_delete(self): message_name = testlib.tmpname() @@ -46,11 +47,10 @@ def test_create_delete(self): def test_invalid_name(self): self.assertRaises(client.InvalidNameException, self.service.messages.create, None, value="What?") self.assertRaises(client.InvalidNameException, self.service.messages.create, 42, value="Who, me?") - self.assertRaises(client.InvalidNameException, self.service.messages.create, [1,2,3], value="Who, me?") + self.assertRaises(client.InvalidNameException, self.service.messages.create, [1, 2, 3], value="Who, me?") + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_modular_input.py b/tests/test_modular_input.py index ae6e797d..688b26b6 100755 --- a/tests/test_modular_input.py +++ b/tests/test_modular_input.py @@ -14,12 +14,6 @@ # License for the specific language governing permissions and limitations # under the License. 
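
The logger test above saves AuditLogger's level, changes it, and restores it. A sketch of the same round trip, assuming placeholder credentials and that Entity.update accepts the level attribute the way the test's update call does:

    from splunklib import client

    # Connection details are placeholders.
    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")

    logger = service.loggers['AuditLogger']
    saved = logger['level']
    try:
        logger.update(level='DEBUG')
        logger.refresh()
        assert logger['level'] == 'DEBUG'
    finally:
        logger.update(level=saved)
        logger.refresh()
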
-from __future__ import absolute_import -from __future__ import print_function -try: - import unittest2 as unittest -except ImportError: - import unittest from tests import testlib import pytest @@ -27,7 +21,7 @@ @pytest.mark.smoke class ModularInputKindTestCase(testlib.SDKTestCase): def setUp(self): - super(ModularInputKindTestCase, self).setUp() + super().setUp() self.uncheckedRestartSplunk() @pytest.mark.app @@ -38,7 +32,7 @@ def test_lists_modular_inputs(self): self.uncheckedRestartSplunk() inputs = self.service.inputs - if ('abcd','test2') not in inputs: + if ('abcd', 'test2') not in inputs: inputs.create('abcd', 'test2', field1='boris') input = inputs['abcd', 'test2'] @@ -55,5 +49,8 @@ def check_modular_input_kind(self, m): self.assertEqual('test2', m['title']) self.assertEqual('simple', m['streaming_mode']) + if __name__ == "__main__": + import unittest + unittest.main() diff --git a/tests/test_modular_input_kinds.py b/tests/test_modular_input_kinds.py index c6b7391e..30380475 100755 --- a/tests/test_modular_input_kinds.py +++ b/tests/test_modular_input_kinds.py @@ -14,20 +14,16 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import -from __future__ import print_function from tests import testlib -try: - import unittest -except ImportError: - import unittest2 as unittest -import splunklib.client as client + +from splunklib import client import pytest + class ModularInputKindTestCase(testlib.SDKTestCase): def setUp(self): - super(ModularInputKindTestCase, self).setUp() + super().setUp() self.uncheckedRestartSplunk() @pytest.mark.app @@ -40,9 +36,9 @@ def test_list_arguments(self): test1 = self.service.modular_input_kinds['test1'] - expected_args = set(["name", "resname", "key_id", "no_description", "empty_description", - "arg_required_on_edit", "not_required_on_edit", "required_on_create", - "not_required_on_create", "number_field", "string_field", "boolean_field"]) + expected_args = {"name", "resname", "key_id", "no_description", "empty_description", "arg_required_on_edit", + "not_required_on_edit", "required_on_create", "not_required_on_create", "number_field", + "string_field", "boolean_field"} found_args = set(test1.arguments.keys()) self.assertEqual(expected_args, found_args) @@ -77,9 +73,8 @@ def test_list_modular_inputs(self): for m in self.service.modular_input_kinds: self.check_modular_input_kind(m) + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_results.py b/tests/test_results.py index 5fdca2b9..a55c037b 100755 --- a/tests/test_results.py +++ b/tests/test_results.py @@ -14,14 +14,12 @@ # License for the specific language governing permissions and limitations # under the License. 
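
The expected_args change in test_modular_input_kinds.py above swaps set([...]) for a set literal, one of Pylint's standard suggestions; both spellings construct the same object:

    expected_args = {"name", "resname", "key_id"}
    assert expected_args == set(["name", "resname", "key_id"])
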
-from __future__ import absolute_import - from io import BytesIO from splunklib.six import StringIO from tests import testlib from time import sleep -import splunklib.results as results +from splunklib import results import io @@ -164,9 +162,8 @@ def assert_parsed_results_equals(self, xml_text, expected_results): actual_results = [x for x in results_reader] self.assertEqual(expected_results, actual_results) + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_role.py b/tests/test_role.py index 16205d05..ca9f5009 100755 --- a/tests/test_role.py +++ b/tests/test_role.py @@ -14,20 +14,20 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import from tests import testlib import logging -import splunklib.client as client +from splunklib import client + class RoleTestCase(testlib.SDKTestCase): def setUp(self): - super(RoleTestCase, self).setUp() + super().setUp() self.role_name = testlib.tmpname() self.role = self.service.roles.create(self.role_name) def tearDown(self): - super(RoleTestCase, self).tearDown() + super().tearDown() for role in self.service.roles: if role.name.startswith('delete-me'): self.service.roles.delete(role.name) @@ -91,7 +91,6 @@ def test_invalid_revoke(self): def test_revoke_capability_not_granted(self): self.role.revoke('change_own_password') - def test_update(self): kwargs = {} if 'user' in self.role['imported_roles']: @@ -105,9 +104,8 @@ def test_update(self): self.assertEqual(self.role['imported_roles'], kwargs['imported_roles']) self.assertEqual(int(self.role['srchJobsQuota']), kwargs['srchJobsQuota']) + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_saved_search.py b/tests/test_saved_search.py index 28d6436d..1cbb664d 100755 --- a/tests/test_saved_search.py +++ b/tests/test_saved_search.py @@ -14,22 +14,22 @@ # License for the specific language governing permissions and limitations # under the License. 
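
RoleTestCase above exercises revoke(); a sketch of the full grant-and-revoke cycle it belongs to, assuming placeholder credentials and a throwaway role name (grant() is the counterpart splunklib provides):

    from splunklib import client

    # Connection details are placeholders.
    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")

    role = service.roles.create('delete-me-example')
    try:
        role.grant('change_own_password')
        role.refresh()
        assert 'change_own_password' in role['capabilities']
        role.revoke('change_own_password')
    finally:
        service.roles.delete(role.name)
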
-from __future__ import absolute_import
 import datetime
 
 from tests import testlib
 import logging
 from time import sleep
 
-import splunklib.client as client
+from splunklib import client
 
 from splunklib.six.moves import zip
 
 import pytest
 
+
 @pytest.mark.smoke
 class TestSavedSearch(testlib.SDKTestCase):
     def setUp(self):
-        super(TestSavedSearch, self).setUp()
+        super().setUp()
         saved_searches = self.service.saved_searches
         logging.debug("Saved searches namespace: %s", saved_searches.service.namespace)
         self.saved_search_name = testlib.tmpname()
@@ -37,7 +37,7 @@ def setUp(self):
         self.saved_search = saved_searches.create(self.saved_search_name, query)
 
     def tearDown(self):
-        super(TestSavedSearch, self).setUp()
+        super().tearDown()
         for saved_search in self.service.saved_searches:
             if saved_search.name.startswith('delete-me'):
                 try:
@@ -91,7 +91,6 @@ def test_delete(self):
 
         self.assertRaises(client.HTTPError,
                           self.saved_search.refresh)
 
-
     def test_update(self):
         is_visible = testlib.to_bool(self.saved_search['is_visible'])
         self.saved_search.update(is_visible=not is_visible)
@@ -148,7 +147,7 @@ def test_dispatch(self):
 
     def test_dispatch_with_options(self):
         try:
-            kwargs = { 'dispatch.buckets': 100 }
+            kwargs = {'dispatch.buckets': 100}
             job = self.saved_search.dispatch(**kwargs)
             while not job.is_ready():
                 sleep(0.1)
@@ -165,7 +164,7 @@ def test_history(self):
             while not job.is_ready():
                 sleep(0.1)
             history = self.saved_search.history()
-            self.assertEqual(len(history), N+1)
+            self.assertEqual(len(history), N + 1)
             self.assertTrue(job.sid in [j.sid for j in history])
         finally:
             job.cancel()
@@ -178,13 +177,8 @@ def test_scheduled_times(self):
                                 for x in scheduled_times]))
         time_pairs = list(zip(scheduled_times[:-1], scheduled_times[1:]))
         for earlier, later in time_pairs:
-            diff = later-earlier
-            # diff is an instance of datetime.timedelta, which
-            # didn't get a total_seconds() method until Python 2.7.
-            # Since we support Python 2.6, we have to calculate the
-            # total seconds ourselves.
-            total_seconds = diff.days*24*60*60 + diff.seconds
-            self.assertEqual(total_seconds/60.0, 5)
+            diff = later - earlier
+            self.assertEqual(diff.total_seconds() / 60.0, 5)
 
     def test_no_equality(self):
         self.assertRaises(client.IncomparableException,
@@ -193,7 +187,7 @@ def test_no_equality(self):
     def test_suppress(self):
         suppressed_time = self.saved_search['suppressed']
         self.assertGreaterEqual(suppressed_time, 0)
-        new_suppressed_time = suppressed_time+100
+        new_suppressed_time = suppressed_time + 100
         self.saved_search.suppress(new_suppressed_time)
         self.assertLessEqual(self.saved_search['suppressed'],
                              new_suppressed_time)
@@ -202,9 +196,8 @@ def test_suppress(self):
         self.saved_search.unsuppress()
         self.assertEqual(self.saved_search['suppressed'], 0)
 
+
 if __name__ == "__main__":
-    try:
-        import unittest2 as unittest
-    except ImportError:
-        import unittest
+    import unittest
+
     unittest.main()
diff --git a/tests/test_service.py b/tests/test_service.py
index 34afef2c..436438df 100755
--- a/tests/test_service.py
+++ b/tests/test_service.py
@@ -14,12 +14,11 @@
 # License for the specific language governing permissions and limitations
 # under the License. 
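
TestSavedSearch's dispatch and history tests above follow one flow: create, dispatch, poll is_ready(), check history(), clean up. A condensed sketch with placeholder credentials and a throwaway search name:

    from time import sleep

    from splunklib import client

    # Connection details are placeholders.
    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")

    saved_search = service.saved_searches.create(
        'delete-me-example', 'search index=_internal | head 1')
    try:
        job = saved_search.dispatch()
        while not job.is_ready():
            sleep(0.1)
        # The dispatched job appears in the saved search's history.
        assert job.sid in [j.sid for j in saved_search.history()]
        job.cancel()
    finally:
        service.saved_searches.delete(saved_search.name)
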
-from __future__ import absolute_import from tests import testlib import unittest -import splunklib.client as client -from splunklib.client import AuthenticationError +from splunklib import client +from splunklib.binding import AuthenticationError from splunklib.client import Service from splunklib.binding import HTTPError @@ -364,8 +363,5 @@ def test_proper_namespace_with_service_namespace(self): if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest unittest.main() diff --git a/tests/test_storage_passwords.py b/tests/test_storage_passwords.py index 4f2fee81..95ac037b 100644 --- a/tests/test_storage_passwords.py +++ b/tests/test_storage_passwords.py @@ -14,11 +14,10 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import from tests import testlib import logging -import splunklib.client as client +from splunklib import client class Tests(testlib.SDKTestCase): @@ -153,11 +152,10 @@ def test_read(self): p = self.storage_passwords.create("changeme", username) self.assertEqual(start_count + 1, len(self.storage_passwords)) - for sp in self.storage_passwords: - self.assertTrue(p.name in self.storage_passwords) - # Name works with or without a trailing colon - self.assertTrue((":" + username + ":") in self.storage_passwords) - self.assertTrue((":" + username) in self.storage_passwords) + self.assertTrue(p.name in self.storage_passwords) + # Name works with or without a trailing colon + self.assertTrue((":" + username + ":") in self.storage_passwords) + self.assertTrue((":" + username) in self.storage_passwords) p.delete() self.assertEqual(start_count, len(self.storage_passwords)) @@ -233,9 +231,8 @@ def test_spaces_in_username(self): p.delete() self.assertEqual(start_count, len(self.storage_passwords)) + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_user.py b/tests/test_user.py index b8a97f81..14013301 100755 --- a/tests/test_user.py +++ b/tests/test_user.py @@ -14,20 +14,20 @@ # License for the specific language governing permissions and limitations # under the License. 
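
The storage-password tests above check that an entity's name embeds the realm and username. A sketch of that membership behavior, with placeholder credentials and username:

    from splunklib import client

    # Connection details are placeholders.
    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")

    sp = service.storage_passwords.create("changeme", "probe-user")
    try:
        # Membership checks work with or without the trailing colon.
        assert ":probe-user:" in service.storage_passwords
        assert ":probe-user" in service.storage_passwords
    finally:
        sp.delete()
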
-from __future__ import absolute_import from tests import testlib import logging -import splunklib.client as client +from splunklib import client + class UserTestCase(testlib.SDKTestCase): def check_user(self, user): self.check_entity(user) # Verify expected fields exist [user[f] for f in ['email', 'password', 'realname', 'roles']] - + def setUp(self): - super(UserTestCase, self).setUp() + super().setUp() self.username = testlib.tmpname() self.user = self.service.users.create( self.username, @@ -35,7 +35,7 @@ def setUp(self): roles=['power', 'user']) def tearDown(self): - super(UserTestCase, self).tearDown() + super().tearDown() for user in self.service.users: if user.name.startswith('delete-me'): self.service.users.delete(user.name) @@ -84,9 +84,8 @@ def test_delete_is_case_insensitive(self): self.assertFalse(self.username in users) self.assertFalse(self.username.upper() in users) + if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest + unittest.main() diff --git a/tests/test_utils.py b/tests/test_utils.py index 51080a29..4c01b3cc 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import from tests import testlib try: @@ -18,7 +17,7 @@ class TestUtils(testlib.SDKTestCase): def setUp(self): - super(TestUtils, self).setUp() + super().setUp() # Test dslice when a dict is passed to change key names def test_dslice_dict_args(self): @@ -78,8 +77,5 @@ def test_dslice_all_args(self): if __name__ == "__main__": - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest unittest.main() diff --git a/tests/testlib.py b/tests/testlib.py index ae3246a2..ba9b48ce 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -15,8 +15,6 @@ # under the License. 
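The setUp/tearDown edits above are instances of a conversion repeated across this series: Python 3's zero-argument super() replaces the explicit super(Class, self) form. A standalone sketch, with hypothetical class names:

    class Base:
        def set_up(self):
            return 'base'

    class Child(Base):
        def set_up(self):
            # Python 2 spelling: super(Child, self).set_up()
            return super().set_up()

    assert Child().set_up() == 'base'
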
"""Shared unit test utilities.""" -from __future__ import absolute_import -from __future__ import print_function import contextlib import sys @@ -26,14 +24,11 @@ sys.path.insert(0, '../') sys.path.insert(0, '../examples') -import splunklib.client as client +from splunklib import client from time import sleep from datetime import datetime, timedelta -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest try: from utils import parse @@ -63,10 +58,9 @@ class WaitTimedOutError(Exception): def to_bool(x): if x == '1': return True - elif x == '0': + if x == '0': return False - else: - raise ValueError("Not a boolean value: %s", x) + raise ValueError("Not a boolean value: %s", x) def tmpname(): @@ -269,6 +263,6 @@ def tearDown(self): except HTTPError as error: if not (os.name == 'nt' and error.status == 500): raise - print('Ignoring failure to delete {0} during tear down: {1}'.format(appName, error)) + print(f'Ignoring failure to delete {appName} during tear down: {error}') if self.service.restart_required: self.clear_restart_message() From 6429ae4c148586037233b4356d868dac812191bc Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Fri, 6 May 2022 12:57:29 +0530 Subject: [PATCH 03/60] refractoring --- tests/searchcommands/chunked_data_stream.py | 4 +- tests/searchcommands/test_decorators.py | 3 +- tests/searchcommands/test_internals_v2.py | 11 +- tests/searchcommands/test_search_command.py | 127 +++++++++++--------- 4 files changed, 82 insertions(+), 63 deletions(-) diff --git a/tests/searchcommands/chunked_data_stream.py b/tests/searchcommands/chunked_data_stream.py index 29a21a1b..9c128ffb 100644 --- a/tests/searchcommands/chunked_data_stream.py +++ b/tests/searchcommands/chunked_data_stream.py @@ -16,7 +16,7 @@ def __init__(self, version, meta, data): dialect=dialect) -class ChunkedDataStreamIter(collections.Iterator): +class ChunkedDataStreamIter(collections.abc.Iterator): def __init__(self, chunk_stream): self.chunk_stream = chunk_stream @@ -30,7 +30,7 @@ def __next__(self): raise StopIteration -class ChunkedDataStream(collections.Iterable): +class ChunkedDataStream(collections.abc.Iterable): def __iter__(self): return ChunkedDataStreamIter(self) diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index 2441959e..7efe4f19 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -310,7 +310,7 @@ def fix_up(cls, command_class): def test_new_configuration_setting(self): - class Test(): + class Test: generating = ConfigurationSetting() @ConfigurationSetting(name='required_fields') @@ -470,6 +470,7 @@ def test_option(self): self.assertEqual(observed, expected) +TestSearchCommand.__test__ = False if __name__ == "__main__": main() diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index 7a6d0e9e..a3c7e2b4 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -97,7 +97,7 @@ def random_list(population, *args): def random_unicode(): - return ''.join([six.chr(x) for x in random.sample(list(range(MAX_NARROW_UNICODE)), random.randint(0, max_length))]) + return ''.join([str(x) for x in random.sample(list(range(MAX_NARROW_UNICODE)), random.randint(0, max_length))]) # endregion @@ -324,7 +324,7 @@ def _load_chunks(self, ifile): _recordings_path = os.path.join(_package_path, 'recordings', 'scpv2', 'Splunk-6.3') -class TestRecorder(): +class TestRecorder: def __init__(self, test_case): @@ 
-410,12 +410,11 @@ def _record(*args, **kwargs): return _record -class Test(): +class Test: def __init__(self, fieldnames, data_generators): TestCase.__init__(self) - self._data_generators = list(chain((lambda: self._serial_number, time), data_generators)) self._fieldnames = list(chain(('_serial', '_time'), fieldnames)) self._recorder = TestRecorder(self) @@ -467,5 +466,9 @@ def _run(self): # test.record() # test.playback() +Test.__test__ = False +TestRecorder.__test__ = False + + if __name__ == "__main__": main() diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py index b203cc76..22b9e423 100755 --- a/tests/searchcommands/test_search_command.py +++ b/tests/searchcommands/test_search_command.py @@ -36,9 +36,11 @@ import pytest + def build_command_input(getinfo_metadata, execute_metadata, execute_body): input = ('chunked 1.0,{},0\n{}'.format(len(six.ensure_binary(getinfo_metadata)), getinfo_metadata) + - 'chunked 1.0,{},{}\n{}{}'.format(len(six.ensure_binary(execute_metadata)), len(six.ensure_binary(execute_body)), execute_metadata, execute_body)) + 'chunked 1.0,{},{}\n{}{}'.format(len(six.ensure_binary(execute_metadata)), + len(six.ensure_binary(execute_body)), execute_metadata, execute_body)) ifile = BytesIO(six.ensure_binary(input)) @@ -46,9 +48,9 @@ def build_command_input(getinfo_metadata, execute_metadata, execute_body): return ifile + @Configuration() class TestCommand(SearchCommand): - required_option_1 = Option(require=True) required_option_2 = Option(require=True) @@ -104,6 +106,7 @@ def stream(self, records): yield {'_serial': serial_number, 'data': value} serial_number += 1 + @pytest.mark.smoke class TestSearchCommand(TestCase): def setUp(self): @@ -145,7 +148,8 @@ def test_process_scpv1(self): self.assertEqual(str(command.configuration), '') - expected = ("[('clear_required_fields', None, [1]), ('distributed', None, [2]), ('generates_timeorder', None, [1]), " + expected = ( + "[('clear_required_fields', None, [1]), ('distributed', None, [2]), ('generates_timeorder', None, [1]), " "('generating', None, [1, 2]), ('maxinputs', None, [2]), ('overrides_timeorder', None, [1]), " "('required_fields', None, [1, 2]), ('requires_preop', None, [1]), ('retainsevents', None, [1]), " "('run_in_preview', None, [2]), ('streaming', None, [1]), ('streaming_preop', None, [1, 2]), " @@ -160,7 +164,8 @@ def test_process_scpv1(self): except BaseException as error: self.fail('{0}: {1}: {2}\n'.format(type(error).__name__, error, result.getvalue().decode('UTF-8'))) - self.assertEqual('\r\n\r\n\r\n', result.getvalue().decode('UTF-8')) # No message header and no configuration settings + self.assertEqual('\r\n\r\n\r\n', + result.getvalue().decode('UTF-8')) # No message header and no configuration settings ifile = StringIO('\n') result = BytesIO() @@ -188,12 +193,14 @@ def test_process_scpv1(self): configuration.run_in_preview = True configuration.type = 'streaming' - expected = ('clear_required_fields="True", generates_timeorder="True", generating="True", overrides_timeorder="True", ' - 'required_fields="[\'foo\', \'bar\']", requires_preop="True", retainsevents="True", streaming="True", ' - 'streaming_preop="some streaming command"') + expected = ( + 'clear_required_fields="True", generates_timeorder="True", generating="True", overrides_timeorder="True", ' + 'required_fields="[\'foo\', \'bar\']", requires_preop="True", retainsevents="True", streaming="True", ' + 'streaming_preop="some streaming command"') self.assertEqual(str(command.configuration), 
expected) - expected = ("[('clear_required_fields', True, [1]), ('distributed', True, [2]), ('generates_timeorder', True, [1]), " + expected = ( + "[('clear_required_fields', True, [1]), ('distributed', True, [2]), ('generates_timeorder', True, [1]), " "('generating', True, [1, 2]), ('maxinputs', 50000, [2]), ('overrides_timeorder', True, [1]), " "('required_fields', ['foo', 'bar'], [1, 2]), ('requires_preop', True, [1]), " "('retainsevents', True, [1]), ('run_in_preview', True, [2]), ('streaming', True, [1]), " @@ -215,21 +222,20 @@ def test_process_scpv1(self): self.assertRaises(StopIteration, lambda: next(reader)) expected = { - 'clear_required_fields': '1', '__mv_clear_required_fields': '', - 'generating': '1', '__mv_generating': '', - 'generates_timeorder': '1', '__mv_generates_timeorder': '', - 'overrides_timeorder': '1', '__mv_overrides_timeorder': '', - 'requires_preop': '1', '__mv_requires_preop': '', - 'required_fields': 'foo,bar', '__mv_required_fields': '', - 'retainsevents': '1', '__mv_retainsevents': '', - 'streaming': '1', '__mv_streaming': '', + 'clear_required_fields': '1', '__mv_clear_required_fields': '', + 'generating': '1', '__mv_generating': '', + 'generates_timeorder': '1', '__mv_generates_timeorder': '', + 'overrides_timeorder': '1', '__mv_overrides_timeorder': '', + 'requires_preop': '1', '__mv_requires_preop': '', + 'required_fields': 'foo,bar', '__mv_required_fields': '', + 'retainsevents': '1', '__mv_retainsevents': '', + 'streaming': '1', '__mv_streaming': '', 'streaming_preop': 'some streaming command', '__mv_streaming_preop': '', } self.assertDictEqual(expected, observed) # No message header and no configuration settings for action in '__GETINFO__', '__EXECUTE__': - # TestCommand.process should produce an error record on parser errors argv = [ @@ -366,42 +372,43 @@ def test_process_scpv2(self): metadata = ( '{{' - '"action": "getinfo", "preview": false, "searchinfo": {{' - '"latest_time": "0",' - '"splunk_version": "20150522",' - '"username": "admin",' - '"app": "searchcommands_app",' - '"args": [' - '"logging_configuration={logging_configuration}",' - '"logging_level={logging_level}",' - '"record={record}",' - '"show_configuration={show_configuration}",' - '"required_option_1=value_1",' - '"required_option_2=value_2"' - '],' - '"search": "A%7C%20inputlookup%20tweets%20%7C%20countmatches%20fieldname%3Dword_count%20pattern%3D%22%5Cw%2B%22%20text%20record%3Dt%20%7C%20export%20add_timestamp%3Df%20add_offset%3Dt%20format%3Dcsv%20segmentation%3Draw",' - '"earliest_time": "0",' - '"session_key": "0JbG1fJEvXrL6iYZw9y7tmvd6nHjTKj7ggaE7a4Jv5R0UIbeYJ65kThn^3hiNeoqzMT_LOtLpVR3Y8TIJyr5bkHUElMijYZ8l14wU0L4n^Oa5QxepsZNUIIQCBm^",' - '"owner": "admin",' - '"sid": "1433261372.158",' - '"splunkd_uri": "https://127.0.0.1:8089",' - '"dispatch_dir": {dispatch_dir},' - '"raw_args": [' - '"logging_configuration={logging_configuration}",' - '"logging_level={logging_level}",' - '"record={record}",' - '"show_configuration={show_configuration}",' - '"required_option_1=value_1",' - '"required_option_2=value_2"' - '],' - '"maxresultrows": 10,' - '"command": "countmatches"' - '}}' + '"action": "getinfo", "preview": false, "searchinfo": {{' + '"latest_time": "0",' + '"splunk_version": "20150522",' + '"username": "admin",' + '"app": "searchcommands_app",' + '"args": [' + '"logging_configuration={logging_configuration}",' + '"logging_level={logging_level}",' + '"record={record}",' + '"show_configuration={show_configuration}",' + '"required_option_1=value_1",' + '"required_option_2=value_2"' 
+ '],' + '"search": "A%7C%20inputlookup%20tweets%20%7C%20countmatches%20fieldname%3Dword_count%20pattern%3D%22%5Cw%2B%22%20text%20record%3Dt%20%7C%20export%20add_timestamp%3Df%20add_offset%3Dt%20format%3Dcsv%20segmentation%3Draw",' + '"earliest_time": "0",' + '"session_key": "0JbG1fJEvXrL6iYZw9y7tmvd6nHjTKj7ggaE7a4Jv5R0UIbeYJ65kThn^3hiNeoqzMT_LOtLpVR3Y8TIJyr5bkHUElMijYZ8l14wU0L4n^Oa5QxepsZNUIIQCBm^",' + '"owner": "admin",' + '"sid": "1433261372.158",' + '"splunkd_uri": "https://127.0.0.1:8089",' + '"dispatch_dir": {dispatch_dir},' + '"raw_args": [' + '"logging_configuration={logging_configuration}",' + '"logging_level={logging_level}",' + '"record={record}",' + '"show_configuration={show_configuration}",' + '"required_option_1=value_1",' + '"required_option_2=value_2"' + '],' + '"maxresultrows": 10,' + '"command": "countmatches"' + '}}' '}}') basedir = self._package_directory - default_logging_configuration = os.path.join(basedir, 'apps', 'app_with_logging_configuration', 'default', 'logging.conf') + default_logging_configuration = os.path.join(basedir, 'apps', 'app_with_logging_configuration', 'default', + 'logging.conf') dispatch_dir = os.path.join(basedir, 'recordings', 'scpv2', 'Splunk-6.3', 'countmatches.dispatch_dir') logging_configuration = os.path.join(basedir, 'apps', 'app_with_logging_configuration', 'logging.conf') logging_level = 'ERROR' @@ -480,14 +487,18 @@ def test_process_scpv2(self): self.assertEqual(command_metadata.preview, input_header['preview']) self.assertEqual(command_metadata.searchinfo.app, 'searchcommands_app') - self.assertEqual(command_metadata.searchinfo.args, ['logging_configuration=' + logging_configuration, 'logging_level=ERROR', 'record=false', 'show_configuration=true', 'required_option_1=value_1', 'required_option_2=value_2']) + self.assertEqual(command_metadata.searchinfo.args, + ['logging_configuration=' + logging_configuration, 'logging_level=ERROR', 'record=false', + 'show_configuration=true', 'required_option_1=value_1', 'required_option_2=value_2']) self.assertEqual(command_metadata.searchinfo.dispatch_dir, os.path.dirname(input_header['infoPath'])) self.assertEqual(command_metadata.searchinfo.earliest_time, 0.0) self.assertEqual(command_metadata.searchinfo.latest_time, 0.0) self.assertEqual(command_metadata.searchinfo.owner, 'admin') self.assertEqual(command_metadata.searchinfo.raw_args, command_metadata.searchinfo.args) - self.assertEqual(command_metadata.searchinfo.search, 'A| inputlookup tweets | countmatches fieldname=word_count pattern="\\w+" text record=t | export add_timestamp=f add_offset=t format=csv segmentation=raw') - self.assertEqual(command_metadata.searchinfo.session_key, '0JbG1fJEvXrL6iYZw9y7tmvd6nHjTKj7ggaE7a4Jv5R0UIbeYJ65kThn^3hiNeoqzMT_LOtLpVR3Y8TIJyr5bkHUElMijYZ8l14wU0L4n^Oa5QxepsZNUIIQCBm^') + self.assertEqual(command_metadata.searchinfo.search, + 'A| inputlookup tweets | countmatches fieldname=word_count pattern="\\w+" text record=t | export add_timestamp=f add_offset=t format=csv segmentation=raw') + self.assertEqual(command_metadata.searchinfo.session_key, + '0JbG1fJEvXrL6iYZw9y7tmvd6nHjTKj7ggaE7a4Jv5R0UIbeYJ65kThn^3hiNeoqzMT_LOtLpVR3Y8TIJyr5bkHUElMijYZ8l14wU0L4n^Oa5QxepsZNUIIQCBm^') self.assertEqual(command_metadata.searchinfo.sid, '1433261372.158') self.assertEqual(command_metadata.searchinfo.splunk_version, '20150522') self.assertEqual(command_metadata.searchinfo.splunkd_uri, 'https://127.0.0.1:8089') @@ -668,7 +679,8 @@ def test_process_scpv2(self): except BaseException as error: self.fail('{0}: {1}: 
{2}\n'.format(type(error).__name__, error, result.getvalue().decode('utf-8'))) else: - self.fail('Expected SystemExit, not a return from TestCommand.process: {}\n'.format(result.getvalue().decode('utf-8'))) + self.fail('Expected SystemExit, not a return from TestCommand.process: {}\n'.format( + result.getvalue().decode('utf-8'))) self.assertEqual(command.logging_configuration, logging_configuration) self.assertEqual(command.logging_level, logging_level) @@ -680,9 +692,9 @@ def test_process_scpv2(self): finished = r'\"finished\":true' inspector = \ - r'\"inspector\":\{\"messages\":\[\[\"ERROR\",\"Exception at \\\".+\\\", line \d+ : test ' \ - r'logging_configuration=\\\".+\\\" logging_level=\\\"WARNING\\\" record=\\\"f\\\" ' \ - r'required_option_1=\\\"value_1\\\" required_option_2=\\\"value_2\\\" show_configuration=\\\"f\\\"\"\]\]\}' + r'\"inspector\":\{\"messages\":\[\[\"ERROR\",\"Exception at \\\".+\\\", line \d+ : test ' \ + r'logging_configuration=\\\".+\\\" logging_level=\\\"WARNING\\\" record=\\\"f\\\" ' \ + r'required_option_1=\\\"value_1\\\" required_option_2=\\\"value_2\\\" show_configuration=\\\"f\\\"\"\]\]\}' six.assertRegex( self, @@ -753,5 +765,8 @@ def test_process_scpv2(self): _package_directory = os.path.dirname(os.path.abspath(__file__)) +TestCommand.__test__ = False +TestStreamingCommand.__test__ = False + if __name__ == "__main__": main() From 4cb0f4a7d61b0f28f2a4fc66512319bfbf0c79da Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Fri, 6 May 2022 14:59:45 +0530 Subject: [PATCH 04/60] code changes --- tests/__init__.py | 1 - tests/modularinput/__init__.py | 0 tests/searchcommands/test_builtin_options.py | 2 +- tests/searchcommands/test_decorators.py | 12 ++--- .../searchcommands/test_generator_command.py | 2 +- tests/searchcommands/test_internals_v1.py | 49 +++++++++---------- tests/searchcommands/test_internals_v2.py | 41 +++++++--------- .../test_multibyte_processing.py | 1 - tests/searchcommands/test_search_command.py | 24 +++++---- .../searchcommands/test_searchcommands_app.py | 22 +++------ .../searchcommands/test_streaming_command.py | 12 ++--- tests/searchcommands/test_validators.py | 10 ++-- tests/test_collection.py | 2 +- tests/testlib.py | 4 +- 14 files changed, 76 insertions(+), 106 deletions(-) delete mode 100644 tests/__init__.py delete mode 100644 tests/modularinput/__init__.py diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index 2ae28399..00000000 --- a/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -pass diff --git a/tests/modularinput/__init__.py b/tests/modularinput/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/searchcommands/test_builtin_options.py b/tests/searchcommands/test_builtin_options.py index 24073d9e..122d650b 100644 --- a/tests/searchcommands/test_builtin_options.py +++ b/tests/searchcommands/test_builtin_options.py @@ -20,8 +20,8 @@ import sys import logging -import pytest from unittest import main, TestCase +import pytest from splunklib.six.moves import cStringIO as StringIO diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index 7efe4f19..0a9f427f 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -20,21 +20,17 @@ import sys from io import TextIOWrapper +import pytest from splunklib.searchcommands import Configuration, Option, environment, validators from splunklib.searchcommands.decorators import ConfigurationSetting from splunklib.searchcommands.internals import json_encode_string 
from splunklib.searchcommands.search_command import SearchCommand -try: - from tests.searchcommands import rebase_environment -except ImportError: - # Skip on Python 2.6 - pass +from tests.searchcommands import rebase_environment from splunklib import six -import pytest @Configuration() @@ -302,7 +298,7 @@ def fix_up(cls, command_class): except Exception as error: self.assertIsInstance(error, ValueError, 'Expected ValueError, not {}({}) for {}={}'.format(type(error).__name__, error, name, repr(value))) else: - self.fail('Expected ValueError, not success for {}={}'.format(name, repr(value))) + self.fail(f'Expected ValueError, not success for {name}={repr(value)}') settings_class = new_configuration_settings_class() settings_instance = settings_class(command=None) @@ -451,7 +447,7 @@ def test_option(self): elif type(x.validator).__name__ == 'RegularExpression': self.assertEqual(expected[x.name], x.value.pattern) elif isinstance(x.value, TextIOWrapper): - self.assertEqual(expected[x.name], "'%s'" % x.value.name) + self.assertEqual(expected[x.name], f"'{x.value.name}'" ) elif not isinstance(x.value, (bool,) + (float,) + (six.text_type,) + (six.binary_type,) + tuplewrap(six.integer_types)): self.assertEqual(expected[x.name], repr(x.value)) else: diff --git a/tests/searchcommands/test_generator_command.py b/tests/searchcommands/test_generator_command.py index 7a3320d2..9a56e8bb 100644 --- a/tests/searchcommands/test_generator_command.py +++ b/tests/searchcommands/test_generator_command.py @@ -23,7 +23,7 @@ def generate(self): ds = chunky.ChunkedDataStream(out_stream) is_first_chunk = True finished_seen = False - expected = set([str(i) for i in range(1, 10)]) + expected = set(str(i) for i in range(1, 10)) seen = set() for chunk in ds: if is_first_chunk: diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py index 0b6c512a..793cd768 100755 --- a/tests/searchcommands/test_internals_v1.py +++ b/tests/searchcommands/test_internals_v1.py @@ -14,6 +14,11 @@ # License for the specific language governing permissions and limitations # under the License. 
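The test_generator_command change above swaps a list comprehension inside set() for a generator expression, another recurring Pylint suggestion in this series: the generator feeds the constructor directly without building a throwaway list. A sketch of the equivalence:

    expected = set([str(i) for i in range(1, 10)])         # old spelling
    assert expected == set(str(i) for i in range(1, 10))   # new spelling
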
+from contextlib import closing +from unittest import main, TestCase +import os +import pytest +from functools import reduce from splunklib.searchcommands.internals import CommandLineParser, InputHeader, RecordWriterV1 from splunklib.searchcommands.decorators import Configuration, Option @@ -21,19 +26,11 @@ from splunklib.searchcommands.search_command import SearchCommand -from contextlib import closing from splunklib.six import StringIO, BytesIO -from splunklib.six.moves import zip as izip - -from unittest import main, TestCase - -import os from splunklib import six from splunklib.six.moves import range -from functools import reduce -import pytest @pytest.mark.smoke class TestInternals(TestCase): @@ -93,7 +90,8 @@ def fix_up(cls, command_class): pass CommandLineParser.parse(command, ['required_option=true'] + fieldnames) for option in six.itervalues(command.options): - if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', 'show_configuration']: + if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', + 'show_configuration']: self.assertFalse(option.is_set) continue self.assertTrue(option.is_set) @@ -111,7 +109,8 @@ def fix_up(cls, command_class): pass # Command line with unrecognized options - self.assertRaises(ValueError, CommandLineParser.parse, command, ['unrecognized_option_1=foo', 'unrecognized_option_2=bar']) + self.assertRaises(ValueError, CommandLineParser.parse, command, + ['unrecognized_option_1=foo', 'unrecognized_option_2=bar']) # Command line with a variety of quoted/escaped text options @@ -175,24 +174,23 @@ def fix_up(cls, command_class): pass argv = [string] self.assertRaises(SyntaxError, CommandLineParser.parse, command, argv) - def test_command_line_parser_unquote(self): parser = CommandLineParser options = [ - r'foo', # unquoted string with no escaped characters - r'fo\o\ b\"a\\r', # unquoted string with some escaped characters - r'"foo"', # quoted string with no special characters - r'"""foobar1"""', # quoted string with quotes escaped like this: "" - r'"\"foobar2\""', # quoted string with quotes escaped like this: \" - r'"foo ""x"" bar"', # quoted string with quotes escaped like this: "" - r'"foo \"x\" bar"', # quoted string with quotes escaped like this: \" - r'"\\foobar"', # quoted string with an escaped backslash - r'"foo \\ bar"', # quoted string with an escaped backslash - r'"foobar\\"', # quoted string with an escaped backslash - r'foo\\\bar', # quoted string with an escaped backslash and an escaped 'b' - r'""', # pair of quotes - r''] # empty string + r'foo', # unquoted string with no escaped characters + r'fo\o\ b\"a\\r', # unquoted string with some escaped characters + r'"foo"', # quoted string with no special characters + r'"""foobar1"""', # quoted string with quotes escaped like this: "" + r'"\"foobar2\""', # quoted string with quotes escaped like this: \" + r'"foo ""x"" bar"', # quoted string with quotes escaped like this: "" + r'"foo \"x\" bar"', # quoted string with quotes escaped like this: \" + r'"\\foobar"', # quoted string with an escaped backslash + r'"foo \\ bar"', # quoted string with an escaped backslash + r'"foobar\\"', # quoted string with an escaped backslash + r'foo\\\bar', # quoted string with an escaped backslash and an escaped 'b' + r'""', # pair of quotes + r''] # empty string expected = [ r'foo', @@ -286,7 +284,7 @@ def test_input_header(self): 'sentence': 'hello world!'} input_header = InputHeader() - text = reduce(lambda value, item: value + 
'{}:{}\n'.format(item[0], item[1]), six.iteritems(collection), '') + '\n' + text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', six.iteritems(collection), '') + '\n' with closing(StringIO(text)) as input_file: input_header.read(input_file) @@ -310,7 +308,6 @@ def test_messages_header(self): @Configuration() class TestMessagesHeaderCommand(SearchCommand): - class ConfigurationSettings(SearchCommand.ConfigurationSettings): @classmethod diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index a3c7e2b4..b4215f5b 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -15,38 +15,31 @@ # License for the specific language governing permissions and limitations # under the License. +import gzip +import io +import json +import os +import random -from splunklib.searchcommands.internals import MetadataDecoder, MetadataEncoder, Recorder, RecordWriterV2 -from splunklib.searchcommands import SearchMetric -from splunklib import six -from splunklib.six.moves import range -from collections import OrderedDict -from collections import namedtuple, deque -from splunklib.six import BytesIO as BytesIO +import pytest from functools import wraps -from glob import iglob from itertools import chain -from splunklib.six.moves import filter as ifilter -from splunklib.six.moves import map as imap -from splunklib.six.moves import zip as izip from sys import float_info from tempfile import mktemp from time import time from types import MethodType -from sys import version_info as python_version -try: - from unittest2 import main, TestCase -except ImportError: - from unittest import main, TestCase +from unittest import main, TestCase +from collections import OrderedDict +from collections import namedtuple, deque + +from splunklib.searchcommands.internals import MetadataDecoder, MetadataEncoder, Recorder, RecordWriterV2 +from splunklib.searchcommands import SearchMetric +from splunklib import six +from splunklib.six.moves import range +from splunklib.six import BytesIO as BytesIO import splunklib.six.moves.cPickle as pickle -import gzip -import io -import json -import os -import random -import pytest # region Functions for producing random apps @@ -228,8 +221,8 @@ def test_record_writer_with_random_data(self, save_recording=False): self.assertListEqual(writer._inspector['messages'], messages) self.assertDictEqual( - dict([k_v for k_v in six.iteritems(writer._inspector) if k_v[0].startswith('metric.')]), - dict([('metric.' + k_v1[0], k_v1[1]) for k_v1 in six.iteritems(metrics)])) + dict(k_v for k_v in six.iteritems(writer._inspector) if k_v[0].startswith('metric.')), + dict(('metric.' + k_v1[0], k_v1[1]) for k_v1 in six.iteritems(metrics))) writer.flush(finished=True) diff --git a/tests/searchcommands/test_multibyte_processing.py b/tests/searchcommands/test_multibyte_processing.py index bf940263..1d021eed 100644 --- a/tests/searchcommands/test_multibyte_processing.py +++ b/tests/searchcommands/test_multibyte_processing.py @@ -4,7 +4,6 @@ from os import path -from splunklib import six from splunklib.searchcommands import StreamingCommand, Configuration diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py index 22b9e423..7df283c7 100755 --- a/tests/searchcommands/test_search_command.py +++ b/tests/searchcommands/test_search_command.py @@ -15,15 +15,6 @@ # License for the specific language governing permissions and limitations # under the License. 
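The test_internals_v2 assertions above now build dicts straight from generator expressions when filtering the writer's inspector by key prefix. The pattern in isolation, with made-up values:

    inspector = {'metric.read': 1, 'metric.write': 2, 'messages': []}
    metrics = dict(k_v for k_v in inspector.items() if k_v[0].startswith('metric.'))
    assert metrics == {'metric.read': 1, 'metric.write': 2}
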
- -from splunklib import six -from splunklib.searchcommands import Configuration, StreamingCommand -from splunklib.searchcommands.decorators import ConfigurationSetting, Option -from splunklib.searchcommands.search_command import SearchCommand -from splunklib.client import Service - -from splunklib.six import StringIO, BytesIO -from splunklib.six.moves import zip as izip from json.encoder import encode_basestring as encode_string from unittest import main, TestCase @@ -36,11 +27,18 @@ import pytest +from splunklib import six +from splunklib.searchcommands import Configuration, StreamingCommand +from splunklib.searchcommands.decorators import ConfigurationSetting, Option +from splunklib.searchcommands.search_command import SearchCommand +from splunklib.client import Service + +from splunklib.six import StringIO, BytesIO + def build_command_input(getinfo_metadata, execute_metadata, execute_body): - input = ('chunked 1.0,{},0\n{}'.format(len(six.ensure_binary(getinfo_metadata)), getinfo_metadata) + - 'chunked 1.0,{},{}\n{}{}'.format(len(six.ensure_binary(execute_metadata)), - len(six.ensure_binary(execute_body)), execute_metadata, execute_body)) + input = (f'chunked 1.0,{len(six.ensure_binary(getinfo_metadata))},0\n{getinfo_metadata}' + + f'chunked 1.0,{len(six.ensure_binary(execute_metadata))},{len(six.ensure_binary(execute_body))}\n{execute_metadata}{execute_body}') ifile = BytesIO(six.ensure_binary(input)) @@ -311,7 +309,7 @@ def test_process_scpv1(self): # noinspection PyTypeChecker command.process(argv, ifile, ofile=result) except BaseException as error: - self.fail('Expected no exception, but caught {}: {}'.format(type(error).__name__, error)) + self.fail(f'Expected no exception, but caught {type(error).__name__}: {error}') else: six.assertRegex( self, diff --git a/tests/searchcommands/test_searchcommands_app.py b/tests/searchcommands/test_searchcommands_app.py index 38f83836..25435054 100755 --- a/tests/searchcommands/test_searchcommands_app.py +++ b/tests/searchcommands/test_searchcommands_app.py @@ -25,15 +25,9 @@ from collections import namedtuple -from splunklib.six.moves import cStringIO as StringIO from datetime import datetime -from splunklib.six.moves import filter as ifilter -from splunklib.six.moves import map as imap -from splunklib.six.moves import zip as izip - from subprocess import PIPE, Popen -from splunklib import six from unittest import main, skipUnless, TestCase @@ -43,14 +37,12 @@ import io import os import sys +import pytest -try: - from tests.searchcommands import project_root -except ImportError: - # Python 2.6 - pass +from splunklib.six.moves import cStringIO as StringIO +from splunklib import six -import pytest +from tests.searchcommands import project_root def pypy(): @@ -287,8 +279,7 @@ def _compare_csv_files_time_insensitive(self, expected, output): output_row['_time'] = expected_row['_time'] self.assertDictEqual( - expected_row, output_row, 'Error on line {0}: expected {1}, not {2}'.format( - line_number, expected_row, output_row)) + expected_row, output_row, f'Error on line {line_number}: expected {expected_row}, not {output_row}') line_number += 1 @@ -310,8 +301,7 @@ def _compare_csv_files_time_sensitive(self, expected, output): for expected_row in expected: output_row = next(output) self.assertDictEqual( - expected_row, output_row, 'Error on line {0}: expected {1}, not {2}'.format( - line_number, expected_row, output_row)) + expected_row, output_row, f'Error on line {line_number}: expected {expected_row}, not {output_row}') line_number += 1 def 
_get_search_command_path(self, name): diff --git a/tests/searchcommands/test_streaming_command.py b/tests/searchcommands/test_streaming_command.py index 61c89947..579c334c 100644 --- a/tests/searchcommands/test_streaming_command.py +++ b/tests/searchcommands/test_streaming_command.py @@ -1,7 +1,8 @@ import io -from . import chunked_data_stream as chunky from splunklib.searchcommands import StreamingCommand, Configuration +from . import chunked_data_stream as chunky + def test_simple_streaming_command(): @@ -57,10 +58,10 @@ def stream(self, records): output_records = list(next(output_iter).data) # Assert that count of records having "odd_field" is 0 - assert len(list([r for r in output_records if "odd_field" in r])) == 0 + assert len(list(r for r in output_records if "odd_field" in r)) == 0 # Assert that count of records having "even_field" is 10 - assert len(list([r for r in output_records if "even_field" in r])) == 10 + assert len(list(r for r in output_records if "even_field" in r)) == 10 def test_field_preservation_positive(): @@ -91,8 +92,7 @@ def stream(self, records): output_records = list(next(output_iter).data) # Assert that count of records having "odd_field" is 10 - assert len(list([r for r in output_records if "odd_field" in r])) == 10 + assert len(list(r for r in output_records if "odd_field" in r)) == 10 # Assert that count of records having "even_field" is 10 - assert len(list([r for r in output_records if "even_field" in r])) == 10 - + assert len(list(r for r in output_records if "even_field" in r)) == 10 diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index e3cbb278..45d6f3b5 100755 --- a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -15,7 +15,6 @@ # License for the specific language governing permissions and limitations # under the License. 
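The build_command_input helper rewritten with f-strings above frames its test input using the SCPv2 chunked transport: each chunk is a header line of the form 'chunked 1.0,<metadata byte length>,<body byte length>' followed by exactly that many bytes of metadata JSON and body. A worked example of the framing, with a made-up payload:

    getinfo_metadata = '{"action": "getinfo"}'
    header = f'chunked 1.0,{len(getinfo_metadata.encode("utf-8"))},0\n'
    assert header + getinfo_metadata == 'chunked 1.0,21,0\n{"action": "getinfo"}'
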
-from splunklib.searchcommands import validators from random import randint from unittest import main, TestCase @@ -23,10 +22,11 @@ import re import sys import tempfile +import pytest from splunklib import six from splunklib.six.moves import range +from splunklib.searchcommands import validators -import pytest # P2 [ ] TODO: Verify that all format methods produce 'None' when value is None @@ -67,10 +67,10 @@ def test_duration(self): value = six.text_type(seconds) self.assertEqual(validator(value), seconds) self.assertEqual(validator(validator.format(seconds)), seconds) - value = '%d:%02d' % (seconds / 60, seconds % 60) + value = f'{seconds/60}:{seconds%60:02} ' self.assertEqual(validator(value), seconds) self.assertEqual(validator(validator.format(seconds)), seconds) - value = '%d:%02d:%02d' % (seconds / 3600, (seconds / 60) % 60, seconds % 60) + value = f'{seconds/3600}:{(seconds/60)%60:02}:{seconds%60}' self.assertEqual(validator(value), seconds) self.assertEqual(validator(validator.format(seconds)), seconds) @@ -192,7 +192,7 @@ def test(integer): self.assertEqual(validator.__call__(maxsize), maxsize) self.assertRaises(ValueError, validator.__call__, minsize - 1) self.assertRaises(ValueError, validator.__call__, maxsize + 1) - + def test_float(self): # Float validator test diff --git a/tests/test_collection.py b/tests/test_collection.py index 8d99b05d..2423d77b 100755 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -78,7 +78,7 @@ def test_list(self): coll = getattr(self.service, coll_name) expected = [ent.name for ent in coll.list(count=10, sort_mode="auto")] if len(expected) == 0: - logging.debug(f"No entities in collection {coll_name}; skipping test.", coll_name) + logging.debug(f"No entities in collection {coll_name}; skipping test.") found = [ent.name for ent in coll.list()][:10] self.assertEqual(expected, found, msg=f'on {coll_name} (expected {expected}, found {found})') diff --git a/tests/testlib.py b/tests/testlib.py index ba9b48ce..00c3a60e 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -149,9 +149,7 @@ def clear_restart_message(self): try: self.service.delete("messages/restart_required") except client.HTTPError as he: - if he.status == 404: - pass - else: + if he.status != 404: raise @contextlib.contextmanager From aeae735ea8f66f4586e05037c1f1c74c66a58e7e Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Fri, 6 May 2022 15:34:21 +0530 Subject: [PATCH 05/60] reverting f string change --- tests/searchcommands/test_validators.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index 45d6f3b5..cc3dd6f2 100755 --- a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -67,10 +67,10 @@ def test_duration(self): value = six.text_type(seconds) self.assertEqual(validator(value), seconds) self.assertEqual(validator(validator.format(seconds)), seconds) - value = f'{seconds/60}:{seconds%60:02} ' + value = '%d:%02d' % (seconds / 60, seconds % 60) self.assertEqual(validator(value), seconds) self.assertEqual(validator(validator.format(seconds)), seconds) - value = f'{seconds/3600}:{(seconds/60)%60:02}:{seconds%60}' + value = '%d:%02d:%02d' % (seconds / 3600, (seconds / 60) % 60, seconds % 60) self.assertEqual(validator(value), seconds) self.assertEqual(validator(validator.format(seconds)), seconds) From b0ca411af931c827228c69647329c064c4ef9c59 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 9 May 2022 11:50:53 +0530 Subject: [PATCH 
06/60] adding python 3.9 to CI --- .github/workflows/test.yml | 3 ++- Makefile | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 31d43c8e..9e08e290 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,6 +11,7 @@ jobs: matrix: os: - ubuntu-latest + python: [ 3.7, 3.9] splunk-version: - "8.0" - "latest" @@ -34,7 +35,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: ${{ matrix.python }} - name: Install tox run: pip install tox - name: Test Execution diff --git a/Makefile b/Makefile index 0a3f5804..9f1bbd8b 100644 --- a/Makefile +++ b/Makefile @@ -34,7 +34,7 @@ docs: .PHONY: test test: @echo "$(ATTN_COLOR)==> test $(NO_COLOR)" - @tox -e py37 + @tox -e py37,py39 .PHONY: test_specific test_specific: From 9329894abe3b0de50e1dbdce5eeee6477cb6631a Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Wed, 11 May 2022 11:31:15 +0530 Subject: [PATCH 07/60] removed support for python2 from modularinput --- splunklib/modularinput/argument.py | 11 ++++------- splunklib/modularinput/event.py | 10 +++------- splunklib/modularinput/event_writer.py | 11 +++-------- splunklib/modularinput/input_definition.py | 9 ++------- splunklib/modularinput/scheme.py | 11 ++++------- splunklib/modularinput/script.py | 13 +++---------- splunklib/modularinput/utils.py | 7 +++---- splunklib/modularinput/validation_definition.py | 10 +++------- 8 files changed, 25 insertions(+), 57 deletions(-) diff --git a/splunklib/modularinput/argument.py b/splunklib/modularinput/argument.py index 04214d16..e8aca493 100644 --- a/splunklib/modularinput/argument.py +++ b/splunklib/modularinput/argument.py @@ -12,13 +12,10 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import -try: - import xml.etree.ElementTree as ET -except ImportError: - import xml.etree.cElementTree as ET +import xml.etree.ElementTree as ET -class Argument(object): + +class Argument: """Class representing an argument to a modular input kind. ``Argument`` is meant to be used with ``Scheme`` to generate an XML @@ -100,4 +97,4 @@ def add_to_document(self, parent): for name, value in subelements: ET.SubElement(arg, name).text = str(value).lower() - return arg \ No newline at end of file + return arg diff --git a/splunklib/modularinput/event.py b/splunklib/modularinput/event.py index 9cd6cf3a..93759b06 100644 --- a/splunklib/modularinput/event.py +++ b/splunklib/modularinput/event.py @@ -12,16 +12,12 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import from io import TextIOBase +import xml.etree.ElementTree as ET from splunklib.six import ensure_text -try: - import xml.etree.cElementTree as ET -except ImportError as ie: - import xml.etree.ElementTree as ET -class Event(object): +class Event: """Represents an event or fragment of an event to be written by this modular input to Splunk. To write an input to a stream, call the ``write_to`` function, passing in a stream. 
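The Event docstring above describes the write_to contract: serialize the event as a single XML element onto the given stream. A hypothetical usage sketch (the stanza name is made up, and it assumes the constructor keywords are unchanged by this patch):

    import io
    from splunklib.modularinput.event import Event

    event = Event(data='hello world', stanza='my_input://stanza_name')
    stream = io.StringIO()
    event.write_to(stream)  # writes one <event> element and flushes
    assert stream.getvalue().startswith('<event')
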
@@ -111,4 +107,4 @@ def write_to(self, stream):
             stream.write(ensure_text(ET.tostring(event)))
         else:
             stream.write(ET.tostring(event))
-        stream.flush() \ No newline at end of file
+        stream.flush()
diff --git a/splunklib/modularinput/event_writer.py b/splunklib/modularinput/event_writer.py
index 5f8c5aa8..75a96a68 100755
--- a/splunklib/modularinput/event_writer.py
+++ b/splunklib/modularinput/event_writer.py
@@ -12,18 +12,13 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from __future__ import absolute_import
 import sys

 from splunklib.six import ensure_str
 from .event import ET

-try:
-    from splunklib.six.moves import cStringIO as StringIO
-except ImportError:
-    from splunklib.six import StringIO

-class EventWriter(object):
+class EventWriter:
     """``EventWriter`` writes events and error messages to Splunk from a modular input.

     Its two important methods are ``writeEvent``, which takes an ``Event`` object,
     and ``log``, which takes a severity and an error message.
@@ -68,7 +63,7 @@ def log(self, severity, message):

         :param message: ``string``, message to log.
         """
-        self._err.write("%s %s\n" % (severity, message))
+        self._err.write(f"{severity} {message}\n")
         self._err.flush()

     def write_xml_document(self, document):
@@ -83,5 +78,5 @@ def write_xml_document(self, document):
     def close(self):
         """Write the closing </stream> tag to make this XML well formed."""
         if self.header_written:
-            self._out.write("</stream>")
+            self._out.write("</stream>")
         self._out.flush()
diff --git a/splunklib/modularinput/input_definition.py b/splunklib/modularinput/input_definition.py
index fdc7cbb3..c0e8e1ac 100644
--- a/splunklib/modularinput/input_definition.py
+++ b/splunklib/modularinput/input_definition.py
@@ -12,12 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from __future__ import absolute_import
-try:
-    import xml.etree.cElementTree as ET
-except ImportError as ie:
-    import xml.etree.ElementTree as ET
-
+import xml.etree.ElementTree as ET
 from .utils import parse_xml_data

 class InputDefinition:
@@ -57,4 +52,4 @@ def parse(stream):
         else:
             definition.metadata[node.tag] = node.text

-        return definition \ No newline at end of file
+        return definition
diff --git a/splunklib/modularinput/scheme.py b/splunklib/modularinput/scheme.py
index 4104e4a3..e84ce00d 100644
--- a/splunklib/modularinput/scheme.py
+++ b/splunklib/modularinput/scheme.py
@@ -12,13 +12,10 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from __future__ import absolute_import
-try:
-    import xml.etree.cElementTree as ET
-except ImportError:
-    import xml.etree.ElementTree as ET
+import xml.etree.ElementTree as ET

-class Scheme(object):
+
+class Scheme:
     """Class representing the metadata for a modular input kind.

     A ``Scheme`` specifies a title, description, several options of how Splunk should run modular inputs of this
@@ -82,4 +79,4 @@ def to_xml(self):
         for arg in self.arguments:
             arg.add_to_document(args)

-        return root \ No newline at end of file
+        return root
diff --git a/splunklib/modularinput/script.py b/splunklib/modularinput/script.py
index 8595dc4b..1502774a 100644
--- a/splunklib/modularinput/script.py
+++ b/splunklib/modularinput/script.py
@@ -12,24 +12,18 @@
 # License for the specific language governing permissions and limitations
 # under the License. 
-from __future__ import absolute_import from abc import ABCMeta, abstractmethod -from splunklib.six.moves.urllib.parse import urlsplit import sys +import xml.etree.ElementTree as ET +from urllib.parse import urlsplit from ..client import Service from .event_writer import EventWriter from .input_definition import InputDefinition from .validation_definition import ValidationDefinition -from splunklib import six -try: - import xml.etree.cElementTree as ET -except ImportError: - import xml.etree.ElementTree as ET - -class Script(six.with_metaclass(ABCMeta, object)): +class Script(metaclass=ABCMeta): """An abstract base class for implementing modular inputs. Subclasses should override ``get_scheme``, ``stream_events``, @@ -165,7 +159,6 @@ def validate_input(self, definition): :param definition: The parameters for the proposed input passed by splunkd. """ - pass @abstractmethod def stream_events(self, inputs, ew): diff --git a/splunklib/modularinput/utils.py b/splunklib/modularinput/utils.py index 3d42b632..57f00330 100644 --- a/splunklib/modularinput/utils.py +++ b/splunklib/modularinput/utils.py @@ -14,8 +14,7 @@ # File for utility functions -from __future__ import absolute_import -from splunklib.six.moves import zip + def xml_compare(expected, found): """Checks equality of two ``ElementTree`` objects. @@ -39,7 +38,7 @@ def xml_compare(expected, found): return False # compare children - if not all([xml_compare(a, b) for a, b in zip(expected_children, found_children)]): + if not all(xml_compare(a, b) for a, b in zip(expected_children, found_children)): return False # compare elements, if there is no text node, return True @@ -59,7 +58,7 @@ def parse_parameters(param_node): parameters.append(mvp.text) return parameters else: - raise ValueError("Invalid configuration scheme, %s tag unexpected." % param_node.tag) + raise ValueError(f"Invalid configuration scheme, {param_node.tag} tag unexpected.") def parse_xml_data(parent_node, child_node_tag): data = {} diff --git a/splunklib/modularinput/validation_definition.py b/splunklib/modularinput/validation_definition.py index 3bbe9760..0ad40e9e 100644 --- a/splunklib/modularinput/validation_definition.py +++ b/splunklib/modularinput/validation_definition.py @@ -13,16 +13,12 @@ # under the License. -from __future__ import absolute_import -try: - import xml.etree.cElementTree as ET -except ImportError as ie: - import xml.etree.ElementTree as ET +import xml.etree.ElementTree as ET from .utils import parse_xml_data -class ValidationDefinition(object): +class ValidationDefinition: """This class represents the XML sent by Splunk for external validation of a new modular input. 
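The script.py hunk above replaces six.with_metaclass(ABCMeta, object) with Python 3's native metaclass keyword; the abstract-method machinery behaves the same. A standalone sketch with a hypothetical class name:

    from abc import ABCMeta, abstractmethod

    class AbstractScript(metaclass=ABCMeta):  # was: six.with_metaclass(ABCMeta, object)
        @abstractmethod
        def stream_events(self, inputs, ew):
            ...

    try:
        AbstractScript()  # abstract classes still cannot be instantiated
    except TypeError:
        pass
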
@@ -83,4 +79,4 @@ def parse(stream): # Store anything else in metadata definition.metadata[node.tag] = node.text - return definition \ No newline at end of file + return definition From 83ecdf4b19185f2c2709922c0e172afb4044200f Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Wed, 11 May 2022 12:18:53 +0530 Subject: [PATCH 08/60] Update __init__.py --- splunklib/__init__.py | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/splunklib/__init__.py b/splunklib/__init__.py index 87d26b74..8f808d64 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -14,8 +14,6 @@ """Python library for Splunk.""" -from __future__ import absolute_import -from splunklib.six.moves import map import logging DEFAULT_LOG_FORMAT = '%(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, ' \ @@ -31,5 +29,35 @@ def setup_logging(level, log_format=DEFAULT_LOG_FORMAT, date_format=DEFAULT_DATE format=log_format, datefmt=date_format) + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """ + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, str): + return s.encode(encoding, errors) + + if isinstance(s, bytes): + return s + + raise TypeError(f"not expecting type '{type(s)}'") + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """ + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, bytes): + return s.decode(encoding, errors) + + if isinstance(s, str): + return s + + raise TypeError(f"not expecting type '{type(s)}'") + + __version_info__ = (1, 6, 19) + __version__ = ".".join(map(str, __version_info__)) From fe37aeb66f19198e6e3e3585a0dfa95997db0472 Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Wed, 11 May 2022 12:19:00 +0530 Subject: [PATCH 09/60] Update binding.py --- splunklib/binding.py | 164 +++++++++++++++++++++++-------------------- 1 file changed, 88 insertions(+), 76 deletions(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index 4a4098df..a387ecef 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -24,30 +24,22 @@ :mod:`splunklib.client` module. 
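The helpers added to splunklib/__init__.py in the patch above replace six.ensure_binary and six.ensure_str with local, Python 3-only equivalents: str encodes to bytes, bytes decode to str, matching input passes through, and anything else raises TypeError. A quick round-trip sketch:

    from splunklib import ensure_binary, ensure_str

    assert ensure_binary('héllo') == b'h\xc3\xa9llo'
    assert ensure_str(b'h\xc3\xa9llo') == 'héllo'
    assert ensure_str('already text') == 'already text'  # pass-through
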
""" -from __future__ import absolute_import - import io import logging import socket import ssl -import sys import time from base64 import b64encode from contextlib import contextmanager from datetime import datetime from functools import wraps from io import BytesIO -from xml.etree.ElementTree import XML - -from splunklib import six -from splunklib.six.moves import urllib +from urllib import parse +from http import client +from http.cookies import SimpleCookie +from xml.etree.ElementTree import XML, ParseError -from .data import record - -try: - from xml.etree.ElementTree import ParseError -except ImportError as e: - from xml.parsers.expat import ExpatError as ParseError +from splunklib.data import record logger = logging.getLogger(__name__) @@ -56,7 +48,12 @@ "connect", "Context", "handler", - "HTTPError" + "HTTPError", + "UrlEncoded", + "_encode", + "_make_cookie_header", + "_NoAuthenticationToken", + "namespace" ] # If you change these, update the docstring @@ -65,14 +62,16 @@ DEFAULT_PORT = "8089" DEFAULT_SCHEME = "https" + def _log_duration(f): @wraps(f) def new_f(*args, **kwargs): start_time = datetime.now() val = f(*args, **kwargs) end_time = datetime.now() - logger.debug("Operation took %s", end_time-start_time) + logger.debug("Operation took %s", end_time - start_time) return val + return new_f @@ -92,8 +91,8 @@ def _parse_cookies(cookie_str, dictionary): :param dictionary: A dictionary to update with any found key-value pairs. :type dictionary: ``dict`` """ - parsed_cookie = six.moves.http_cookies.SimpleCookie(cookie_str) - for cookie in parsed_cookie.values(): + parsed_cookie = SimpleCookie(cookie_str) + for cookie in list(parsed_cookie.values()): dictionary[cookie.key] = cookie.coded_value @@ -114,10 +113,11 @@ def _make_cookie_header(cookies): :return: ``str` An HTTP header cookie string. :rtype: ``str`` """ - return "; ".join("%s=%s" % (key, value) for key, value in cookies) + return "; ".join(f"{key}={value}" for key, value in cookies) + # Singleton values to eschew None -class _NoAuthenticationToken(object): +class _NoAuthenticationToken: """The value stored in a :class:`Context` or :class:`splunklib.client.Service` class that is not logged in. @@ -129,7 +129,6 @@ class that is not logged in. Likewise, after a ``Context`` or ``Service`` object has been logged out, the token is set to this value again. """ - pass class UrlEncoded(str): @@ -155,7 +154,7 @@ class UrlEncoded(str): **Example**:: import urllib - UrlEncoded('%s://%s' % (scheme, urllib.quote(host)), skip_encode=True) + UrlEncoded(f'{scheme}://{urllib.quote(host)}', skip_encode=True) If you append ``str`` strings and ``UrlEncoded`` strings, the result is also URL encoded. @@ -165,6 +164,7 @@ class UrlEncoded(str): UrlEncoded('ab c') + 'de f' == UrlEncoded('ab cde f') 'ab c' + UrlEncoded('de f') == UrlEncoded('ab cde f') """ + def __new__(self, val='', skip_encode=False, encode_slash=False): if isinstance(val, UrlEncoded): # Don't urllib.quote something already URL encoded. @@ -172,12 +172,12 @@ def __new__(self, val='', skip_encode=False, encode_slash=False): elif skip_encode: return str.__new__(self, val) elif encode_slash: - return str.__new__(self, urllib.parse.quote_plus(val)) + return str.__new__(self, parse.quote_plus(val)) else: - # When subclassing str, just call str's __new__ method + # When subclassing str, just call str.__new__ method # with your class and the value you want to have in the # new string. 
- return str.__new__(self, urllib.parse.quote(val)) + return str.__new__(self, parse.quote(val)) def __add__(self, other): """self + other @@ -187,8 +187,8 @@ def __add__(self, other): """ if isinstance(other, UrlEncoded): return UrlEncoded(str.__add__(self, other), skip_encode=True) - else: - return UrlEncoded(str.__add__(self, urllib.parse.quote(other)), skip_encode=True) + + return UrlEncoded(str.__add__(self, parse.quote(other)), skip_encode=True) def __radd__(self, other): """other + self @@ -198,8 +198,8 @@ def __radd__(self, other): """ if isinstance(other, UrlEncoded): return UrlEncoded(str.__radd__(self, other), skip_encode=True) - else: - return UrlEncoded(str.__add__(urllib.parse.quote(other), self), skip_encode=True) + + return UrlEncoded(str.__add__(parse.quote(other), self), skip_encode=True) def __mod__(self, fields): """Interpolation into ``UrlEncoded``s is disabled. @@ -208,15 +208,17 @@ def __mod__(self, fields): ``TypeError``. """ raise TypeError("Cannot interpolate into a UrlEncoded object.") + def __repr__(self): - return "UrlEncoded(%s)" % repr(urllib.parse.unquote(str(self))) + return f"UrlEncoded({repr(parse.unquote(str(self)))})" + @contextmanager def _handle_auth_error(msg): - """Handle reraising HTTP authentication errors as something clearer. + """Handle re-raising HTTP authentication errors as something clearer. If an ``HTTPError`` is raised with status 401 (access denied) in - the body of this context manager, reraise it as an + the body of this context manager, re-raise it as an ``AuthenticationError`` instead, with *msg* as its message. This function adds no round trips to the server. @@ -237,6 +239,7 @@ def _handle_auth_error(msg): else: raise + def _authentication(request_fun): """Decorator to handle autologin and authentication errors. @@ -271,10 +274,10 @@ def f(): return 42 print _authentication(f) """ + @wraps(request_fun) def wrapper(self, *args, **kwargs): - if self.token is _NoAuthenticationToken and \ - not self.has_cookies(): + if self.token is _NoAuthenticationToken and not self.has_cookies(): # Not yet logged in. if self.autologin and self.username and self.password: # This will throw an uncaught @@ -296,8 +299,8 @@ def wrapper(self, *args, **kwargs): # an AuthenticationError and give up. with _handle_auth_error("Autologin failed."): self.login() - with _handle_auth_error( - "Authentication Failed! If session token is used, it seems to have been expired."): + with _handle_auth_error("Autologin succeeded, but there was an auth error on next request. Something " + "is very wrong."): return request_fun(self, *args, **kwargs) elif he.status == 401 and not self.autologin: raise AuthenticationError( @@ -347,10 +350,10 @@ def _authority(scheme=DEFAULT_SCHEME, host=DEFAULT_HOST, port=DEFAULT_PORT): """ if ':' in host: - # IPv6 addresses must be enclosed in [ ] in order to be well - # formed. + # IPv6 addresses must be enclosed in [ ] in order to be well-formed. 
host = '[' + host + ']' - return UrlEncoded("%s://%s:%s" % (scheme, host, port), skip_encode=True) + return UrlEncoded(f"{scheme}://{host}:{port}", skip_encode=True) + # kwargs: sharing, owner, app def namespace(sharing=None, owner=None, app=None, **kwargs): @@ -405,7 +408,7 @@ def namespace(sharing=None, owner=None, app=None, **kwargs): n = binding.namespace(sharing="global", app="search") """ if sharing in ["system"]: - return record({'sharing': sharing, 'owner': "nobody", 'app': "system" }) + return record({'sharing': sharing, 'owner': "nobody", 'app': "system"}) if sharing in ["global", "app"]: return record({'sharing': sharing, 'owner': "nobody", 'app': app}) if sharing in ["user", None]: @@ -413,7 +416,7 @@ def namespace(sharing=None, owner=None, app=None, **kwargs): raise ValueError("Invalid value for argument: 'sharing'") -class Context(object): +class Context: """This class represents a context that encapsulates a splunkd connection. The ``Context`` class encapsulates the details of HTTP requests, @@ -432,7 +435,7 @@ class Context(object): :type port: ``integer`` :param scheme: The scheme for accessing the service (the default is "https"). :type scheme: "https" or "http" - :param verify: Enable (True) or disable (False) SSL verrification for https connections. + :param verify: Enable (True) or disable (False) SSL verification for https connections. :type verify: ``Boolean`` :param sharing: The sharing mode for the namespace (the default is "user"). :type sharing: "global", "system", "app", or "user" @@ -475,12 +478,14 @@ class Context(object): # Or if you already have a valid cookie c = binding.Context(cookie="splunkd_8089=...") """ + def __init__(self, handler=None, **kwargs): self.http = HttpLib(handler, kwargs.get("verify", False), key_file=kwargs.get("key_file"), - cert_file=kwargs.get("cert_file"), context=kwargs.get("context"), # Default to False for backward compat + cert_file=kwargs.get("cert_file"), context=kwargs.get("context"), + # Default to False for backward compat retries=kwargs.get("retries", 0), retryDelay=kwargs.get("retryDelay", 10)) self.token = kwargs.get("token", _NoAuthenticationToken) - if self.token is None: # In case someone explicitly passes token=None + if self.token is None: # In case someone explicitly passes token=None self.token = _NoAuthenticationToken self.scheme = kwargs.get("scheme", DEFAULT_SCHEME) self.host = kwargs.get("host", DEFAULT_HOST) @@ -513,7 +518,7 @@ def has_cookies(self): :rtype: ``bool`` """ auth_token_key = "splunkd_" - return any(auth_token_key in key for key in self.get_cookies().keys()) + return any(auth_token_key in key for key in list(self.get_cookies().keys())) # Shared per-context request headers @property @@ -529,10 +534,11 @@ def _auth_headers(self): if self.has_cookies(): return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))] elif self.basic and (self.username and self.password): - token = 'Basic %s' % b64encode(("%s:%s" % (self.username, self.password)).encode('utf-8')).decode('ascii') + encoded_username_password = b64encode(f"{self.username}:{self.password}".encode('utf-8')).decode('ascii') + token = f'Basic {encoded_username_password}' return [("Authorization", token)] elif self.bearerToken: - token = 'Bearer %s' % self.bearerToken + token = f"Bearer {self.bearerToken}" return [("Authorization", token)] elif self.token is _NoAuthenticationToken: return [] @@ -541,7 +547,7 @@ def _auth_headers(self): if self.token.startswith('Splunk '): token = self.token else: - token = 'Splunk %s' % self.token + 
token = f"Splunk {self.token}" return [("Authorization", token)] def connect(self): @@ -834,12 +840,12 @@ def request(self, path_segment, method="GET", headers=None, body={}, headers = [] path = self.authority \ - + self._abspath(path_segment, owner=owner, - app=app, sharing=sharing) + + self._abspath(path_segment, owner=owner, + app=app, sharing=sharing) all_headers = headers + self.additional_headers + self._auth_headers logger.debug("%s request to %s (headers: %s, body: %s)", - method, path, str(all_headers), repr(body)) + method, path, str(all_headers), repr(body)) if body: body = _encode(**body) @@ -881,14 +887,14 @@ def login(self): """ if self.has_cookies() and \ - (not self.username and not self.password): + (not self.username and not self.password): # If we were passed session cookie(s), but no username or # password, then login is a nop, since we're automatically # logged in. return if self.token is not _NoAuthenticationToken and \ - (not self.username and not self.password): + (not self.username and not self.password): # If we were passed a session token, but no username or # password, then login is a nop, since we're automatically # logged in. @@ -910,11 +916,11 @@ def login(self): username=self.username, password=self.password, headers=self.additional_headers, - cookie="1") # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header + cookie="1") # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header body = response.body.read() session = XML(body).findtext("./sessionKey") - self.token = "Splunk %s" % session + self.token = f"Splunk {session}" return self except HTTPError as he: if he.status == 401: @@ -929,7 +935,7 @@ def logout(self): return self def _abspath(self, path_segment, - owner=None, app=None, sharing=None): + owner=None, app=None, sharing=None): """Qualifies *path_segment* into an absolute path for a URL. If *path_segment* is already absolute, returns it unchanged. @@ -981,12 +987,11 @@ def _abspath(self, path_segment, # namespace. If only one of app and owner is specified, use # '-' for the other. if ns.app is None and ns.owner is None: - return UrlEncoded("/services/%s" % path_segment, skip_encode=skip_encode) + return UrlEncoded(f"/services/{path_segment}", skip_encode=skip_encode) oname = "nobody" if ns.owner is None else ns.owner aname = "system" if ns.app is None else ns.app - path = UrlEncoded("/servicesNS/%s/%s/%s" % (oname, aname, path_segment), - skip_encode=skip_encode) + path = UrlEncoded(f"/servicesNS/{oname}/{aname}/{path_segment}", skip_encode=skip_encode) return path @@ -1037,21 +1042,23 @@ def connect(**kwargs): c.login() return c + # Note: the error response schema supports multiple messages but we only # return the first, although we do return the body so that an exception # handler that wants to read multiple messages can do so. 
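As a quick sketch of the message the reworked constructor just below assembles, assuming a hypothetical 404 response with a detail string:

    status, reason, detail = 404, "Not Found", "No such saved search"   # hypothetical response
    detail_formatted = "" if detail is None else f" -- {detail}"
    print(f"HTTP {status} {reason}{detail_formatted}")
    # HTTP 404 Not Found -- No such saved search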
class HTTPError(Exception): """This exception is raised for HTTP responses that return an error.""" + def __init__(self, response, _message=None): status = response.status reason = response.reason body = response.body.read() try: detail = XML(body).findtext("./messages/msg") - except ParseError as err: + except ParseError: detail = body - message = "HTTP %d %s%s" % ( - status, reason, "" if detail is None else " -- %s" % detail) + detail_formatted = "" if detail is None else f" -- {detail}" + message = f"HTTP {status} {reason}{detail_formatted}" Exception.__init__(self, _message or message) self.status = status self.reason = reason @@ -1059,6 +1066,7 @@ def __init__(self, response, _message=None): self.body = body self._response = response + class AuthenticationError(HTTPError): """Raised when a login request to Splunk fails. @@ -1066,6 +1074,7 @@ class AuthenticationError(HTTPError): in a call to :meth:`Context.login` or :meth:`splunklib.client.Service.login`, this exception is raised. """ + def __init__(self, message, cause): # Put the body back in the response so that HTTPError's constructor can # read it again. @@ -1073,6 +1082,7 @@ def __init__(self, message, cause): HTTPError.__init__(self, cause._response, message) + # # The HTTP interface used by the Splunk binding layer abstracts the underlying # HTTP library using request & response 'messages' which are implemented as @@ -1100,16 +1110,17 @@ def __init__(self, message, cause): # 'foo=1&foo=2&foo=3'. def _encode(**kwargs): items = [] - for key, value in six.iteritems(kwargs): + for key, value in list(kwargs.items()): if isinstance(value, list): items.extend([(key, item) for item in value]) else: items.append((key, value)) - return urllib.parse.urlencode(items) + return parse.urlencode(items) + # Crack the given url into (scheme, host, port, path) def _spliturl(url): - parsed_url = urllib.parse.urlparse(url) + parsed_url = parse.urlparse(url) host = parsed_url.hostname port = parsed_url.port path = '?'.join((parsed_url.path, parsed_url.query)) if parsed_url.query else parsed_url.path @@ -1118,9 +1129,10 @@ def _spliturl(url): if port is None: port = DEFAULT_PORT return parsed_url.scheme, host, port, path + # Given an HTTP request handler, this wrapper objects provides a related # family of convenience methods built using that handler. -class HttpLib(object): +class HttpLib: """A set of convenient methods for making HTTP calls. ``HttpLib`` provides a general :meth:`request` method, and :meth:`delete`, @@ -1162,7 +1174,9 @@ class HttpLib(object): If using the default handler, SSL verification can be disabled by passing verify=False. """ - def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None, context=None, retries=0, retryDelay=10): + + def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None, context=None, retries=0, + retryDelay=10): if custom_handler is None: self.handler = handler(verify=verify, key_file=key_file, cert_file=cert_file, context=context) else: @@ -1223,7 +1237,7 @@ def get(self, url, headers=None, **kwargs): # the query to be encoded or it will get automatically URL # encoded by being appended to url. url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) - return self.request(url, { 'method': "GET", 'headers': headers }) + return self.request(url, {'method': "GET", 'headers': headers}) def post(self, url, headers=None, **kwargs): """Sends a POST request to a URL. 
@@ -1319,6 +1333,7 @@ class ResponseReader(io.RawIOBase): types of HTTP libraries used with this SDK. This class also provides a preview of the stream and a few useful predicates. """ + # For testing, you can use a StringIO as the argument to # ``ResponseReader`` instead of an ``httplib.HTTPResponse``. It # will work equally well. @@ -1328,10 +1343,7 @@ def __init__(self, response, connection=None): self._buffer = b'' def __str__(self): - if six.PY2: - return self.read() - else: - return str(self.read(), 'UTF-8') + return str(self.read(), 'UTF-8') @property def empty(self): @@ -1357,7 +1369,7 @@ def close(self): self._connection.close() self._response.close() - def read(self, size = None): + def read(self, size=None): """Reads a given number of characters from the response. :param size: The number of characters to read, or "None" to read the @@ -1410,7 +1422,7 @@ def connect(scheme, host, port): kwargs = {} if timeout is not None: kwargs['timeout'] = timeout if scheme == "http": - return six.moves.http_client.HTTPConnection(host, port, **kwargs) + return client.HTTPConnection(host, port, **kwargs) if scheme == "https": if key_file is not None: kwargs['key_file'] = key_file if cert_file is not None: kwargs['cert_file'] = cert_file @@ -1421,8 +1433,8 @@ def connect(scheme, host, port): # verify is True in elif branch and context is not None kwargs['context'] = context - return six.moves.http_client.HTTPSConnection(host, port, **kwargs) - raise ValueError("unsupported scheme: %s" % scheme) + return client.HTTPSConnection(host, port, **kwargs) + raise ValueError(f"unsupported scheme: {scheme}") def request(url, message, **kwargs): scheme, host, port, path = _spliturl(url) @@ -1433,7 +1445,7 @@ def request(url, message, **kwargs): "User-Agent": "splunk-sdk-python/1.6.19", "Accept": "*/*", "Connection": "Close", - } # defaults + } # defaults for key, value in message["headers"]: head[key] = value method = message.get("method", "GET") From 32315499d5887544587b8e0f4c806c379825dabe Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Wed, 11 May 2022 12:19:12 +0530 Subject: [PATCH 10/60] Update client.py --- splunklib/client.py | 249 +++++++++++++++++++++++--------------------- 1 file changed, 133 insertions(+), 116 deletions(-) diff --git a/splunklib/client.py b/splunklib/client.py index ab276c3e..5a7d6f0f 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -65,15 +65,13 @@ import socket from datetime import datetime, timedelta from time import sleep +from urllib import parse -from splunklib import six -from splunklib.six.moves import urllib - -from . 
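Per the testing note on ResponseReader above, a plain file-like object can stand in for an HTTPResponse; under Python 3 that means a BytesIO. A minimal sketch, assuming splunklib is importable:

    import io
    from splunklib.binding import ResponseReader

    reader = ResponseReader(io.BytesIO(b"<response>ok</response>"))
    print(str(reader))    # the patched __str__ decodes the body as UTF-8
    reader.close()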
import data -from .binding import (AuthenticationError, Context, HTTPError, UrlEncoded, - _encode, _make_cookie_header, _NoAuthenticationToken, - namespace) -from .data import record +from splunklib import data +from splunklib.data import record +from splunklib.binding import (AuthenticationError, Context, HTTPError, UrlEncoded, + _encode, _make_cookie_header, _NoAuthenticationToken, + namespace) logger = logging.getLogger(__name__) @@ -83,7 +81,8 @@ "OperationError", "IncomparableException", "Service", - "namespace" + "namespace", + "AuthenticationError" ] PATH_APPS = "apps/local/" @@ -104,7 +103,7 @@ PATH_MODULAR_INPUTS = "data/modular-inputs" PATH_ROLES = "authorization/roles/" PATH_SAVED_SEARCHES = "saved/searches/" -PATH_STANZA = "configs/conf-%s/%s" # (file, stanza) +PATH_STANZA = "configs/conf-%s/%s" # (file, stanza) PATH_USERS = "authentication/users/" PATH_RECEIVERS_STREAM = "/services/receivers/stream" PATH_RECEIVERS_SIMPLE = "/services/receivers/simple" @@ -114,45 +113,38 @@ XNAME_ENTRY = XNAMEF_ATOM % "entry" XNAME_CONTENT = XNAMEF_ATOM % "content" -MATCH_ENTRY_CONTENT = "%s/%s/*" % (XNAME_ENTRY, XNAME_CONTENT) +MATCH_ENTRY_CONTENT = f"{XNAME_ENTRY}/{XNAME_CONTENT}/*" class IllegalOperationException(Exception): """Thrown when an operation is not possible on the Splunk instance that a :class:`Service` object is connected to.""" - pass class IncomparableException(Exception): """Thrown when trying to compare objects (using ``==``, ``<``, ``>``, and so on) of a type that doesn't support it.""" - pass class AmbiguousReferenceException(ValueError): """Thrown when the name used to fetch an entity matches more than one entity.""" - pass class InvalidNameException(Exception): """Thrown when the specified name contains characters that are not allowed in Splunk entity names.""" - pass class NoSuchCapability(Exception): """Thrown when the capability that has been referred to doesn't exist.""" - pass class OperationError(Exception): - """Raised for a failed operation, such as a time out.""" - pass + """Raised for a failed operation, such as a timeout.""" class NotSupportedError(Exception): """Raised for operations that are not supported on a given object.""" - pass def _trailing(template, *targets): @@ -188,8 +180,9 @@ def _trailing(template, *targets): def _filter_content(content, *args): if len(args) > 0: return record((k, content[k]) for k in args) - return record((k, v) for k, v in six.iteritems(content) - if k not in ['eai:acl', 'eai:attributes', 'type']) + return record((k, v) for k, v in list(content.items()) + if k not in ['eai:acl', 'eai:attributes', 'type']) + # Construct a resource path from the given base path + resource name def _path(base, name): @@ -248,7 +241,7 @@ def _parse_atom_entry(entry): metadata = _parse_atom_metadata(content) # Filter some of the noise out of the content record - content = record((k, v) for k, v in six.iteritems(content) + content = record((k, v) for k, v in list(content.items()) if k not in ['eai:acl', 'eai:attributes']) if 'type' in content: @@ -287,6 +280,7 @@ def _parse_atom_metadata(content): return record({'access': access, 'fields': fields}) + # kwargs: scheme, host, port, app, owner, username, password def connect(**kwargs): """This function connects and logs in to a Splunk instance. 
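The connect() helper documented above is the usual entry point; a minimal sketch of the flow (host and credentials are placeholders):

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")   # placeholder credentials
    for app in service.apps:
        print(app.name)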
@@ -415,8 +409,9 @@ class Service(_BaseService): # Or if you already have a valid cookie s = client.Service(cookie="splunkd_8089=...") """ + def __init__(self, **kwargs): - super(Service, self).__init__(**kwargs) + super().__init__(**kwargs) self._splunk_version = None self._kvstore_owner = None @@ -584,7 +579,7 @@ def restart(self, timeout=None): :param timeout: A timeout period, in seconds. :type timeout: ``integer`` """ - msg = { "value": "Restart requested by " + self.username + "via the Splunk SDK for Python"} + msg = {"value": "Restart requested by " + self.username + "via the Splunk SDK for Python"} # This message will be deleted once the server actually restarts. self.messages.create(name="restart_required", **msg) result = self.post("/services/server/control/restart") @@ -708,7 +703,6 @@ def kvstore_owner(self, value): kvstore is refreshed, when the owner value is changed """ self._kvstore_owner = value - self.kvstore @property def kvstore(self): @@ -730,13 +724,14 @@ def users(self): return Users(self) -class Endpoint(object): +class Endpoint: """This class represents individual Splunk resources in the Splunk REST API. An ``Endpoint`` object represents a URI, such as ``/services/saved/searches``. This class provides the common functionality of :class:`Collection` and :class:`Entity` (essentially HTTP GET and POST methods). """ + def __init__(self, service, path): self.service = service self.path = path @@ -956,14 +951,12 @@ def __eq__(self, other): but then ``x != saved_searches['asearch']``. whether or not there was a change on the server. Rather than - try to do something fancy, we simple declare that equality is + try to do something fancy, we simply declare that equality is undefined for Entities. Makes no roundtrips to the server. """ - raise IncomparableException( - "Equality is undefined for objects of class %s" % \ - self.__class__.__name__) + raise IncomparableException(f"Equality is undefined for objects of class {self.__class__.__name__}") def __getattr__(self, key): # Called when an attribute was not found by the normal method. In this @@ -989,7 +982,7 @@ def _load_atom_entry(self, response): apps = [ele.entry.content.get('eai:appName') for ele in elem] raise AmbiguousReferenceException( - "Fetch from server returned multiple entries for name '%s' in apps %s." 
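A short sketch of the kvstore_owner behavior after the change above: assigning an owner simply re-scopes the kvstore property, which is rebuilt on next access rather than refreshed eagerly (connection details are placeholders):

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    service.kvstore_owner = "nobody"
    print([coll.name for coll in service.kvstore])   # collections fetched under the new owner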
% (elem[0].entry.title, apps)) + f"Fetch from server returned multiple entries for name '{elem[0].entry.title}' in apps {apps}.") else: return elem.entry @@ -1024,7 +1017,7 @@ def _proper_namespace(self, owner=None, app=None, sharing=None): :param sharing: :return: """ - if owner is None and app is None and sharing is None: # No namespace provided + if owner is None and app is None and sharing is None: # No namespace provided if self._state is not None and 'access' in self._state: return (self._state.access.owner, self._state.access.app, @@ -1034,7 +1027,7 @@ def _proper_namespace(self, owner=None, app=None, sharing=None): self.service.namespace['app'], self.service.namespace['sharing']) else: - return (owner,app,sharing) + return owner, app, sharing def delete(self): owner, app, sharing = self._proper_namespace() @@ -1042,11 +1035,11 @@ def delete(self): def get(self, path_segment="", owner=None, app=None, sharing=None, **query): owner, app, sharing = self._proper_namespace(owner, app, sharing) - return super(Entity, self).get(path_segment, owner=owner, app=app, sharing=sharing, **query) + return super().get(path_segment, owner=owner, app=app, sharing=sharing, **query) def post(self, path_segment="", owner=None, app=None, sharing=None, **query): owner, app, sharing = self._proper_namespace(owner, app, sharing) - return super(Entity, self).post(path_segment, owner=owner, app=app, sharing=sharing, **query) + return super().post(path_segment, owner=owner, app=app, sharing=sharing, **query) def refresh(self, state=None): """Refreshes the state of this entity. @@ -1137,7 +1130,7 @@ def read(self, response): # text to be dispatched via HTTP. However, these links are already # URL encoded when they arrive, and we need to mark them as such. unquoted_links = dict([(k, UrlEncoded(v, skip_encode=True)) - for k,v in six.iteritems(results['links'])]) + for k, v in list(results['links'].items())]) results['links'] = unquoted_links return results @@ -1182,7 +1175,7 @@ def update(self, **kwargs): """ # The peculiarity in question: the REST API creates a new # Entity if we pass name in the dictionary, instead of the - # expected behavior of updating this Entity. Therefore we + # expected behavior of updating this Entity. Therefore, we # check for 'name' in kwargs and throw an error if it is # there. if 'name' in kwargs: @@ -1195,9 +1188,10 @@ class ReadOnlyCollection(Endpoint): """This class represents a read-only collection of entities in the Splunk instance. """ + def __init__(self, service, path, item=Entity): Endpoint.__init__(self, service, path) - self.item = item # Item accessor + self.item = item # Item accessor self.null_count = -1 def __contains__(self, name): @@ -1229,7 +1223,7 @@ def __getitem__(self, key): name. Where there is no conflict, ``__getitem__`` will fetch the - entity given just the name. If there is a conflict and you + entity given just the name. If there is a conflict, and you pass just a name, it will raise a ``ValueError``. In that case, add the namespace as a second argument. @@ -1276,13 +1270,14 @@ def __getitem__(self, key): response = self.get(key) entries = self._load_list(response) if len(entries) > 1: - raise AmbiguousReferenceException("Found multiple entities named '%s'; please specify a namespace." % key) + raise AmbiguousReferenceException( + f"Found multiple entities named '{key}'; please specify a namespace.") elif len(entries) == 0: raise KeyError(key) else: return entries[0] except HTTPError as he: - if he.status == 404: # No entity matching key and namespace. 
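The namespace disambiguation described in the __getitem__ docstring above looks like this in practice; the saved search name is hypothetical:

    import splunklib.client as client
    from splunklib.binding import namespace

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    ns = namespace(owner="nobody", app="search", sharing="app")
    search = service.saved_searches["mysearch", ns]   # hypothetical entity name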
+ if he.status == 404: # No entity matching key and namespace. raise KeyError(key) else: raise @@ -1346,7 +1341,7 @@ def _entity_path(self, state): # This has been factored out so that it can be easily # overloaded by Configurations, which has to switch its # entities' endpoints from its own properties/ to configs/. - raw_path = urllib.parse.unquote(state.links.alternate) + raw_path = parse.unquote(state.links.alternate) if 'servicesNS/' in raw_path: return _trailing(raw_path, 'servicesNS/', '/', '/') elif 'services/' in raw_path: @@ -1515,8 +1510,6 @@ def list(self, count=None, **kwargs): return list(self.iter(count=count, **kwargs)) - - class Collection(ReadOnlyCollection): """A collection of entities. @@ -1590,8 +1583,8 @@ def create(self, name, **params): applications = s.apps new_app = applications.create("my_fake_app") """ - if not isinstance(name, six.string_types): - raise InvalidNameException("%s is not a valid name for an entity." % name) + if not isinstance(name, str): + raise InvalidNameException(f"{name} is not a valid name for an entity.") if 'namespace' in params: namespace = params.pop('namespace') params['owner'] = namespace.owner @@ -1650,7 +1643,7 @@ def delete(self, name, **params): # has already been deleted, and we reraise it as a # KeyError. if he.status == 404: - raise KeyError("No such entity %s" % name) + raise KeyError(f"No such entity {name}") else: raise return self @@ -1701,14 +1694,13 @@ def get(self, name="", owner=None, app=None, sharing=None, **query): """ name = UrlEncoded(name, encode_slash=True) - return super(Collection, self).get(name, owner, app, sharing, **query) - - + return super().get(name, owner, app, sharing, **query) class ConfigurationFile(Collection): """This class contains all of the stanzas from one configuration file. """ + # __init__'s arguments must match those of an Entity, not a # Collection, since it is being created as the elements of a # Configurations, which is a Collection subclass. @@ -1725,6 +1717,7 @@ class Configurations(Collection): stanzas. This collection is unusual in that the values in it are themselves collections of :class:`ConfigurationFile` objects. """ + def __init__(self, service): Collection.__init__(self, service, PATH_PROPERTIES, item=ConfigurationFile) if self.service.namespace.owner == '-' or self.service.namespace.app == '-': @@ -1742,7 +1735,7 @@ def __getitem__(self, key): response = self.get(key) return ConfigurationFile(self.service, PATH_CONF % key, state={'title': key}) except HTTPError as he: - if he.status == 404: # No entity matching key + if he.status == 404: # No entity matching key raise KeyError(key) else: raise @@ -1754,7 +1747,7 @@ def __contains__(self, key): response = self.get(key) return True except HTTPError as he: - if he.status == 404: # No entity matching key + if he.status == 404: # No entity matching key return False else: raise @@ -1773,15 +1766,15 @@ def create(self, name): # This has to be overridden to handle the plumbing of creating # a ConfigurationFile (which is a Collection) instead of some # Entity. 
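A round trip through Collection.create and Collection.delete, using the app name from the docstring above (connection details are placeholders):

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    new_app = service.apps.create("my_fake_app")
    print("my_fake_app" in service.apps)   # True
    service.apps.delete("my_fake_app")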
- if not isinstance(name, six.string_types): - raise ValueError("Invalid name: %s" % repr(name)) + if not isinstance(name, str): + raise ValueError(f"Invalid name: {repr(name)}") response = self.post(__conf=name) if response.status == 303: return self[name] elif response.status == 201: return ConfigurationFile(self.service, PATH_CONF % name, item=Stanza, state={'title': name}) else: - raise ValueError("Unexpected status code %s returned from creating a stanza" % response.status) + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") def delete(self, key): """Raises `IllegalOperationException`.""" @@ -1813,17 +1806,18 @@ def __len__(self): # The stanza endpoint returns all the keys at the same level in the XML as the eai information # and 'disabled', so to get an accurate length, we have to filter those out and have just # the stanza keys. - return len([x for x in self._state.content.keys() + return len([x for x in list(self._state.content.keys()) if not x.startswith('eai') and x != 'disabled']) class StoragePassword(Entity): """This class contains a storage password. """ + def __init__(self, service, path, **kwargs): state = kwargs.get('state', None) kwargs['skip_refresh'] = kwargs.get('skip_refresh', state is not None) - super(StoragePassword, self).__init__(service, path, **kwargs) + super().__init__(service, path, **kwargs) self._state = state @property @@ -1847,10 +1841,11 @@ class StoragePasswords(Collection): """This class provides access to the storage passwords from this Splunk instance. Retrieve this collection using :meth:`Service.storage_passwords`. """ + def __init__(self, service): if service.namespace.owner == '-' or service.namespace.app == '-': raise ValueError("StoragePasswords cannot have wildcards in namespace.") - super(StoragePasswords, self).__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) + super().__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) def create(self, password, username, realm=None): """ Creates a storage password. @@ -1867,8 +1862,8 @@ def create(self, password, username, realm=None): :return: The :class:`StoragePassword` object created. """ - if not isinstance(username, six.string_types): - raise ValueError("Invalid name: %s" % repr(username)) + if not isinstance(username, str): + raise ValueError(f"Invalid name: {repr(username)}") if realm is None: response = self.post(password=password, name=username) @@ -1876,7 +1871,7 @@ def create(self, password, username, realm=None): response = self.post(password=password, realm=realm, name=username) if response.status != 201: - raise ValueError("Unexpected status code %s returned from creating a stanza" % response.status) + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") entries = _load_atom_entries(response) state = _parse_atom_entry(entries[0]) @@ -1916,6 +1911,7 @@ def delete(self, username, realm=None): class AlertGroup(Entity): """This class represents a group of fired alerts for a saved search. Access it using the :meth:`alerts` property.""" + def __init__(self, service, path, **kwargs): Entity.__init__(self, service, path, **kwargs) @@ -1944,6 +1940,7 @@ class Indexes(Collection): """This class contains the collection of indexes in this Splunk instance. Retrieve this collection using :meth:`Service.indexes`. """ + def get_default(self): """ Returns the name of the default index. 
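A sketch of the storage-password API shown above; realm, username, and password values are placeholders:

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    sp = service.storage_passwords.create("hunter2", "svc_user", realm="myrealm")
    print(sp.realm, sp.username)
    service.storage_passwords.delete("svc_user", realm="myrealm")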
@@ -1971,6 +1968,7 @@ def delete(self, name): class Index(Entity): """This class represents an index and provides different operations, such as cleaning the index, writing to the index, and so forth.""" + def __init__(self, service, path, **kwargs): Entity.__init__(self, service, path, **kwargs) @@ -1987,26 +1985,26 @@ def attach(self, host=None, source=None, sourcetype=None): :return: A writable socket. """ - args = { 'index': self.name } + args = {'index': self.name} if host is not None: args['host'] = host if source is not None: args['source'] = source if sourcetype is not None: args['sourcetype'] = sourcetype - path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + urllib.parse.urlencode(args), skip_encode=True) + path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + parse.urlencode(args), skip_encode=True) - cookie_or_auth_header = "Authorization: Splunk %s\r\n" % \ - (self.service.token if self.service.token is _NoAuthenticationToken - else self.service.token.replace("Splunk ", "")) + cookie_header = self.service.token if self.service.token is _NoAuthenticationToken else self.service.token.replace("Splunk ", "") + cookie_or_auth_header = f"Authorization: Splunk {cookie_header}\r\n" # If we have cookie(s), use them instead of "Authorization: ..." if self.service.has_cookies(): - cookie_or_auth_header = "Cookie: %s\r\n" % _make_cookie_header(self.service.get_cookies().items()) + cookie_header = _make_cookie_header(list(self.service.get_cookies().items())) + cookie_or_auth_header = f"Cookie: {cookie_header}\r\n" # Since we need to stream to the index connection, we have to keep # the connection open and use the Splunk extension headers to note # the input mode sock = self.service.connect() - headers = [("POST %s HTTP/1.1\r\n" % str(self.service._abspath(path))).encode('utf-8'), - ("Host: %s:%s\r\n" % (self.service.host, int(self.service.port))).encode('utf-8'), + headers = [f"POST {str(self.service._abspath(path))} HTTP/1.1\r\n".encode('utf-8'), + f"Host: {self.service.host}:{int(self.service.port)}\r\n".encode('utf-8'), b"Accept-Encoding: identity\r\n", cookie_or_auth_header.encode('utf-8'), b"X-Splunk-Input-Mode: Streaming\r\n", @@ -2068,8 +2066,7 @@ def clean(self, timeout=60): ftp = self['frozenTimePeriodInSecs'] was_disabled_initially = self.disabled try: - if (not was_disabled_initially and \ - self.service.splunk_version < (5,)): + if not was_disabled_initially and self.service.splunk_version < (5,): # Need to disable the index first on Splunk 4.x, # but it doesn't work to disable it on 5.0. self.disable() @@ -2079,17 +2076,17 @@ def clean(self, timeout=60): # Wait until event count goes to 0. start = datetime.now() diff = timedelta(seconds=timeout) - while self.content.totalEventCount != '0' and datetime.now() < start+diff: + while self.content.totalEventCount != '0' and datetime.now() < start + diff: sleep(1) self.refresh() if self.content.totalEventCount != '0': - raise OperationError("Cleaning index %s took longer than %s seconds; timing out." % (self.name, timeout)) + raise OperationError( + f"Cleaning index {self.name} took longer than {timeout} seconds; timing out.") finally: # Restore original values self.update(maxTotalDataSizeMB=tds, frozenTimePeriodInSecs=ftp) - if (not was_disabled_initially and \ - self.service.splunk_version < (5,)): + if not was_disabled_initially and self.service.splunk_version < (5,): # Re-enable the index if it was originally enabled and we messed with it. 
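A sketch of streaming events over the writable socket that attach() returns (index name and event text are placeholders):

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    sock = service.indexes["main"].attach(sourcetype="my:events")
    try:
        sock.send(b"event one\r\n")
        sock.send(b"event two\r\n")
    finally:
        sock.close()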
self.enable() @@ -2117,7 +2114,7 @@ def submit(self, event, host=None, source=None, sourcetype=None): :return: The :class:`Index`. """ - args = { 'index': self.name } + args = {'index': self.name} if host is not None: args['host'] = host if source is not None: args['source'] = source if sourcetype is not None: args['sourcetype'] = sourcetype @@ -2151,6 +2148,7 @@ class Input(Entity): typed input classes and is also used when the client does not recognize an input kind. """ + def __init__(self, service, path, kind=None, **kwargs): # kind can be omitted (in which case it is inferred from the path) # Otherwise, valid values are the paths from data/inputs ("udp", @@ -2161,7 +2159,7 @@ def __init__(self, service, path, kind=None, **kwargs): path_segments = path.split('/') i = path_segments.index('inputs') + 1 if path_segments[i] == 'tcp': - self.kind = path_segments[i] + '/' + path_segments[i+1] + self.kind = path_segments[i] + '/' + path_segments[i + 1] else: self.kind = path_segments[i] else: @@ -2187,7 +2185,7 @@ def update(self, **kwargs): # UDP and TCP inputs require special handling due to their restrictToHost # field. For all other inputs kinds, we can dispatch to the superclass method. if self.kind not in ['tcp', 'splunktcp', 'tcp/raw', 'tcp/cooked', 'udp']: - return super(Input, self).update(**kwargs) + return super().update(**kwargs) else: # The behavior of restrictToHost is inconsistent across input kinds and versions of Splunk. # In Splunk 4.x, the name of the entity is only the port, independent of the value of @@ -2209,7 +2207,7 @@ def update(self, **kwargs): to_update['restrictToHost'] = self._state.content['restrictToHost'] # Do the actual update operation. - return super(Input, self).update(**to_update) + return super().update(**to_update) # Inputs is a "kinded" collection, which is a heterogenous collection where @@ -2236,13 +2234,13 @@ def __getitem__(self, key): response = self.get(self.kindpath(kind) + "/" + key) entries = self._load_list(response) if len(entries) > 1: - raise AmbiguousReferenceException("Found multiple inputs of kind %s named %s." % (kind, key)) + raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.") elif len(entries) == 0: raise KeyError((key, kind)) else: return entries[0] except HTTPError as he: - if he.status == 404: # No entity matching kind and key + if he.status == 404: # No entity matching kind and key raise KeyError((key, kind)) else: raise @@ -2256,20 +2254,21 @@ def __getitem__(self, key): response = self.get(kind + "/" + key) entries = self._load_list(response) if len(entries) > 1: - raise AmbiguousReferenceException("Found multiple inputs of kind %s named %s." % (kind, key)) + raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.") elif len(entries) == 0: pass else: - if candidate is not None: # Already found at least one candidate - raise AmbiguousReferenceException("Found multiple inputs named %s, please specify a kind" % key) + if candidate is not None: # Already found at least one candidate + raise AmbiguousReferenceException( + f"Found multiple inputs named {key}, please specify a kind") candidate = entries[0] except HTTPError as he: if he.status == 404: - pass # Just carry on to the next kind. + pass # Just carry on to the next kind. else: raise if candidate is None: - raise KeyError(key) # Never found a match. + raise KeyError(key) # Never found a match. 
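For a single event, submit() posts through the simple receiver endpoint instead of holding a socket open; a one-line sketch with placeholder values:

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    service.indexes["main"].submit("a single event", sourcetype="my:events", host="local")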
else: return candidate @@ -2295,7 +2294,7 @@ def __contains__(self, key): pass except HTTPError as he: if he.status == 404: - pass # Just carry on to the next kind. + pass # Just carry on to the next kind. else: raise return False @@ -2347,9 +2346,8 @@ def create(self, name, kind, **kwargs): name = UrlEncoded(name, encode_slash=True) path = _path( self.path + kindpath, - '%s:%s' % (kwargs['restrictToHost'], name) \ - if 'restrictToHost' in kwargs else name - ) + f"{kwargs['restrictToHost']}:{name}" if 'restrictToHost' in kwargs else name + ) return Input(self.service, path, kind) def delete(self, name, kind=None): @@ -2419,7 +2417,7 @@ def itemmeta(self, kind): :return: The metadata. :rtype: class:``splunklib.data.Record`` """ - response = self.get("%s/_new" % self._kindmap[kind]) + response = self.get(f"{self._kindmap[kind]}/_new") content = _load_atom(response, MATCH_ENTRY_CONTENT) return _parse_atom_metadata(content) @@ -2434,7 +2432,7 @@ def _get_kind_list(self, subpath=None): this_subpath = subpath + [entry.title] # The "all" endpoint doesn't work yet. # The "tcp/ssl" endpoint is not a real input collection. - if entry.title == 'all' or this_subpath == ['tcp','ssl']: + if entry.title == 'all' or this_subpath == ['tcp', 'ssl']: continue elif 'create' in [x.rel for x in entry.link]: path = '/'.join(subpath + [entry.title]) @@ -2556,18 +2554,18 @@ def list(self, *kinds, **kwargs): path = UrlEncoded(path, skip_encode=True) response = self.get(path, **kwargs) except HTTPError as he: - if he.status == 404: # No inputs of this kind + if he.status == 404: # No inputs of this kind return [] entities = [] entries = _load_atom_entries(response) if entries is None: - return [] # No inputs in a collection comes back with no feed or entry in the XML + return [] # No inputs in a collection comes back with no feed or entry in the XML for entry in entries: state = _parse_atom_entry(entry) # Unquote the URL, since all URL encoded in the SDK # should be of type UrlEncoded, and all str should not # be URL encoded. - path = urllib.parse.unquote(state.links.alternate) + path = parse.unquote(state.links.alternate) entity = Input(self.service, path, kind, state=state) entities.append(entity) return entities @@ -2582,18 +2580,18 @@ def list(self, *kinds, **kwargs): response = self.get(self.kindpath(kind), search=search) except HTTPError as e: if e.status == 404: - continue # No inputs of this kind + continue # No inputs of this kind else: raise entries = _load_atom_entries(response) - if entries is None: continue # No inputs to process + if entries is None: continue # No inputs to process for entry in entries: state = _parse_atom_entry(entry) # Unquote the URL, since all URL encoded in the SDK # should be of type UrlEncoded, and all str should not # be URL encoded. - path = urllib.parse.unquote(state.links.alternate) + path = parse.unquote(state.links.alternate) entity = Input(self.service, path, kind, state=state) entities.append(entity) if 'offset' in kwargs: @@ -2661,6 +2659,7 @@ def oneshot(self, path, **kwargs): class Job(Entity): """This class represents a search job.""" + def __init__(self, service, sid, **kwargs): path = PATH_JOBS + sid Entity.__init__(self, service, path, skip_refresh=True, **kwargs) @@ -2933,6 +2932,7 @@ def unpause(self): class Jobs(Collection): """This class represents a collection of search jobs. 
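A sketch of creating a host-restricted TCP input, which exercises the f-string path construction in Inputs.create above (port and host are placeholders):

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    tcp_in = service.inputs.create("9999", "tcp", restrictToHost="localhost")
    print(tcp_in.name)                               # localhost:9999 on modern Splunk versions
    service.inputs.delete("localhost:9999", kind="tcp")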
Retrieve this collection using :meth:`Service.jobs`.""" + def __init__(self, service): Collection.__init__(self, service, PATH_JOBS, item=Job) # The count value to say list all the contents of this @@ -3086,6 +3086,7 @@ def oneshot(self, query, **params): class Loggers(Collection): """This class represents a collection of service logging categories. Retrieve this collection using :meth:`Service.loggers`.""" + def __init__(self, service): Collection.__init__(self, service, PATH_LOGGER) @@ -3117,6 +3118,7 @@ class ModularInputKind(Entity): """This class contains the different types of modular inputs. Retrieve this collection using :meth:`Service.modular_input_kinds`. """ + def __contains__(self, name): args = self.state.content['endpoints']['args'] if name in args: @@ -3154,6 +3156,7 @@ def update(self, **kwargs): class SavedSearch(Entity): """This class represents a saved search.""" + def __init__(self, service, path, **kwargs): Entity.__init__(self, service, path, **kwargs) @@ -3307,6 +3310,7 @@ def unsuppress(self): class SavedSearches(Collection): """This class represents a collection of saved searches. Retrieve this collection using :meth:`Service.saved_searches`.""" + def __init__(self, service): Collection.__init__( self, service, PATH_SAVED_SEARCHES, item=SavedSearch) @@ -3331,6 +3335,7 @@ def create(self, name, search, **kwargs): class Settings(Entity): """This class represents configuration settings for a Splunk service. Retrieve this collection using :meth:`Service.settings`.""" + def __init__(self, service, **kwargs): Entity.__init__(self, service, "/services/server/settings", **kwargs) @@ -3352,6 +3357,7 @@ def update(self, **kwargs): class User(Entity): """This class represents a Splunk user. """ + @property def role_entities(self): """Returns a list of roles assigned to this user. @@ -3368,6 +3374,7 @@ class Users(Collection): """This class represents the collection of Splunk users for this instance of Splunk. Retrieve this collection using :meth:`Service.users`. """ + def __init__(self, service): Collection.__init__(self, service, PATH_USERS, item=User) @@ -3407,8 +3414,8 @@ def create(self, username, password, roles, **params): boris = users.create("boris", "securepassword", roles="user") hilda = users.create("hilda", "anotherpassword", roles=["user","power"]) """ - if not isinstance(username, six.string_types): - raise ValueError("Invalid username: %s" % str(username)) + if not isinstance(username, str): + raise ValueError(f"Invalid username: {str(username)}") username = username.lower() self.post(name=username, password=password, roles=roles, **params) # splunkd doesn't return the user in the POST response body, @@ -3418,7 +3425,7 @@ def create(self, username, password, roles, **params): state = _parse_atom_entry(entry) entity = self.item( self.service, - urllib.parse.unquote(state.links.alternate), + parse.unquote(state.links.alternate), state=state) return entity @@ -3437,6 +3444,7 @@ def delete(self, name): class Role(Entity): """This class represents a user role. """ + def grant(self, *capabilities_to_grant): """Grants additional capabilities to this role. @@ -3487,8 +3495,8 @@ def revoke(self, *capabilities_to_revoke): for c in old_capabilities: if c not in capabilities_to_revoke: new_capabilities.append(c) - if new_capabilities == []: - new_capabilities = '' # Empty lists don't get passed in the body, so we have to force an empty argument. 
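Combining the user and role APIs above, a short sketch (names, password, and capability are placeholders drawn from the docstrings):

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    hilda = service.users.create("hilda", "anotherpassword", roles=["user", "power"])
    service.roles["power"].grant("rtsearch")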
+ if not new_capabilities: + new_capabilities = '' # Empty lists don't get passed in the body, so we have to force an empty argument. self.post(capabilities=new_capabilities) return self @@ -3496,6 +3504,7 @@ def revoke(self, *capabilities_to_revoke): class Roles(Collection): """This class represents the collection of roles in the Splunk instance. Retrieve this collection using :meth:`Service.roles`.""" + def __init__(self, service): return Collection.__init__(self, service, PATH_ROLES, item=Role) @@ -3530,8 +3539,8 @@ def create(self, name, **params): roles = c.roles paltry = roles.create("paltry", imported_roles="user", defaultApp="search") """ - if not isinstance(name, six.string_types): - raise ValueError("Invalid role name: %s" % str(name)) + if not isinstance(name, str): + raise ValueError(f"Invalid role name: {str(name)}") name = name.lower() self.post(name=name, **params) # splunkd doesn't return the user in the POST response body, @@ -3541,7 +3550,7 @@ def create(self, name, **params): state = _parse_atom_entry(entry) entity = self.item( self.service, - urllib.parse.unquote(state.links.alternate), + parse.unquote(state.links.alternate), state=state) return entity @@ -3558,6 +3567,7 @@ def delete(self, name): class Application(Entity): """Represents a locally-installed Splunk app.""" + @property def setupInfo(self): """Returns the setup information for the app. @@ -3574,11 +3584,12 @@ def updateInfo(self): """Returns any update information that is available for the app.""" return self._run_action("update") + class KVStoreCollections(Collection): def __init__(self, service): Collection.__init__(self, service, 'storage/collections/config', item=KVStoreCollection) - def create(self, name, indexes = {}, fields = {}, **kwargs): + def create(self, name, indexes={}, fields={}, **kwargs): """Creates a KV Store Collection. :param name: name of collection to create @@ -3592,14 +3603,15 @@ def create(self, name, indexes = {}, fields = {}, **kwargs): :return: Result of POST request """ - for k, v in six.iteritems(indexes): + for k, v in list(indexes.items()): if isinstance(v, dict): v = json.dumps(v) kwargs['index.' + k] = v - for k, v in six.iteritems(fields): + for k, v in list(fields.items()): kwargs['field.' + k] = v return self.post(name=name, **kwargs) + class KVStoreCollection(Entity): @property def data(self): @@ -3620,7 +3632,7 @@ def update_index(self, name, value): :return: Result of POST request """ kwargs = {} - kwargs['index.' + name] = value if isinstance(value, six.string_types) else json.dumps(value) + kwargs['index.' + name] = value if isinstance(value, str) else json.dumps(value) return self.post(**kwargs) def update_field(self, name, value): @@ -3637,7 +3649,8 @@ def update_field(self, name, value): kwargs['field.' + name] = value return self.post(**kwargs) -class KVStoreCollectionData(object): + +class KVStoreCollectionData: """This class represents the data endpoint for a KVStoreCollection. 
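A sketch of creating a typed KV Store collection via the method above; the collection name and field types are placeholders (dict-valued index definitions would be json.dumps'd the same way):

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    service.kvstore_owner = "nobody"
    service.kvstore.create("mycoll", fields={"id": "number", "name": "string"})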
Retrieve using :meth:`KVStoreCollection.data` @@ -3670,7 +3683,7 @@ def query(self, **query): :rtype: ``array`` """ - for key, value in query.items(): + for key, value in list(query.items()): if isinstance(query[key], dict): query[key] = json.dumps(value) @@ -3700,7 +3713,8 @@ def insert(self, data): """ if isinstance(data, dict): data = json.dumps(data) - return json.loads(self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + return json.loads( + self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) def delete(self, query=None): """ @@ -3738,7 +3752,8 @@ def update(self, id, data): """ if isinstance(data, dict): data = json.dumps(data) - return json.loads(self._post(UrlEncoded(str(id), encode_slash=True), headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + return json.loads(self._post(UrlEncoded(str(id), encode_slash=True), headers=KVStoreCollectionData.JSON_HEADER, + body=data).body.read().decode('utf-8')) def batch_find(self, *dbqueries): """ @@ -3755,7 +3770,8 @@ def batch_find(self, *dbqueries): data = json.dumps(dbqueries) - return json.loads(self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + return json.loads( + self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) def batch_save(self, *documents): """ @@ -3772,4 +3788,5 @@ def batch_save(self, *documents): data = json.dumps(documents) - return json.loads(self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + return json.loads( + self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) From 66dc35179d2ae1c8ecf2313671384c4e105809a6 Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Wed, 11 May 2022 12:19:17 +0530 Subject: [PATCH 11/60] Update data.py --- splunklib/data.py | 97 +++++++++++++++++++++++++---------------------- 1 file changed, 52 insertions(+), 45 deletions(-) diff --git a/splunklib/data.py b/splunklib/data.py index f9ffb869..14e8a793 100644 --- a/splunklib/data.py +++ b/splunklib/data.py @@ -12,16 +12,13 @@ # License for the specific language governing permissions and limitations # under the License. -"""The **splunklib.data** module reads the responses from splunkd in Atom Feed +"""The **splunklib.data** module reads the responses from splunkd in Atom Feed format, which is the format used by most of the REST API. """ -from __future__ import absolute_import -import sys from xml.etree.ElementTree import XML -from splunklib import six -__all__ = ["load"] +__all__ = ["load", "record"] # LNAME refers to element names without namespaces; XNAME is the same # name, but with an XML namespace. @@ -36,33 +33,41 @@ XNAME_KEY = XNAMEF_REST % LNAME_KEY XNAME_LIST = XNAMEF_REST % LNAME_LIST + # Some responses don't use namespaces (eg: search/parse) so we look for # both the extended and local versions of the following names. 
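A round trip through the data endpoint's methods (collection name and documents are placeholders; the server assigns the _key):

    import splunklib.client as client

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    data = service.kvstore["mycoll"].data
    key = data.insert({"id": 1, "name": "a"})["_key"]
    print(data.query(query={"id": 1}))      # dict-valued params are json-dumped by query()
    data.update(key, {"id": 1, "name": "b"})
    data.delete()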
+ def isdict(name): - return name == XNAME_DICT or name == LNAME_DICT + return name in (XNAME_DICT, LNAME_DICT) + def isitem(name): - return name == XNAME_ITEM or name == LNAME_ITEM + return name in (XNAME_ITEM, LNAME_ITEM) + def iskey(name): - return name == XNAME_KEY or name == LNAME_KEY + return name in (XNAME_KEY, LNAME_KEY) + def islist(name): - return name == XNAME_LIST or name == LNAME_LIST + return name in (XNAME_LIST, LNAME_LIST) + def hasattrs(element): return len(element.attrib) > 0 + def localname(xname): rcurly = xname.find('}') - return xname if rcurly == -1 else xname[rcurly+1:] + return xname if rcurly == -1 else xname[rcurly + 1:] + def load(text, match=None): - """This function reads a string that contains the XML of an Atom Feed, then - returns the - data in a native Python structure (a ``dict`` or ``list``). If you also - provide a tag name or path to match, only the matching sub-elements are + """This function reads a string that contains the XML of an Atom Feed, then + returns the + data in a native Python structure (a ``dict`` or ``list``). If you also + provide a tag name or path to match, only the matching sub-elements are loaded. :param text: The XML text to load. @@ -78,30 +83,28 @@ def load(text, match=None): 'names': {} } - # Convert to unicode encoding in only python 2 for xml parser - if(sys.version_info < (3, 0, 0) and isinstance(text, unicode)): - text = text.encode('utf-8') - root = XML(text) items = [root] if match is None else root.findall(match) count = len(items) - if count == 0: + if count == 0: return None - elif count == 1: + elif count == 1: return load_root(items[0], nametable) else: return [load_root(item, nametable) for item in items] + # Load the attributes of the given element. def load_attrs(element): if not hasattrs(element): return None attrs = record() - for key, value in six.iteritems(element.attrib): + for key, value in list(element.attrib.items()): attrs[key] = value return attrs + # Parse a element and return a Python dict -def load_dict(element, nametable = None): +def load_dict(element, nametable=None): value = record() children = list(element) for child in children: @@ -110,6 +113,7 @@ def load_dict(element, nametable = None): value[name] = load_value(child, nametable) return value + # Loads the given elements attrs & value into single merged dict. def load_elem(element, nametable=None): name = localname(element.tag) @@ -118,12 +122,12 @@ def load_elem(element, nametable=None): if attrs is None: return name, value if value is None: return name, attrs # If value is simple, merge into attrs dict using special key - if isinstance(value, six.string_types): + if isinstance(value, str): attrs["$text"] = value return name, attrs # Both attrs & value are complex, so merge the two dicts, resolving collisions. collision_keys = [] - for key, val in six.iteritems(attrs): + for key, val in list(attrs.items()): if key in value and key in collision_keys: value[key].append(val) elif key in value and key not in collision_keys: @@ -133,6 +137,7 @@ def load_elem(element, nametable=None): value[key] = val return name, value + # Parse a element and return a Python list def load_list(element, nametable=None): assert islist(element.tag) @@ -143,6 +148,7 @@ def load_list(element, nametable=None): value.append(load_value(child, nametable)) return value + # Load the given root element. 
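A sketch of load() on a namespace-free fragment, which the helpers above accept alongside the REST-namespaced forms (the XML is made up):

    from splunklib.data import load

    r = load("<dict><key name='status'>ok</key></dict>")
    print(r.status)    # ok -- dict elements load as Records with dot access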
def load_root(element, nametable=None): tag = element.tag @@ -151,6 +157,7 @@ def load_root(element, nametable=None): k, v = load_elem(element, nametable) return Record.fromkv(k, v) + # Load the children of the given element. def load_value(element, nametable=None): children = list(element) @@ -159,7 +166,7 @@ def load_value(element, nametable=None): # No children, assume a simple text value if count == 0: text = element.text - if text is None: + if text is None: return None if len(text.strip()) == 0: @@ -179,7 +186,7 @@ def load_value(element, nametable=None): # If we have seen this name before, promote the value to a list if name in value: current = value[name] - if not isinstance(current, list): + if not isinstance(current, list): value[name] = [current] value[name].append(item) else: @@ -187,23 +194,24 @@ def load_value(element, nametable=None): return value + # A generic utility that enables "dot" access to dicts class Record(dict): - """This generic utility class enables dot access to members of a Python + """This generic utility class enables dot access to members of a Python dictionary. - Any key that is also a valid Python identifier can be retrieved as a field. - So, for an instance of ``Record`` called ``r``, ``r.key`` is equivalent to - ``r['key']``. A key such as ``invalid-key`` or ``invalid.key`` cannot be - retrieved as a field, because ``-`` and ``.`` are not allowed in + Any key that is also a valid Python identifier can be retrieved as a field. + So, for an instance of ``Record`` called ``r``, ``r.key`` is equivalent to + ``r['key']``. A key such as ``invalid-key`` or ``invalid.key`` cannot be + retrieved as a field, because ``-`` and ``.`` are not allowed in identifiers. - Keys of the form ``a.b.c`` are very natural to write in Python as fields. If - a group of keys shares a prefix ending in ``.``, you can retrieve keys as a + Keys of the form ``a.b.c`` are very natural to write in Python as fields. If + a group of keys shares a prefix ending in ``.``, you can retrieve keys as a nested dictionary by calling only the prefix. For example, if ``r`` contains keys ``'foo'``, ``'bar.baz'``, and ``'bar.qux'``, ``r.bar`` returns a record - with the keys ``baz`` and ``qux``. If a key contains multiple ``.``, each - one is placed into a nested dictionary, so you can write ``r.bar.qux`` or + with the keys ``baz`` and ``qux``. If a key contains multiple ``.``, each + one is placed into a nested dictionary, so you can write ``r.bar.qux`` or ``r['bar.qux']`` interchangeably. """ sep = '.' @@ -215,7 +223,7 @@ def __call__(self, *args): def __getattr__(self, name): try: return self[name] - except KeyError: + except KeyError: raise AttributeError(name) def __delattr__(self, name): @@ -235,7 +243,7 @@ def __getitem__(self, key): return dict.__getitem__(self, key) key += self.sep result = record() - for k,v in six.iteritems(self): + for k, v in list(self.items()): if not k.startswith(key): continue suffix = k[len(key):] @@ -250,17 +258,16 @@ def __getitem__(self, key): else: result[suffix] = v if len(result) == 0: - raise KeyError("No key or prefix: %s" % key) + raise KeyError(f"No key or prefix: {key}") return result - -def record(value=None): - """This function returns a :class:`Record` instance constructed with an + +def record(value=None): + """This function returns a :class:`Record` instance constructed with an initial value that you provide. - - :param `value`: An initial record value. - :type `value`: ``dict`` + + :param value: An initial record value. 
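The dotted-prefix behavior documented above, shown concretely:

    from splunklib.data import record

    r = record({'foo': 1, 'bar.baz': 2, 'bar.qux': 3})
    print(r.foo)        # 1
    print(r.bar.qux)    # 3 -- the 'bar.' prefix expands into a nested record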
+ :type value: ``dict`` """ if value is None: value = {} return Record(value) - From a808a031ee8353c60811cb87f6d7ee2ab9b3865f Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Wed, 11 May 2022 12:19:20 +0530 Subject: [PATCH 12/60] Update results.py --- splunklib/results.py | 43 +++++++------------------------------------ 1 file changed, 7 insertions(+), 36 deletions(-) diff --git a/splunklib/results.py b/splunklib/results.py index 8543ab0d..f9b976cc 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -32,27 +32,15 @@ print "Results are a preview: %s" % reader.is_preview """ -from __future__ import absolute_import - from io import BufferedReader, BytesIO -from splunklib import six - from splunklib.six import deprecated -try: - import xml.etree.cElementTree as et -except: - import xml.etree.ElementTree as et +import xml.etree.ElementTree as et from collections import OrderedDict from json import loads as json_loads -try: - from splunklib.six.moves import cStringIO as StringIO -except: - from splunklib.six import StringIO - __all__ = [ "ResultsReader", "Message", @@ -76,7 +64,7 @@ def __init__(self, type_, message): self.message = message def __repr__(self): - return "%s: %s" % (self.type, self.message) + return f"{self.type}: {self.message}" def __eq__(self, other): return (self.type, self.message) == (other.type, other.message) @@ -264,25 +252,7 @@ def _parse_results(self, stream): elem.clear() elif elem.tag in ('text', 'v') and event == 'end': - try: - text = "".join(elem.itertext()) - except AttributeError: - # Assume we're running in Python < 2.7, before itertext() was added - # So we'll define it here - - def __itertext(self): - tag = self.tag - if not isinstance(tag, six.string_types) and tag is not None: - return - if self.text: - yield self.text - for e in self: - for s in __itertext(e): - yield s - if e.tail: - yield e.tail - - text = "".join(__itertext(elem)) + text = "".join(elem.itertext()) values.append(text) elem.clear() @@ -298,11 +268,10 @@ def __itertext(self): # splunk that is described in __init__. if 'no element found' in pe.msg: return - else: - raise + raise -class JSONResultsReader(object): +class JSONResultsReader: """This class returns dictionaries and Splunk messages from a JSON results stream. 
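A sketch of the reader's intended use, pairing it with a JSON-mode one-shot search (the query and connection details are placeholders):

    import splunklib.client as client
    from splunklib.results import JSONResultsReader, Message

    service = client.connect(host="localhost", username="admin", password="changeme")   # placeholders
    stream = service.jobs.oneshot("search index=_internal | head 3", output_mode="json")
    for item in JSONResultsReader(stream):
        if isinstance(item, Message):
            print(item.type, item.message)    # diagnostic messages
        else:
            print(item)                       # a dict of field names to values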
``JSONResultsReader`` is iterable, and returns a ``dict`` for results, or a @@ -355,6 +324,8 @@ def next(self): def _parse_results(self, stream): """Parse results and messages out of *stream*.""" + msg_type = None + text = None for line in stream.readlines(): strip_line = line.strip() if strip_line.__len__() == 0: continue From 7e63b6af6ffb3b9ae2ab066e4981eb1928f54f94 Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Wed, 11 May 2022 12:19:23 +0530 Subject: [PATCH 13/60] Delete six.py --- splunklib/six.py | 993 ----------------------------------------------- 1 file changed, 993 deletions(-) delete mode 100644 splunklib/six.py diff --git a/splunklib/six.py b/splunklib/six.py deleted file mode 100644 index d13e50c9..00000000 --- a/splunklib/six.py +++ /dev/null @@ -1,993 +0,0 @@ -# Copyright (c) 2010-2020 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.14.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. 
- delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. 
- - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - 
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." + attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - 
setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del 
attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def 
create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - del io - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" - _assertNotRegex = "assertNotRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -def assertNotRegex(self, *args, **kwargs): - return getattr(self, _assertNotRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def 
reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] > (3,): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. - if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - # This does exactly the same what the :func:`py3:functools.update_wrapper` - # function does on Python versions after 3.2. It sets the ``__wrapped__`` - # attribute on ``wrapper`` object and it doesn't raise an error if any of - # the attributes mentioned in ``assigned`` and ``updated`` are missing on - # ``wrapped`` object. 
- def _update_wrapper(wrapper, wrapped, - assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - for attr in assigned: - try: - value = getattr(wrapped, attr) - except AttributeError: - continue - else: - setattr(wrapper, attr, value) - for attr in updated: - getattr(wrapper, attr).update(getattr(wrapped, attr, {})) - wrapper.__wrapped__ = wrapped - return wrapper - _update_wrapper.__doc__ = functools.update_wrapper.__doc__ - - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - return functools.partial(_update_wrapper, wrapped=wrapped, - assigned=assigned, updated=updated) - wraps.__doc__ = functools.wraps.__doc__ - -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(type): - - def __new__(cls, name, this_bases, d): - if sys.version_info[:2] >= (3, 7): - # This version introduced PEP 560 that requires a bit - # of extra care (we mimic what is done by __build_class__). - resolved_bases = types.resolve_bases(bases) - if resolved_bases is not bases: - d['__orig_bases__'] = bases - else: - resolved_bases = bases - return meta(name, resolved_bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - if hasattr(cls, '__qualname__'): - orig_vars['__qualname__'] = cls.__qualname__ - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """Coerce **s** to six.binary_type. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, text_type): - return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) - return s - - -def ensure_text(s, encoding='utf-8', errors='strict'): - """Coerce *s* to six.text_type. 
- - For Python 2: - - `unicode` -> `unicode` - - `str` -> `unicode` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, binary_type): - return s.decode(encoding, errors) - elif isinstance(s, text_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def python_2_unicode_compatible(klass): - """ - A class decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) - -import warnings - -def deprecated(message): - def deprecated_decorator(func): - def deprecated_func(*args, **kwargs): - warnings.warn("{} is a deprecated function. {}".format(func.__name__, message), - category=DeprecationWarning, - stacklevel=2) - warnings.simplefilter('default', DeprecationWarning) - return func(*args, **kwargs) - return deprecated_func - return deprecated_decorator \ No newline at end of file From 4735248ffb1203820c4c0e7a9f713c96045c91ca Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 11 May 2022 15:35:01 +0530 Subject: [PATCH 14/60] Update argument.py --- splunklib/modularinput/argument.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splunklib/modularinput/argument.py b/splunklib/modularinput/argument.py index e8aca493..645ce330 100644 --- a/splunklib/modularinput/argument.py +++ b/splunklib/modularinput/argument.py @@ -14,8 +14,8 @@ import xml.etree.ElementTree as ET - class Argument: + """Class representing an argument to a modular input kind. 
``Argument`` is meant to be used with ``Scheme`` to generate an XML From a066e7ca0069f1994440107d1094b3367f44104e Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 11 May 2022 15:36:43 +0530 Subject: [PATCH 15/60] python 3.9 --- Makefile | 2 +- setup.py | 5 +---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 9f1bbd8b..2c56bbdb 100644 --- a/Makefile +++ b/Makefile @@ -34,7 +34,7 @@ docs: .PHONY: test test: @echo "$(ATTN_COLOR)==> test $(NO_COLOR)" - @tox -e py37,py39 + @tox -e py39 .PHONY: test_specific test_specific: diff --git a/setup.py b/setup.py index 284c5098..54856a28 100755 --- a/setup.py +++ b/setup.py @@ -24,10 +24,7 @@ failed = False def run_test_suite(): - try: - import unittest2 as unittest - except ImportError: - import unittest + import unittest def mark_failed(): global failed From 5340cfc8123a1e5710fb686e85b9ab61aaad7923 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 11 May 2022 15:36:53 +0530 Subject: [PATCH 16/60] Update tox.ini --- tox.ini | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 00ad22b8..a69003ac 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = clean,docs,py27,py37 +envlist = clean,docs,py37,py39 skipsdist = {env:TOXBUILD:false} [testenv:pep8] @@ -30,7 +30,6 @@ allowlist_externals = make deps = pytest pytest-cov xmlrunner - unittest2 unittest-xml-reporting python-dotenv deprecation From 893b0d722f16083c338056c19adaf26c8df6d766 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 11 May 2022 16:19:16 +0530 Subject: [PATCH 17/60] changes --- tests/test_collection.py | 1 - tests/test_conf.py | 3 +- tests/test_input.py | 12 +++--- tests/test_kvstore_batch.py | 3 +- tests/test_kvstore_data.py | 1 - tests/test_results.py | 1 - tests/test_saved_search.py | 1 - tests/test_service.py | 8 ++-- tests/test_storage_passwords.py | 1 - tests/test_user.py | 1 - tests/test_utils.py | 67 ++++++++++++++++----------------- 11 files changed, 44 insertions(+), 55 deletions(-) diff --git a/tests/test_collection.py b/tests/test_collection.py index 2423d77b..bf74e30c 100755 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -20,7 +20,6 @@ from contextlib import contextmanager from splunklib import client -from splunklib.six.moves import range collections = [ 'apps', diff --git a/tests/test_conf.py b/tests/test_conf.py index d3e33061..e5429cfa 100755 --- a/tests/test_conf.py +++ b/tests/test_conf.py @@ -17,7 +17,6 @@ from tests import testlib from splunklib import client -from splunklib import six class TestRead(testlib.SDKTestCase): def test_read(self): @@ -88,7 +87,7 @@ def test_confs(self): testlib.tmpname(): testlib.tmpname()} stanza.submit(values) stanza.refresh() - for key, value in six.iteritems(values): + for key, value in values.items(): self.assertTrue(key in stanza) self.assertEqual(value, stanza[key]) diff --git a/tests/test_input.py b/tests/test_input.py index 0fa23f33..f6d01b29 100755 --- a/tests/test_input.py +++ b/tests/test_input.py @@ -18,7 +18,7 @@ from splunklib.binding import HTTPError from tests import testlib -from splunklib import six, client +from splunklib import client @@ -200,7 +200,7 @@ def setUp(self): def tearDown(self): super().tearDown() - for entity in six.itervalues(self._test_entities): + for entity in self._test_entities.values(): try: self.service.inputs.delete( kind=entity.kind, @@ -231,7 +231,7 @@ def test_lists_modular_inputs(self): def test_create(self): inputs = self.service.inputs - for entity in 
six.itervalues(self._test_entities): + for entity in self._test_entities.values(): self.check_entity(entity) self.assertTrue(isinstance(entity, client.Input)) @@ -242,7 +242,7 @@ def test_get_kind_list(self): def test_read(self): inputs = self.service.inputs - for this_entity in six.itervalues(self._test_entities): + for this_entity in self._test_entities.values(): kind, name = this_entity.kind, this_entity.name read_entity = inputs[name, kind] self.assertEqual(this_entity.kind, read_entity.kind) @@ -258,7 +258,7 @@ def test_read_indiviually(self): def test_update(self): inputs = self.service.inputs - for entity in six.itervalues(self._test_entities): + for entity in self._test_entities.values(): kind, name = entity.kind, entity.name kwargs = {'host': 'foo'} entity.update(**kwargs) @@ -269,7 +269,7 @@ def test_update(self): def test_delete(self): inputs = self.service.inputs remaining = len(self._test_entities)-1 - for input_entity in six.itervalues(self._test_entities): + for input_entity in self._test_entities.values(): name = input_entity.name kind = input_entity.kind self.assertTrue(name in inputs) diff --git a/tests/test_kvstore_batch.py b/tests/test_kvstore_batch.py index b32ee4d7..74a0c3b9 100755 --- a/tests/test_kvstore_batch.py +++ b/tests/test_kvstore_batch.py @@ -15,7 +15,6 @@ # under the License. from tests import testlib -from splunklib.six.moves import range from splunklib import client @@ -26,7 +25,7 @@ def setUp(self): # self.service.namespace['owner'] = 'nobody' self.service.namespace['app'] = 'search' confs = self.service.kvstore - if ('test' in confs): + if 'test' in confs: confs['test'].delete() confs.create('test') diff --git a/tests/test_kvstore_data.py b/tests/test_kvstore_data.py index 7e7a147a..2f36032f 100755 --- a/tests/test_kvstore_data.py +++ b/tests/test_kvstore_data.py @@ -16,7 +16,6 @@ import json from tests import testlib -from splunklib.six.moves import range from splunklib import client diff --git a/tests/test_results.py b/tests/test_results.py index a55c037b..03595124 100755 --- a/tests/test_results.py +++ b/tests/test_results.py @@ -16,7 +16,6 @@ from io import BytesIO -from splunklib.six import StringIO from tests import testlib from time import sleep from splunklib import results diff --git a/tests/test_saved_search.py b/tests/test_saved_search.py index 1cbb664d..ee4bb83b 100755 --- a/tests/test_saved_search.py +++ b/tests/test_saved_search.py @@ -21,7 +21,6 @@ from time import sleep from splunklib import client -from splunklib.six.moves import zip import pytest diff --git a/tests/test_service.py b/tests/test_service.py index 436438df..fb6e7730 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -14,8 +14,8 @@ # License for the specific language governing permissions and limitations # under the License. -from tests import testlib import unittest +from tests import testlib from splunklib import client from splunklib.binding import AuthenticationError @@ -56,7 +56,7 @@ def test_info_with_namespace(self): try: self.assertEqual(self.service.info.licenseState, 'OK') except HTTPError as he: - self.fail("Couldn't get the server info, probably got a 403! %s" % he.message) + self.fail(f"Couldn't get the server info, probably got a 403! 
{he.message}") self.service.namespace["owner"] = owner self.service.namespace["app"] = app @@ -186,7 +186,7 @@ def setUp(self): def assertIsNotNone(self, obj, msg=None): if obj is None: - raise self.failureException(msg or '%r is not None' % obj) + raise self.failureException(msg or f'{obj} is not None') def test_login_and_store_cookie(self): self.assertIsNotNone(self.service.get_cookies()) @@ -363,5 +363,5 @@ def test_proper_namespace_with_service_namespace(self): if __name__ == "__main__": - import unittest + unittest.main() diff --git a/tests/test_storage_passwords.py b/tests/test_storage_passwords.py index 95ac037b..578b4fb0 100644 --- a/tests/test_storage_passwords.py +++ b/tests/test_storage_passwords.py @@ -15,7 +15,6 @@ # under the License. from tests import testlib -import logging from splunklib import client diff --git a/tests/test_user.py b/tests/test_user.py index 14013301..38958814 100755 --- a/tests/test_user.py +++ b/tests/test_user.py @@ -15,7 +15,6 @@ # under the License. from tests import testlib -import logging from splunklib import client diff --git a/tests/test_utils.py b/tests/test_utils.py index 4c01b3cc..52137f72 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,19 +1,15 @@ from tests import testlib -try: - from utils import * -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - +from utils import * TEST_DICT = { - 'username':'admin', - 'password':'changeme', - 'port' : 8089, - 'host' : 'localhost', - 'scheme': 'https' - } + 'username': 'admin', + 'password': 'changeme', + 'port': 8089, + 'host': 'localhost', + 'scheme': 'https' +} + class TestUtils(testlib.SDKTestCase): def setUp(self): @@ -22,16 +18,16 @@ def setUp(self): # Test dslice when a dict is passed to change key names def test_dslice_dict_args(self): args = { - 'username':'user-name', - 'password':'new_password', - 'port': 'admin_port', - 'foo':'bar' - } + 'username': 'user-name', + 'password': 'new_password', + 'port': 'admin_port', + 'foo': 'bar' + } expected = { - 'user-name':'admin', - 'new_password':'changeme', - 'admin_port':8089 - } + 'user-name': 'admin', + 'new_password': 'changeme', + 'admin_port': 8089 + } self.assertTrue(expected == dslice(TEST_DICT, args)) # Test dslice when a list is passed @@ -42,40 +38,41 @@ def test_dslice_list_args(self): 'port', 'host', 'foo' - ] + ] expected = { - 'username':'admin', - 'password':'changeme', - 'port':8089, - 'host':'localhost' - } + 'username': 'admin', + 'password': 'changeme', + 'port': 8089, + 'host': 'localhost' + } self.assertTrue(expected == dslice(TEST_DICT, test_list)) # Test dslice when a single string is passed def test_dslice_arg(self): test_arg = 'username' expected = { - 'username':'admin' - } + 'username': 'admin' + } self.assertTrue(expected == dslice(TEST_DICT, test_arg)) # Test dslice using all three types of arguments def test_dslice_all_args(self): test_args = [ - {'username':'new_username'}, + {'username': 'new_username'}, ['password', - 'host'], + 'host'], 'port' ] expected = { - 'new_username':'admin', - 'password':'changeme', - 'host':'localhost', - 'port':8089 + 'new_username': 'admin', + 'password': 'changeme', + 'host': 'localhost', + 'port': 8089 } self.assertTrue(expected == dslice(TEST_DICT, *test_args)) if __name__ == "__main__": import unittest + unittest.main() From ec6907a7c93cf00d44533dc29675a1f3fc72abe0 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 11 May 2022 16:22:54 +0530 Subject: [PATCH 
18/60] test changes for six.py removal --- tests/searchcommands/test_builtin_options.py | 2 +- .../test_configuration_settings.py | 17 ++- tests/searchcommands/test_decorators.py | 134 +++++++++--------- tests/searchcommands/test_internals_v1.py | 12 +- tests/searchcommands/test_internals_v2.py | 21 ++- tests/testlib.py | 19 +-- 6 files changed, 98 insertions(+), 107 deletions(-) diff --git a/tests/searchcommands/test_builtin_options.py b/tests/searchcommands/test_builtin_options.py index 122d650b..07a343ef 100644 --- a/tests/searchcommands/test_builtin_options.py +++ b/tests/searchcommands/test_builtin_options.py @@ -22,7 +22,7 @@ from unittest import main, TestCase import pytest -from splunklib.six.moves import cStringIO as StringIO +from io import StringIO from splunklib.searchcommands import environment diff --git a/tests/searchcommands/test_configuration_settings.py b/tests/searchcommands/test_configuration_settings.py index bf810edf..65d0d3a4 100644 --- a/tests/searchcommands/test_configuration_settings.py +++ b/tests/searchcommands/test_configuration_settings.py @@ -28,7 +28,6 @@ from unittest import main, TestCase import pytest from splunklib.searchcommands.decorators import Configuration -from splunklib import six @@ -48,7 +47,7 @@ def generate(self): command._protocol_version = 1 self.assertTrue( - list(six.iteritems(command.configuration)), + list(command.configuration.items()), [('generating', True)]) self.assertIs(command.configuration.generates_timeorder, None) @@ -71,7 +70,7 @@ def generate(self): self.fail('Expected AttributeError') self.assertEqual( - list(six.iteritems(command.configuration)), + list(command.configuration.items()), [('generates_timeorder', True), ('generating', True), ('local', True), ('retainsevents', True), ('streaming', True)]) @@ -79,7 +78,7 @@ def generate(self): command._protocol_version = 2 self.assertEqual( - list(six.iteritems(command.configuration)), + list(command.configuration.items()), [('generating', True), ('type', 'stateful')]) self.assertIs(command.configuration.distributed, False) @@ -98,7 +97,7 @@ def generate(self): self.fail('Expected AttributeError') self.assertEqual( - list(six.iteritems(command.configuration)), + list(command.configuration.items()), [('generating', True), ('type', 'streaming')]) def test_streaming_command(self): @@ -115,7 +114,7 @@ def stream(self, records): command._protocol_version = 1 self.assertEqual( - list(six.iteritems(command.configuration)), + list(command.configuration.items()), [('streaming', True)]) self.assertIs(command.configuration.clear_required_fields, None) @@ -139,7 +138,7 @@ def stream(self, records): self.fail('Expected AttributeError') self.assertEqual( - list(six.iteritems(command.configuration)), + list(command.configuration.items()), [('clear_required_fields', True), ('local', True), ('overrides_timeorder', True), ('required_fields', ['field_1', 'field_2', 'field_3']), ('streaming', True)]) @@ -147,7 +146,7 @@ def stream(self, records): command._protocol_version = 2 self.assertEqual( - list(six.iteritems(command.configuration)), + list(command.configuration.items()), [('type', 'streaming')]) self.assertIs(command.configuration.distributed, True) @@ -166,7 +165,7 @@ def stream(self, records): self.fail('Expected AttributeError') self.assertEqual( - list(six.iteritems(command.configuration)), + list(command.configuration.items()), [('required_fields', ['field_1', 'field_2', 'field_3']), ('type', 'stateful')]) diff --git a/tests/searchcommands/test_decorators.py 
b/tests/searchcommands/test_decorators.py index 0a9f427f..ce0b811b 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -29,13 +29,9 @@ from tests.searchcommands import rebase_environment -from splunklib import six - - @Configuration() class TestSearchCommand(SearchCommand): - boolean = Option( doc=''' **Syntax:** **boolean=**** @@ -226,49 +222,48 @@ def fix_up(cls, command_class): return ConfiguredSearchCommand.ConfigurationSettings for name, values, error_values in ( - ('clear_required_fields', - (True, False), - (None, 'anything other than a bool')), - ('distributed', - (True, False), - (None, 'anything other than a bool')), - ('generates_timeorder', - (True, False), - (None, 'anything other than a bool')), - ('generating', - (True, False), - (None, 'anything other than a bool')), - ('maxinputs', - (0, 50000, sys.maxsize), - (None, -1, sys.maxsize + 1, 'anything other than an int')), - ('overrides_timeorder', - (True, False), - (None, 'anything other than a bool')), - ('required_fields', - (['field_1', 'field_2'], set(['field_1', 'field_2']), ('field_1', 'field_2')), - (None, 0xdead, {'foo': 1, 'bar': 2})), - ('requires_preop', - (True, False), - (None, 'anything other than a bool')), - ('retainsevents', - (True, False), - (None, 'anything other than a bool')), - ('run_in_preview', - (True, False), - (None, 'anything other than a bool')), - ('streaming', - (True, False), - (None, 'anything other than a bool')), - ('streaming_preop', - ('some unicode string', b'some byte string'), - (None, 0xdead)), - ('type', - # TODO: Do we need to validate byte versions of these strings? - ('events', 'reporting', 'streaming'), - ('eventing', 0xdead))): + ('clear_required_fields', + (True, False), + (None, 'anything other than a bool')), + ('distributed', + (True, False), + (None, 'anything other than a bool')), + ('generates_timeorder', + (True, False), + (None, 'anything other than a bool')), + ('generating', + (True, False), + (None, 'anything other than a bool')), + ('maxinputs', + (0, 50000, sys.maxsize), + (None, -1, sys.maxsize + 1, 'anything other than an int')), + ('overrides_timeorder', + (True, False), + (None, 'anything other than a bool')), + ('required_fields', + (['field_1', 'field_2'], set(['field_1', 'field_2']), ('field_1', 'field_2')), + (None, 0xdead, {'foo': 1, 'bar': 2})), + ('requires_preop', + (True, False), + (None, 'anything other than a bool')), + ('retainsevents', + (True, False), + (None, 'anything other than a bool')), + ('run_in_preview', + (True, False), + (None, 'anything other than a bool')), + ('streaming', + (True, False), + (None, 'anything other than a bool')), + ('streaming_preop', + ('some unicode string', b'some byte string'), + (None, 0xdead)), + ('type', + # TODO: Do we need to validate byte versions of these strings? 
+ ('events', 'reporting', 'streaming'), + ('eventing', 0xdead))): for value in values: - settings_class = new_configuration_settings_class(name, value) # Setting property exists @@ -296,7 +291,9 @@ def fix_up(cls, command_class): try: new_configuration_settings_class(name, value) except Exception as error: - self.assertIsInstance(error, ValueError, 'Expected ValueError, not {}({}) for {}={}'.format(type(error).__name__, error, name, repr(value))) + self.assertIsInstance(error, ValueError, + 'Expected ValueError, not {}({}) for {}={}'.format(type(error).__name__, + error, name, repr(value))) else: self.fail(f'Expected ValueError, not success for {name}={repr(value)}') @@ -355,13 +352,13 @@ def test_option(self): command = TestSearchCommand() options = command.options - itervalues = lambda: six.itervalues(options) + #itervalues = lambda: options.values() options.reset() missing = options.get_missing() - self.assertListEqual(missing, [option.name for option in itervalues() if option.is_required]) - self.assertListEqual(presets, [six.text_type(option) for option in itervalues() if option.value is not None]) - self.assertListEqual(presets, [six.text_type(option) for option in itervalues() if six.text_type(option) != option.name + '=None']) + self.assertListEqual(missing, [option.name for option in options.values() if option.is_required]) + self.assertListEqual(presets, [str(option) for option in options.values() if option.value is not None]) + self.assertListEqual(presets, [str(option) for option in options.values() if str(option) != option.name + '=None']) test_option_values = { validators.Boolean: ('0', 'non-boolean value'), @@ -378,7 +375,7 @@ def test_option(self): validators.RegularExpression: ('\\s+', '(poorly formed regular expression'), validators.Set: ('bar', 'non-existent set entry')} - for option in itervalues(): + for option in options.values(): validator = option.validator if validator is None: @@ -397,7 +394,8 @@ def test_option(self): except ValueError: pass except BaseException as error: - self.assertFalse(f'Expected ValueError for {option.name}={illegal_value}, not this {type(error).__name__}: {error}') + self.assertFalse( + f'Expected ValueError for {option.name}={illegal_value}, not this {type(error).__name__}: {error}') else: self.assertFalse(f'Expected ValueError for {option.name}={illegal_value}, not a pass.') @@ -407,7 +405,7 @@ def test_option(self): 'code': 'foo == \"bar\"', 'duration': 89999, 'fieldname': 'some.field_name', - 'file': six.text_type(repr(__file__)), + 'file': str(repr(__file__)), 'integer': 100, 'float': 99.9, 'logging_configuration': environment.logging_configuration, @@ -421,7 +419,7 @@ def test_option(self): 'required_code': 'foo == \"bar\"', 'required_duration': 89999, 'required_fieldname': 'some.field_name', - 'required_file': six.text_type(repr(__file__)), + 'required_file': str(repr(__file__)), 'required_integer': 100, 'required_float': 99.9, 'required_map': 'foo', @@ -436,10 +434,10 @@ def test_option(self): self.maxDiff = None tuplewrap = lambda x: x if isinstance(x, tuple) else (x,) - invert = lambda x: {v: k for k, v in six.iteritems(x)} + invert = lambda x: {v: k for k, v in x.items()} - for x in six.itervalues(command.options): - # isinstance doesn't work for some reason + for x in command.options.values(): + # isinstance doesn't work for some reason if type(x.value).__name__ == 'Code': self.assertEqual(expected[x.name], x.value.source) elif type(x.validator).__name__ == 'Map': @@ -447,25 +445,27 @@ def test_option(self): elif 
type(x.validator).__name__ == 'RegularExpression': self.assertEqual(expected[x.name], x.value.pattern) elif isinstance(x.value, TextIOWrapper): - self.assertEqual(expected[x.name], f"'{x.value.name}'" ) - elif not isinstance(x.value, (bool,) + (float,) + (six.text_type,) + (six.binary_type,) + tuplewrap(six.integer_types)): + self.assertEqual(expected[x.name], f"'{x.value.name}'") + elif not isinstance(x.value, (bool,) + (float,) + (str,) + (bytes,) + tuplewrap(int)): self.assertEqual(expected[x.name], repr(x.value)) else: self.assertEqual(expected[x.name], x.value) expected = ( - 'foo="f" boolean="f" code="foo == \\"bar\\"" duration="24:59:59" fieldname="some.field_name" ' - 'file=' + json_encode_string(__file__) + ' float="99.9" integer="100" map="foo" match="123-45-6789" ' - 'optionname="some_option_name" record="f" regularexpression="\\\\s+" required_boolean="f" ' - 'required_code="foo == \\"bar\\"" required_duration="24:59:59" required_fieldname="some.field_name" ' - 'required_file=' + json_encode_string(__file__) + ' required_float="99.9" required_integer="100" required_map="foo" ' - 'required_match="123-45-6789" required_optionname="some_option_name" required_regularexpression="\\\\s+" ' - 'required_set="bar" set="bar" show_configuration="f"') + 'foo="f" boolean="f" code="foo == \\"bar\\"" duration="24:59:59" fieldname="some.field_name" ' + 'file=' + json_encode_string(__file__) + ' float="99.9" integer="100" map="foo" match="123-45-6789" ' + 'optionname="some_option_name" record="f" regularexpression="\\\\s+" required_boolean="f" ' + 'required_code="foo == \\"bar\\"" required_duration="24:59:59" required_fieldname="some.field_name" ' + 'required_file=' + json_encode_string( + __file__) + ' required_float="99.9" required_integer="100" required_map="foo" ' + 'required_match="123-45-6789" required_optionname="some_option_name" required_regularexpression="\\\\s+" ' + 'required_set="bar" set="bar" show_configuration="f"') - observed = six.text_type(command.options) + observed = str(command.options) self.assertEqual(observed, expected) + TestSearchCommand.__test__ = False if __name__ == "__main__": diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py index 793cd768..b2ee99fc 100755 --- a/tests/searchcommands/test_internals_v1.py +++ b/tests/searchcommands/test_internals_v1.py @@ -26,10 +26,8 @@ from splunklib.searchcommands.search_command import SearchCommand -from splunklib.six import StringIO, BytesIO +from io import StringIO, BytesIO -from splunklib import six -from splunklib.six.moves import range @pytest.mark.smoke @@ -57,7 +55,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, options) - for option in six.itervalues(command.options): + for option in command.options.values(): if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) continue @@ -74,7 +72,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, options + fieldnames) - for option in six.itervalues(command.options): + for option in command.options.values(): if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) continue @@ -89,7 +87,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, ['required_option=true'] + fieldnames) - for option in 
six.itervalues(command.options): + for option in command.options.values(): if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) @@ -284,7 +282,7 @@ def test_input_header(self): 'sentence': 'hello world!'} input_header = InputHeader() - text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', six.iteritems(collection), '') + '\n' + text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', collection.items(), '') + '\n' with closing(StringIO(text)) as input_file: input_header.read(input_file) diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index b4215f5b..091a816f 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -20,6 +20,7 @@ import json import os import random +import sys import pytest from functools import wraps @@ -35,18 +36,16 @@ from splunklib.searchcommands.internals import MetadataDecoder, MetadataEncoder, Recorder, RecordWriterV2 from splunklib.searchcommands import SearchMetric -from splunklib import six -from splunklib.six.moves import range -from splunklib.six import BytesIO as BytesIO -import splunklib.six.moves.cPickle as pickle +from io import BytesIO +import pickle # region Functions for producing random apps # Confirmed: [minint, maxint) covers the full range of values that xrange allows -minint = (-six.MAXSIZE - 1) // 2 -maxint = six.MAXSIZE // 2 +minint = (-sys.maxsize - 1) // 2 +maxint = sys.maxsize // 2 max_length = 1 * 1024 @@ -82,7 +81,7 @@ def random_integer(): def random_integers(): - return random_list(six.moves.range, minint, maxint) + return random_list(range, minint, maxint) def random_list(population, *args): @@ -206,7 +205,7 @@ def test_record_writer_with_random_data(self, save_recording=False): test_data['metrics'] = metrics - for name, metric in six.iteritems(metrics): + for name, metric in metrics.items(): writer.write_metric(name, metric) self.assertEqual(writer._chunk_count, 0) @@ -221,8 +220,8 @@ def test_record_writer_with_random_data(self, save_recording=False): self.assertListEqual(writer._inspector['messages'], messages) self.assertDictEqual( - dict(k_v for k_v in six.iteritems(writer._inspector) if k_v[0].startswith('metric.')), - dict(('metric.' + k_v1[0], k_v1[1]) for k_v1 in six.iteritems(metrics))) + dict(k_v for k_v in writer._inspector.items() if k_v[0].startswith('metric.')), + dict(('metric.' + k_v1[0], k_v1[1]) for k_v1 in metrics.items())) writer.flush(finished=True) @@ -312,7 +311,7 @@ def _load_chunks(self, ifile): 'n': 12 } - _json_input = six.text_type(json.dumps(_dictionary, separators=(',', ':'))) + _json_input = str(json.dumps(_dictionary, separators=(',', ':'))) _package_path = os.path.dirname(os.path.abspath(__file__)) _recordings_path = os.path.join(_package_path, 'recordings', 'scpv2', 'Splunk-6.3') diff --git a/tests/testlib.py b/tests/testlib.py index 00c3a60e..7c157eed 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -17,29 +17,24 @@ """Shared unit test utilities.""" import contextlib +import os +import time +import logging import sys -from splunklib import six # Run the test suite on the SDK without installing it. 
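# The two inserts below work because sys.path is searched in order, so the
# checked-out splunklib shadows any installed copy. A sturdier variant, given
# here only as a sketch and not something this patch does, would anchor on
# this file's location instead of the current working directory:
#
#     import os
#     sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))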
sys.path.insert(0, '../')
 sys.path.insert(0, '../examples')
-
-from splunklib import client
 from time import sleep
 from datetime import datetime, timedelta
 
 import unittest
 
-try:
-    from utils import parse
-except ImportError:
-    raise Exception("Add the SDK repository to your PYTHONPATH to run the examples "
-                    "(e.g., export PYTHONPATH=~/splunk-sdk-python.")
+from utils import parse
+
+from splunklib import client
 
-import os
-import time
-import logging
 
 logging.basicConfig(
     filename='test.log',
@@ -97,7 +92,7 @@ def assertEventuallyTrue(self, predicate, timeout=30, pause_time=0.5,
         logging.debug("wait finished after %s seconds", datetime.now() - start)
 
     def check_content(self, entity, **kwargs):
-        for k, v in six.iteritems(kwargs):
+        for k, v in kwargs.items():
             self.assertEqual(entity[k], str(v))
 
     def check_entity(self):
 
From 9ffee3113dd3f75faccd15533e0b1a742d86fc24 Mon Sep 17 00:00:00 2001
From: Abhi Shah
Date: Thu, 12 May 2022 16:02:18 +0530
Subject: [PATCH 19/60] refactor changes

---
 splunklib/__init__.py                         |  18 +-
 splunklib/binding.py                          |  33 ++--
 splunklib/client.py                           |  65 +++----
 splunklib/data.py                             |   5 +-
 splunklib/modularinput/argument.py            |   2 +-
 splunklib/modularinput/event.py               |   3 +-
 splunklib/modularinput/event_writer.py        |   2 +-
 splunklib/modularinput/script.py              |   7 +-
 splunklib/modularinput/utils.py               |   5 +-
 splunklib/results.py                          |  13 +-
 splunklib/searchcommands/__init__.py          |   4 +-
 splunklib/searchcommands/decorators.py        |  37 ++--
 splunklib/searchcommands/environment.py       |  11 +-
 splunklib/searchcommands/eventing_command.py  |   9 +-
 .../searchcommands/external_search_command.py |  32 ++--
 .../searchcommands/generating_command.py      |  14 +-
 splunklib/searchcommands/internals.py         |  88 +++++----
 splunklib/searchcommands/reporting_command.py |  10 +-
 splunklib/searchcommands/search_command.py    | 168 ++++++++----------
 splunklib/searchcommands/streaming_command.py |  14 +-
 splunklib/searchcommands/validators.py        | 110 ++++++------
 tests/searchcommands/chunked_data_stream.py   |   9 +-
 .../searchcommands/test_generator_command.py  |   5 +
 tests/searchcommands/test_search_command.py   |  19 +-
 .../searchcommands/test_searchcommands_app.py |  59 +++---
 .../searchcommands/test_streaming_command.py  |   1 -
 tests/searchcommands/test_validators.py       |  26 ++-
 tests/test_binding.py                         |  41 ++---
 tests/test_examples.py                        |   7 +-
 utils/__init__.py                             |  37 ++--
 utils/cmdopts.py                              |   2 -
 31 files changed, 393 insertions(+), 463 deletions(-)

diff --git a/splunklib/__init__.py b/splunklib/__init__.py
index 8f808d64..f3b39891 100644
--- a/splunklib/__init__.py
+++ b/splunklib/__init__.py
@@ -17,7 +17,7 @@ import logging
 
 DEFAULT_LOG_FORMAT = '%(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, ' \
-                     'Line=%(lineno)s, %(message)s'
+                    'Line=%(lineno)s, %(message)s'
 DEFAULT_DATE_FORMAT = '%Y-%m-%d %H:%M:%S %Z'
 
 
@@ -58,6 +58,22 @@ def ensure_str(s, encoding='utf-8', errors='strict'):
     raise TypeError(f"not expecting type '{type(s)}'")
 
 
+def ensure_text(s, encoding='utf-8', errors='strict'):
+    """
+    - `str` -> `str`
+    - `bytes` -> decoded to `str`
+    """
+    if isinstance(s, bytes):
+        return s.decode(encoding, errors)
+    if isinstance(s, str):
+        return s
+    raise TypeError(f"not expecting type '{type(s)}'")
+
+
+def assertRegex(self, *args, **kwargs):
+    return getattr(self, "assertRegex")(*args, **kwargs)
+
+
 __version_info__ = (1, 6, 19)
 __version__ = ".".join(map(str, __version_info__))
 
diff --git a/splunklib/binding.py b/splunklib/binding.py
index a387ecef..6de146c4 100644
--- a/splunklib/binding.py
+++ b/splunklib/binding.py
@@ 
-169,15 +169,14 @@ def __new__(self, val='', skip_encode=False, encode_slash=False): if isinstance(val, UrlEncoded): # Don't urllib.quote something already URL encoded. return val - elif skip_encode: + if skip_encode: return str.__new__(self, val) - elif encode_slash: + if encode_slash: return str.__new__(self, parse.quote_plus(val)) - else: - # When subclassing str, just call str.__new__ method - # with your class and the value you want to have in the - # new string. - return str.__new__(self, parse.quote(val)) + # When subclassing str, just call str.__new__ method + # with your class and the value you want to have in the + # new string. + return str.__new__(self, parse.quote(val)) def __add__(self, other): """self + other @@ -236,8 +235,7 @@ def _handle_auth_error(msg): except HTTPError as he: if he.status == 401: raise AuthenticationError(msg, he) - else: - raise + raise def _authentication(request_fun): @@ -305,8 +303,7 @@ def wrapper(self, *args, **kwargs): elif he.status == 401 and not self.autologin: raise AuthenticationError( "Request failed: Session is not logged in.", he) - else: - raise + raise return wrapper @@ -533,14 +530,14 @@ def _auth_headers(self): """ if self.has_cookies(): return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))] - elif self.basic and (self.username and self.password): + if self.basic and (self.username and self.password): encoded_username_password = b64encode(f"{self.username}:{self.password}".encode('utf-8')).decode('ascii') token = f'Basic {encoded_username_password}' return [("Authorization", token)] - elif self.bearerToken: + if self.bearerToken: token = f"Bearer {self.bearerToken}" return [("Authorization", token)] - elif self.token is _NoAuthenticationToken: + if self.token is _NoAuthenticationToken: return [] else: # Ensure the token is properly formatted @@ -925,8 +922,7 @@ def login(self): except HTTPError as he: if he.status == 401: raise AuthenticationError("Login failed.", he) - else: - raise + raise def logout(self): """Forgets the current session token, and cookies.""" @@ -1304,9 +1300,8 @@ def request(self, url, message, **kwargs): except Exception: if self.retries <= 0: raise - else: - time.sleep(self.retryDelay) - self.retries -= 1 + time.sleep(self.retryDelay) + self.retries -= 1 response = record(response) if 400 <= response.status: raise HTTPError(response) diff --git a/splunklib/client.py b/splunklib/client.py index 5a7d6f0f..0dc48564 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -212,10 +212,9 @@ def _load_atom_entries(response): # its state wrapped in another element, but at the top level. # For example, in XML, it returns ... instead of # .... 
- else: - entries = r.get('entry', None) - if entries is None: return None - return entries if isinstance(entries, list) else [entries] + entries = r.get('entry', None) + if entries is None: return None + return entries if isinstance(entries, list) else [entries] # Load the sid from the body of the given response @@ -530,8 +529,7 @@ def modular_input_kinds(self): """ if self.splunk_version >= (5,): return ReadOnlyCollection(self, PATH_MODULAR_INPUTS, item=ModularInputKind) - else: - raise IllegalOperationException("Modular inputs are not supported before Splunk version 5.") + raise IllegalOperationException("Modular inputs are not supported before Splunk version 5.") @property def storage_passwords(self): @@ -926,7 +924,6 @@ def __init__(self, service, path, **kwargs): self._state = None if not kwargs.get('skip_refresh', False): self.refresh(kwargs.get('state', None)) # "Prefresh" - return def __contains__(self, item): try: @@ -963,10 +960,9 @@ def __getattr__(self, key): # case we try to find it in self.content and then self.defaults. if key in self.state.content: return self.state.content[key] - elif key in self.defaults: + if key in self.defaults: return self.defaults[key] - else: - raise AttributeError(key) + raise AttributeError(key) def __getitem__(self, key): # getattr attempts to find a field on the object in the normal way, @@ -1022,8 +1018,7 @@ def _proper_namespace(self, owner=None, app=None, sharing=None): return (self._state.access.owner, self._state.access.app, self._state.access.sharing) - else: - return (self.service.namespace['owner'], + return (self.service.namespace['owner'], self.service.namespace['app'], self.service.namespace['sharing']) else: @@ -1272,15 +1267,13 @@ def __getitem__(self, key): if len(entries) > 1: raise AmbiguousReferenceException( f"Found multiple entities named '{key}'; please specify a namespace.") - elif len(entries) == 0: + if len(entries) == 0: raise KeyError(key) - else: - return entries[0] + return entries[0] except HTTPError as he: if he.status == 404: # No entity matching key and namespace. raise KeyError(key) - else: - raise + raise def __iter__(self, **kwargs): """Iterate over the entities in the collection. @@ -1344,10 +1337,9 @@ def _entity_path(self, state): raw_path = parse.unquote(state.links.alternate) if 'servicesNS/' in raw_path: return _trailing(raw_path, 'servicesNS/', '/', '/') - elif 'services/' in raw_path: + if 'services/' in raw_path: return _trailing(raw_path, 'services/') - else: - return raw_path + return raw_path def _load_list(self, response): """Converts *response* to a list of entities. @@ -1596,14 +1588,13 @@ def create(self, name, **params): # This endpoint doesn't return the content of the new # item. We have to go fetch it ourselves. return self[name] - else: - entry = atom.entry - state = _parse_atom_entry(entry) - entity = self.item( - self.service, - self._entity_path(state), - state=state) - return entity + entry = atom.entry + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + self._entity_path(state), + state=state) + return entity def delete(self, name, **params): """Deletes a specified entity from the collection. @@ -1644,8 +1635,7 @@ def delete(self, name, **params): # KeyError. 
if he.status == 404: raise KeyError(f"No such entity {name}") - else: - raise + raise return self def get(self, name="", owner=None, app=None, sharing=None, **query): @@ -1749,8 +1739,7 @@ def __contains__(self, key): except HTTPError as he: if he.status == 404: # No entity matching key return False - else: - raise + raise def create(self, name): """ Creates a configuration file named *name*. @@ -1771,10 +1760,9 @@ def create(self, name): response = self.post(__conf=name) if response.status == 303: return self[name] - elif response.status == 201: + if response.status == 201: return ConfigurationFile(self.service, PATH_CONF % name, item=Stanza, state={'title': name}) - else: - raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") def delete(self, key): """Raises `IllegalOperationException`.""" @@ -2203,7 +2191,7 @@ def update(self, **kwargs): if 'restrictToHost' in kwargs: raise IllegalOperationException("Cannot set restrictToHost on an existing input with the SDK.") - elif 'restrictToHost' in self._state.content and self.kind != 'udp': + if 'restrictToHost' in self._state.content and self.kind != 'udp': to_update['restrictToHost'] = self._state.content['restrictToHost'] # Do the actual update operation. @@ -2235,10 +2223,9 @@ def __getitem__(self, key): entries = self._load_list(response) if len(entries) > 1: raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.") - elif len(entries) == 0: + if len(entries) == 0: raise KeyError((key, kind)) - else: - return entries[0] + return entries[0] except HTTPError as he: if he.status == 404: # No entity matching kind and key raise KeyError((key, kind)) diff --git a/splunklib/data.py b/splunklib/data.py index 14e8a793..c889ff9b 100644 --- a/splunklib/data.py +++ b/splunklib/data.py @@ -88,10 +88,9 @@ def load(text, match=None): count = len(items) if count == 0: return None - elif count == 1: + if count == 1: return load_root(items[0], nametable) - else: - return [load_root(item, nametable) for item in items] + return [load_root(item, nametable) for item in items] # Load the attributes of the given element. diff --git a/splunklib/modularinput/argument.py b/splunklib/modularinput/argument.py index 645ce330..f16ea99e 100644 --- a/splunklib/modularinput/argument.py +++ b/splunklib/modularinput/argument.py @@ -18,7 +18,7 @@ class Argument: """Class representing an argument to a modular input kind. - ``Argument`` is meant to be used with ``Scheme`` to generate an XML + ``Argument`` is meant to be used with ``Scheme`` to generate an XML definition of the modular input kind that Splunk understands. ``name`` is the only required parameter for the constructor. 
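
For orientation, here is a minimal sketch of how ``Argument`` and ``Scheme`` fit together
in a modular input. The input kind ("my_input") and the argument ("interval") are
illustrative only; they are not part of this patch:

    from splunklib.modularinput import Argument, Scheme

    # Declare the input kind; Splunk renders this scheme in the setup UI
    # and uses it to validate configured stanzas.
    scheme = Scheme("my_input")
    scheme.description = "Streams events on a fixed polling interval."

    # One typed argument; required_on_create makes it mandatory at setup time.
    interval = Argument("interval")
    interval.data_type = Argument.data_type_number
    interval.description = "Polling interval in seconds"
    interval.required_on_create = True
    scheme.add_argument(interval)
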
diff --git a/splunklib/modularinput/event.py b/splunklib/modularinput/event.py index 93759b06..6a9fba93 100644 --- a/splunklib/modularinput/event.py +++ b/splunklib/modularinput/event.py @@ -14,7 +14,8 @@ from io import TextIOBase import xml.etree.ElementTree as ET -from splunklib.six import ensure_text + +from splunklib import ensure_text class Event: diff --git a/splunklib/modularinput/event_writer.py b/splunklib/modularinput/event_writer.py index 75a96a68..5aa83d96 100755 --- a/splunklib/modularinput/event_writer.py +++ b/splunklib/modularinput/event_writer.py @@ -14,7 +14,7 @@ import sys -from splunklib.six import ensure_str +from splunklib import ensure_str from .event import ET diff --git a/splunklib/modularinput/script.py b/splunklib/modularinput/script.py index 1502774a..2cac0011 100644 --- a/splunklib/modularinput/script.py +++ b/splunklib/modularinput/script.py @@ -68,7 +68,7 @@ def run_script(self, args, event_writer, input_stream): event_writer.close() return 0 - elif str(args[1]).lower() == "--scheme": + if str(args[1]).lower() == "--scheme": # Splunk has requested XML specifying the scheme for this # modular input Return it and exit. scheme = self.get_scheme() @@ -77,9 +77,8 @@ def run_script(self, args, event_writer, input_stream): EventWriter.FATAL, "Modular input script returned a null scheme.") return 1 - else: - event_writer.write_xml_document(scheme.to_xml()) - return 0 + event_writer.write_xml_document(scheme.to_xml()) + return 0 elif args[1].lower() == "--validate-arguments": validation_definition = ValidationDefinition.parse(input_stream) diff --git a/splunklib/modularinput/utils.py b/splunklib/modularinput/utils.py index 57f00330..923dae04 100644 --- a/splunklib/modularinput/utils.py +++ b/splunklib/modularinput/utils.py @@ -45,14 +45,13 @@ def xml_compare(expected, found): if (expected.text is None or expected.text.strip() == "") \ and (found.text is None or found.text.strip() == ""): return True - else: - return expected.tag == found.tag and expected.text == found.text \ + return expected.tag == found.tag and expected.text == found.text \ and expected.attrib == found.attrib def parse_parameters(param_node): if param_node.tag == "param": return param_node.text - elif param_node.tag == "param_list": + if param_node.tag == "param_list": parameters = [] for mvp in param_node: parameters.append(mvp.text) diff --git a/splunklib/results.py b/splunklib/results.py index f9b976cc..4a20b266 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -34,7 +34,6 @@ from io import BufferedReader, BytesIO -from splunklib.six import deprecated import xml.etree.ElementTree as et @@ -48,7 +47,7 @@ ] -class Message(object): +class Message: """This class represents informational messages that Splunk interleaves in the results stream. ``Message`` takes two arguments: a string giving the message type (e.g., "DEBUG"), and @@ -73,7 +72,7 @@ def __hash__(self): return hash((self.type, self.message)) -class _ConcatenatedStream(object): +class _ConcatenatedStream: """Lazily concatenate zero or more streams into a stream. As you read from the concatenated stream, you get characters from @@ -105,7 +104,7 @@ def read(self, n=None): return response -class _XMLDTDFilter(object): +class _XMLDTDFilter: """Lazily remove all XML DTDs from a stream. All substrings matching the regular expression ]*> are @@ -132,7 +131,7 @@ def read(self, n=None): c = self.stream.read(1) if c == b"": break - elif c == b"<": + if c == b"<": c += self.stream.read(1) if c == b"`_ 3. 
`Configure search assistant with searchbnf.conf `_
-
+
 4. `Control search distribution with distsearch.conf `_
 
 """
-from __future__ import absolute_import, division, print_function, unicode_literals
-
 from .environment import *
 from .decorators import *
 from .validators import *
diff --git a/splunklib/searchcommands/decorators.py b/splunklib/searchcommands/decorators.py
index d8b3f48c..01159d50 100644
--- a/splunklib/searchcommands/decorators.py
+++ b/splunklib/searchcommands/decorators.py
@@ -14,19 +14,16 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from __future__ import absolute_import, division, print_function, unicode_literals
-from splunklib import six
 
 from collections import OrderedDict  # must be python 2.7
-
 from inspect import getmembers, isclass, isfunction
-from splunklib.six.moves import map as imap
+
 from .internals import ConfigurationSettingsType, json_encode_string
 from .validators import OptionName
 
 
-class Configuration(object):
+class Configuration:
     """ Defines the configuration settings for a search command.
 
     Documents, validates, and ensures that only relevant configuration settings are applied. Adds a :code:`name` class
@@ -69,7 +66,7 @@ def __call__(self, o):
             name = o.__name__
             if name.endswith('Command'):
                 name = name[:-len('Command')]
-            o.name = six.text_type(name.lower())
+            o.name = str(name.lower())
 
             # Construct ConfigurationSettings instance for the command class
 
@@ -82,7 +79,7 @@ def __call__(self, o):
             o.ConfigurationSettings.fix_up(o)
             Option.fix_up(o)
         else:
-            raise TypeError('Incorrect usage: Configuration decorator applied to {0}'.format(type(o), o.__name__))
+            raise TypeError(f'Incorrect usage: Configuration decorator applied to {type(o)}')
 
         return o
 
@@ -136,7 +133,7 @@ def fix_up(cls, values):
 
         for name, setting in definitions:
 
             if setting._name is None:
-                setting._name = name = six.text_type(name)
+                setting._name = name = str(name)
             else:
                 name = setting._name
 
@@ -187,14 +184,14 @@ def is_supported_by_protocol(version):
                 continue
 
             if setting.fset is None:
-                raise ValueError('The value of configuration setting {} is fixed'.format(name))
+                raise ValueError(f'The value of configuration setting {name} is fixed')
 
             setattr(cls, backing_field_name, validate(specification, name, value))
             del values[name]
 
         if len(values) > 0:
-            settings = sorted(list(six.iteritems(values)))
-            settings = imap(lambda n_v: '{}={}'.format(n_v[0], repr(n_v[1])), settings)
+            settings = sorted(list(values.items()))
+            settings = map(lambda n_v: f'{n_v[0]}={repr(n_v[1])}', settings)
             raise AttributeError('Inapplicable configuration settings: ' + ', '.join(settings))
 
         cls.configuration_setting_definitions = definitions
@@ -212,7 +209,7 @@ def _get_specification(self):
         try:
             specification = ConfigurationSettingsType.specification_matrix[name]
         except KeyError:
-            raise AttributeError('Unknown configuration setting: {}={}'.format(name, repr(self._value)))
+            raise AttributeError(f'Unknown configuration setting: {name}={repr(self._value)}')
 
         return ConfigurationSettingsType.validate_configuration_setting, specification
 
@@ -346,7 +343,7 @@ def _copy_extra_attributes(self, other):
 
     # region Types
 
-    class Item(object):
+    class Item:
         """ Presents an instance/class view over a search command `Option`.
 
         This class is used by SearchCommand.process to parse and report on option values.
@@ -357,7 +354,7 @@ def __init__(self, command, option): self._option = option self._is_set = False validator = self.validator - self._format = six.text_type if validator is None else validator.format + self._format = str if validator is None else validator.format def __repr__(self): return '(' + repr(self.name) + ', ' + repr(self._format(self.value)) + ')' @@ -405,7 +402,6 @@ def reset(self): self._option.__set__(self._command, self._option.default) self._is_set = False - pass # endregion class View(OrderedDict): @@ -420,27 +416,26 @@ def __init__(self, command): OrderedDict.__init__(self, ((option.name, item_class(command, option)) for (name, option) in definitions)) def __repr__(self): - text = 'Option.View([' + ','.join(imap(lambda item: repr(item), six.itervalues(self))) + '])' + text = 'Option.View([' + ','.join(map(lambda item: repr(item), self.values())) + '])' return text def __str__(self): - text = ' '.join([str(item) for item in six.itervalues(self) if item.is_set]) + text = ' '.join([str(item) for item in self.values() if item.is_set]) return text # region Methods def get_missing(self): - missing = [item.name for item in six.itervalues(self) if item.is_required and not item.is_set] + missing = [item.name for item in self.values() if item.is_required and not item.is_set] return missing if len(missing) > 0 else None def reset(self): - for value in six.itervalues(self): + for value in self.values(): value.reset() - pass # endregion - pass + # endregion diff --git a/splunklib/searchcommands/environment.py b/splunklib/searchcommands/environment.py index e92018f6..2896df7b 100644 --- a/splunklib/searchcommands/environment.py +++ b/splunklib/searchcommands/environment.py @@ -14,16 +14,15 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import, division, print_function, unicode_literals + from logging import getLogger, root, StreamHandler from logging.config import fileConfig -from os import chdir, environ, path -from splunklib.six.moves import getcwd - +from os import chdir, environ, path, getcwd import sys + def configure_logging(logger_name, filename=None): """ Configure logging and return the named logger and the location of the logging configuration file loaded. @@ -88,9 +87,9 @@ def configure_logging(logger_name, filename=None): found = True break if not found: - raise ValueError('Logging configuration file "{}" not found in local or default directory'.format(filename)) + raise ValueError(f'Logging configuration file "{filename}" not found in local or default directory') elif not path.exists(filename): - raise ValueError('Logging configuration file "{}" not found'.format(filename)) + raise ValueError(f'Logging configuration file "{filename}" not found') if filename is not None: global _current_logging_configuration_file diff --git a/splunklib/searchcommands/eventing_command.py b/splunklib/searchcommands/eventing_command.py index 27dc13a3..20767a32 100644 --- a/splunklib/searchcommands/eventing_command.py +++ b/splunklib/searchcommands/eventing_command.py @@ -14,10 +14,7 @@ # License for the specific language governing permissions and limitations # under the License. 
-from __future__ import absolute_import, division, print_function, unicode_literals
-from splunklib import six
-from splunklib.six.moves import map as imap
 from .decorators import ConfigurationSetting
 from .search_command import SearchCommand
@@ -140,10 +137,10 @@ def fix_up(cls, command):
 
         # N.B.: Does not use Python 2 dict copy semantics
         def iteritems(self):
             iteritems = SearchCommand.ConfigurationSettings.iteritems(self)
-            return imap(lambda name_value: (name_value[0], 'events' if name_value[0] == 'type' else name_value[1]), iteritems)
+            return map(lambda name_value: (name_value[0], 'events' if name_value[0] == 'type' else name_value[1]), iteritems)
 
         # N.B.: Does not use Python 3 dict view semantics
-        if not six.PY2:
-            items = iteritems
+
+        items = iteritems
 
     # endregion
diff --git a/splunklib/searchcommands/external_search_command.py b/splunklib/searchcommands/external_search_command.py
index c2306241..18fc2643 100644
--- a/splunklib/searchcommands/external_search_command.py
+++ b/splunklib/searchcommands/external_search_command.py
@@ -14,34 +14,31 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from __future__ import absolute_import, division, print_function, unicode_literals
-
 from logging import getLogger
 import os
 import sys
 import traceback
-from splunklib import six
+from . import splunklib_logger as logger
+
 
 if sys.platform == 'win32':
     from signal import signal, CTRL_BREAK_EVENT, SIGBREAK, SIGINT, SIGTERM
     from subprocess import Popen
     import atexit
 
-from . import splunklib_logger as logger
+
 
 # P1 [ ] TODO: Add ExternalSearchCommand class documentation
 
 
-class ExternalSearchCommand(object):
-    """
-    """
+class ExternalSearchCommand:
     def __init__(self, path, argv=None, environ=None):
 
-        if not isinstance(path, (bytes, six.text_type)):
-            raise ValueError('Expected a string value for path, not {}'.format(repr(path)))
+        if not isinstance(path, (bytes, str)):
+            raise ValueError(f'Expected a string value for path, not {repr(path)}')
 
         self._logger = getLogger(self.__class__.__name__)
-        self._path = six.text_type(path)
+        self._path = str(path)
         self._argv = None
         self._environ = None
@@ -57,7 +54,7 @@ def argv(self):
     @argv.setter
     def argv(self, value):
         if not (value is None or isinstance(value, (list, tuple))):
-            raise ValueError('Expected a list, tuple or value of None for argv, not {}'.format(repr(value)))
+            raise ValueError(f'Expected a list, tuple or value of None for argv, not {repr(value)}')
         self._argv = value
 
     @property
@@ -67,7 +64,7 @@ def environ(self):
     @environ.setter
     def environ(self, value):
         if not (value is None or isinstance(value, dict)):
-            raise ValueError('Expected a dictionary value for environ, not {}'.format(repr(value)))
+            raise ValueError(f'Expected a dictionary value for environ, not {repr(value)}')
         self._environ = value
 
     @property
@@ -90,7 +87,7 @@ def execute(self):
             self._execute(self._path, self._argv, self._environ)
         except:
             error_type, error, tb = sys.exc_info()
-            message = 'Command execution failed: ' + six.text_type(error)
+            message = 'Command execution failed: ' + str(error)
             self._logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb)))
             sys.exit(1)
 
@@ -120,13 +117,13 @@ def _execute(path, argv=None, environ=None):
                 found = ExternalSearchCommand._search_path(path, search_path)
 
             if found is None:
-                raise ValueError('Cannot find command on path: {}'.format(path))
+                raise ValueError(f'Cannot find command on path: {path}')
 
             path = found
-            logger.debug('starting command="%s", arguments=%s', path, argv)
+            logger.debug(f'starting command="{path}", arguments={argv}')
 
             def terminate(signal_number, frame):
-                sys.exit('External search command is terminating on receipt of signal={}.'.format(signal_number))
+                sys.exit(f'External search command is terminating on receipt of signal={signal_number}.')
 
             def terminate_child():
                 if p.pid is not None and p.returncode is None:
@@ -206,7 +203,6 @@ def _execute(path, argv, environ):
                 os.execvp(path, argv)
             else:
                 os.execvpe(path, argv, environ)
-        return
 
 # endregion
diff --git a/splunklib/searchcommands/generating_command.py b/splunklib/searchcommands/generating_command.py
index 6a75d2c2..bf2527d9 100644
--- a/splunklib/searchcommands/generating_command.py
+++ b/splunklib/searchcommands/generating_command.py
@@ -20,8 +20,6 @@
 from .decorators import ConfigurationSetting
 from .search_command import SearchCommand
 
-from splunklib import six
-from splunklib.six.moves import map as imap, filter as ifilter
 
 # P1 [O] TODO: Discuss generates_timeorder in the class-level documentation for GeneratingCommand
@@ -254,8 +252,7 @@ def process(self, argv=sys.argv, ifile=sys.stdin, ofile=sys.stdout, allow_empty_
 
         if not allow_empty_input:
             raise ValueError("allow_empty_input cannot be False for Generating Commands")
-        else:
-            return super(GeneratingCommand, self).process(argv=argv, ifile=ifile, ofile=ofile, allow_empty_input=True)
+        return super().process(argv=argv, ifile=ifile, ofile=ofile, allow_empty_input=True)
 
     # endregion
 
@@ -370,18 +367,15 @@ def iteritems(self):
             iteritems = SearchCommand.ConfigurationSettings.iteritems(self)
             version = self.command.protocol_version
             if version == 2:
-                iteritems = ifilter(lambda name_value1: name_value1[0] != 'distributed', iteritems)
+                iteritems = filter(lambda name_value1: name_value1[0] != 'distributed', iteritems)
                 if not self.distributed and self.type == 'streaming':
-                    iteritems = imap(
+                    iteritems = map(
                         lambda name_value: (name_value[0], 'stateful') if name_value[0] == 'type' else (name_value[0], name_value[1]), iteritems)
             return iteritems
 
         # N.B.: Does not use Python 3 dict view semantics
-        if not six.PY2:
-            items = iteritems
+        items = iteritems
 
-        pass
         # endregion
 
-    pass
     # endregion
diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py
index 1ea2833d..db8d363c 100644
--- a/splunklib/searchcommands/internals.py
+++ b/splunklib/searchcommands/internals.py
@@ -14,25 +14,22 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from __future__ import absolute_import, division, print_function
-
-from io import TextIOWrapper
-from collections import deque, namedtuple
-from splunklib import six
-from collections import OrderedDict
-from splunklib.six.moves import StringIO
-from itertools import chain
-from splunklib.six.moves import map as imap
-from json import JSONDecoder, JSONEncoder
-from json.encoder import encode_basestring_ascii as json_encode_string
-from splunklib.six.moves import urllib
-
 import csv
 import gzip
 import os
 import re
 import sys
 import warnings
+import urllib.parse
+from io import TextIOWrapper, StringIO
+from collections import deque, namedtuple
+from collections import OrderedDict
+from itertools import chain
+from json import JSONDecoder, JSONEncoder
+from json.encoder import encode_basestring_ascii as json_encode_string
+
+
+
 from . import environment
@@ -54,7 +51,7 @@ def set_binary_mode(fh):
     if sys.version_info >= (3, 0) and hasattr(fh, 'buffer'):
         return fh.buffer
     # check for python3
-    elif sys.version_info >= (3, 0):
+    if sys.version_info >= (3, 0):
         pass
     # check for windows python2. SPL-175233 -- python3 stdout is already binary
     elif sys.platform == 'win32':
@@ -65,13 +62,12 @@ def set_binary_mode(fh):
         implementation = python_implementation()
         if implementation == 'PyPy':
             return os.fdopen(fh.fileno(), 'wb', 0)
-        else:
-            import msvcrt
-            msvcrt.setmode(fh.fileno(), os.O_BINARY)
+        import msvcrt
+        msvcrt.setmode(fh.fileno(), os.O_BINARY)
     return fh
 
 
-class CommandLineParser(object):
+class CommandLineParser:
     r""" Parses the arguments to a search command.
 
     A search command line is described by the following syntax.
@@ -144,7 +140,7 @@ def parse(cls, command, argv):
         command_args = cls._arguments_re.match(argv)
 
         if command_args is None:
-            raise SyntaxError('Syntax error: {}'.format(argv))
+            raise SyntaxError(f'Syntax error: {argv}')
 
         # Parse options
 
@@ -152,7 +148,7 @@ def parse(cls, command, argv):
             name, value = option.group('name'), option.group('value')
             if name not in command.options:
                 raise ValueError(
-                    'Unrecognized {} command option: {}={}'.format(command.name, name, json_encode_string(value)))
+                    f'Unrecognized {command.name} command option: {name}={json_encode_string(value)}')
             command.options[name].value = cls.unquote(value)
 
         missing = command.options.get_missing()
@@ -160,8 +156,8 @@ def parse(cls, command, argv):
         if missing is not None:
             if len(missing) > 1:
                 raise ValueError(
-                    'Values for these {} command options are required: {}'.format(command.name, ', '.join(missing)))
-            raise ValueError('A value for {} command option {} is required'.format(command.name, missing[0]))
+                    f'Values for these {command.name} command options are required: {", ".join(missing)}')
+            raise ValueError(f'A value for {command.name} command option {missing[0]} is required')
 
         # Parse field names
 
@@ -277,10 +273,10 @@ def validate_configuration_setting(specification, name, value):
             if isinstance(specification.type, type):
                 type_names = specification.type.__name__
             else:
-                type_names = ', '.join(imap(lambda t: t.__name__, specification.type))
-            raise ValueError('Expected {} value, not {}={}'.format(type_names, name, repr(value)))
+                type_names = ', '.join(map(lambda t: t.__name__, specification.type))
+            raise ValueError(f'Expected {type_names} value, not {name}={repr(value)}')
         if specification.constraint and not specification.constraint(value):
-            raise ValueError('Illegal value: {}={}'.format(name, repr(value)))
+            raise ValueError(f'Illegal value: {name}={repr(value)}')
         return value
 
     specification = namedtuple(
@@ -314,7 +310,7 @@ def validate_configuration_setting(specification, name, value):
             supporting_protocols=[1]),
         'maxinputs': specification(
             type=int,
-            constraint=lambda value: 0 <= value <= six.MAXSIZE,
+            constraint=lambda value: 0 <= value <= sys.maxsize,
             supporting_protocols=[2]),
         'overrides_timeorder': specification(
             type=bool,
@@ -341,11 +337,11 @@ def validate_configuration_setting(specification, name, value):
             constraint=None,
             supporting_protocols=[1]),
         'streaming_preop': specification(
-            type=(bytes, six.text_type),
+            type=(bytes, str),
             constraint=None,
             supporting_protocols=[1, 2]),
         'type': specification(
-            type=(bytes, six.text_type),
+            type=(bytes, str),
             constraint=lambda value: value in ('events', 'reporting', 'streaming'),
             supporting_protocols=[2])}
 
@@ -368,7 +364,7 @@ class InputHeader(dict):
     """
 
     def __str__(self):
-        return
'\n'.join([name + ':' + value for name, value in six.iteritems(self)]) + return '\n'.join([name + ':' + value for name, value in self.items()]) def read(self, ifile): """ Reads an input header from an input file. @@ -416,7 +412,7 @@ def _object_hook(dictionary): while len(stack): instance, member_name, dictionary = stack.popleft() - for name, value in six.iteritems(dictionary): + for name, value in dictionary.items(): if isinstance(value, dict): stack.append((dictionary, name, value)) @@ -437,7 +433,7 @@ def default(self, o): _separators = (',', ':') -class ObjectView(object): +class ObjectView: def __init__(self, dictionary): self.__dict__ = dictionary @@ -449,7 +445,7 @@ def __str__(self): return str(self.__dict__) -class Recorder(object): +class Recorder: def __init__(self, path, f): self._recording = gzip.open(path + '.gz', 'wb') @@ -487,7 +483,7 @@ def write(self, text): self._recording.flush() -class RecordWriter(object): +class RecordWriter: def __init__(self, ofile, maxresultrows=None): self._maxresultrows = 50000 if maxresultrows is None else maxresultrows @@ -513,7 +509,7 @@ def is_flushed(self): @is_flushed.setter def is_flushed(self, value): - self._flushed = True if value else False + self._flushed = bool(value) @property def ofile(self): @@ -593,7 +589,7 @@ def _write_record(self, record): if fieldnames is None: self._fieldnames = fieldnames = list(record.keys()) self._fieldnames.extend([i for i in self.custom_fields if i not in self._fieldnames]) - value_list = imap(lambda fn: (str(fn), str('__mv_') + str(fn)), fieldnames) + value_list = map(lambda fn: (str(fn), str('__mv_') + str(fn)), fieldnames) self._writerow(list(chain.from_iterable(value_list))) get_value = record.get @@ -632,9 +628,9 @@ def _write_record(self, record): if value_t is bool: value = str(value.real) - elif value_t is six.text_type: - value = value - elif isinstance(value, six.integer_types) or value_t is float or value_t is complex: + elif value_t is str: + value = str(value) + elif isinstance(value, int) or value_t is float or value_t is complex: value = str(value) elif issubclass(value_t, (dict, list, tuple)): value = str(''.join(RecordWriter._iterencode_json(value, 0))) @@ -658,13 +654,11 @@ def _write_record(self, record): values += (value, None) continue - if value_t is six.text_type: - if six.PY2: - value = value.encode('utf-8') + if value_t is str: values += (value, None) continue - if isinstance(value, six.integer_types) or value_t is float or value_t is complex: + if isinstance(value, int) or value_t is float or value_t is complex: values += (str(value), None) continue @@ -799,14 +793,14 @@ def write_chunk(self, finished=None): if len(inspector) == 0: inspector = None - metadata = [item for item in (('inspector', inspector), ('finished', finished))] + metadata = [('inspector', inspector), ('finished', finished)] self._write_chunk(metadata, self._buffer.getvalue()) self._clear() def write_metadata(self, configuration): self._ensure_validity() - metadata = chain(six.iteritems(configuration), (('inspector', self._inspector if self._inspector else None),)) + metadata = chain(configuration.items(), (('inspector', self._inspector if self._inspector else None),)) self._write_chunk(metadata, '') self.write('\n') self._clear() @@ -816,13 +810,13 @@ def write_metric(self, name, value): self._inspector['metric.' 
+ name] = value def _clear(self): - super(RecordWriterV2, self)._clear() + super()._clear() self._fieldnames = None def _write_chunk(self, metadata, body): if metadata: - metadata = str(''.join(self._iterencode_json(dict([(n, v) for n, v in metadata if v is not None]), 0))) + metadata = str(''.join(self._iterencode_json(dict((n, v) for n, v in metadata if v is not None), 0))) if sys.version_info >= (3, 0): metadata = metadata.encode('utf-8') metadata_length = len(metadata) @@ -836,7 +830,7 @@ def _write_chunk(self, metadata, body): if not (metadata_length > 0 or body_length > 0): return - start_line = 'chunked 1.0,%s,%s\n' % (metadata_length, body_length) + start_line = f'chunked 1.0,{metadata_length},{body_length}\n' self.write(start_line) self.write(metadata) self.write(body) diff --git a/splunklib/searchcommands/reporting_command.py b/splunklib/searchcommands/reporting_command.py index 94708619..3551f4cd 100644 --- a/splunklib/searchcommands/reporting_command.py +++ b/splunklib/searchcommands/reporting_command.py @@ -14,8 +14,6 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import, division, print_function, unicode_literals - from itertools import chain from .internals import ConfigurationSettingsType, json_encode_string @@ -23,7 +21,6 @@ from .streaming_command import StreamingCommand from .search_command import SearchCommand from .validators import Set -from splunklib import six class ReportingCommand(SearchCommand): @@ -94,7 +91,7 @@ def prepare(self): self._configuration.streaming_preop = ' '.join(streaming_preop) return - raise RuntimeError('Unrecognized reporting command phase: {}'.format(json_encode_string(six.text_type(phase)))) + raise RuntimeError(f'Unrecognized reporting command phase: {json_encode_string(str(phase))}') def reduce(self, records): """ Override this method to produce a reporting data structure. @@ -244,7 +241,7 @@ def fix_up(cls, command): """ if not issubclass(command, ReportingCommand): - raise TypeError('{} is not a ReportingCommand'.format( command)) + raise TypeError(f'{command} is not a ReportingCommand') if command.reduce == ReportingCommand.reduce: raise AttributeError('No ReportingCommand.reduce override') @@ -274,8 +271,7 @@ def fix_up(cls, command): ConfigurationSetting.fix_up(f.ConfigurationSettings, settings) del f._settings - pass + # endregion - pass # endregion diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index dd11391d..dd7a8989 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -14,44 +14,30 @@ # License for the specific language governing permissions and limitations # under the License. 
-from __future__ import absolute_import, division, print_function, unicode_literals - # Absolute imports -from collections import namedtuple - +import csv import io - -from collections import OrderedDict +import os +import re +import sys +import tempfile +import traceback +from collections import namedtuple, OrderedDict from copy import deepcopy -from splunklib.six.moves import StringIO +from io import StringIO from itertools import chain, islice -from splunklib.six.moves import filter as ifilter, map as imap, zip as izip -from splunklib import six -if six.PY2: - from logging import _levelNames, getLevelName, getLogger -else: - from logging import _nameToLevel as _levelNames, getLevelName, getLogger -try: - from shutil import make_archive -except ImportError: - # Used for recording, skip on python 2.6 - pass +from logging import _nameToLevel as _levelNames, getLevelName, getLogger +from shutil import make_archive from time import time -from splunklib.six.moves.urllib.parse import unquote -from splunklib.six.moves.urllib.parse import urlsplit +from urllib.parse import unquote +from urllib.parse import urlsplit from warnings import warn from xml.etree import ElementTree -import os -import sys -import re -import csv -import tempfile -import traceback - # Relative imports - +import splunklib +from . import Boolean, Option, environment from .internals import ( CommandLineParser, CsvDialect, @@ -64,8 +50,6 @@ RecordWriterV1, RecordWriterV2, json_encode_string) - -from . import Boolean, Option, environment from ..client import Service @@ -91,7 +75,7 @@ # P2 [ ] TODO: Consider bumping None formatting up to Option.Item.__str__ -class SearchCommand(object): +class SearchCommand: """ Represents a custom search command. """ @@ -158,16 +142,16 @@ def logging_level(self): def logging_level(self, value): if value is None: value = self._default_logging_level - if isinstance(value, (bytes, six.text_type)): + if isinstance(value, (bytes, str)): try: level = _levelNames[value.upper()] except KeyError: - raise ValueError('Unrecognized logging level: {}'.format(value)) + raise ValueError(f'Unrecognized logging level: {value}') else: try: level = int(value) except ValueError: - raise ValueError('Unrecognized logging level: {}'.format(value)) + raise ValueError(f'Unrecognized logging level: {value}') self._logger.setLevel(level) def add_field(self, current_record, field_name, field_value): @@ -291,7 +275,7 @@ def search_results_info(self): values = next(reader) except IOError as error: if error.errno == 2: - self.logger.error('Search results info file {} does not exist.'.format(json_encode_string(path))) + self.logger.error(f'Search results info file {json_encode_string(path)} does not exist.') return raise @@ -306,7 +290,7 @@ def convert_value(value): except ValueError: return value - info = ObjectView(dict(imap(lambda f_v: (convert_field(f_v[0]), convert_value(f_v[1])), izip(fields, values)))) + info = ObjectView(dict(map(lambda f_v: (convert_field(f_v[0]), convert_value(f_v[1])), zip(fields, values)))) try: count_map = info.countMap @@ -315,7 +299,7 @@ def convert_value(value): else: count_map = count_map.split(';') n = len(count_map) - info.countMap = dict(izip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2))) + info.countMap = dict(zip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2))) try: msg_type = info.msgType @@ -323,7 +307,7 @@ def convert_value(value): except AttributeError: pass else: - messages = ifilter(lambda t_m: t_m[0] or t_m[1], izip(msg_type.split('\n'), msg_text.split('\n'))) + 
messages = filter(lambda t_m: t_m[0] or t_m[1], zip(msg_type.split('\n'), msg_text.split('\n'))) info.msg = [Message(message) for message in messages] del info.msgType @@ -417,7 +401,6 @@ def prepare(self): :rtype: NoneType """ - pass def process(self, argv=sys.argv, ifile=sys.stdin, ofile=sys.stdout, allow_empty_input=True): """ Process data. @@ -466,7 +449,7 @@ def _map_metadata(self, argv): def _map(metadata_map): metadata = {} - for name, value in six.iteritems(metadata_map): + for name, value in metadata_map.items(): if isinstance(value, dict): value = _map(value) else: @@ -485,7 +468,8 @@ def _map(metadata_map): _metadata_map = { 'action': - (lambda v: 'getinfo' if v == '__GETINFO__' else 'execute' if v == '__EXECUTE__' else None, lambda s: s.argv[1]), + (lambda v: 'getinfo' if v == '__GETINFO__' else 'execute' if v == '__EXECUTE__' else None, + lambda s: s.argv[1]), 'preview': (bool, lambda s: s.input_header.get('preview')), 'searchinfo': { @@ -533,7 +517,7 @@ def _prepare_protocol_v1(self, argv, ifile, ofile): try: tempfile.tempdir = self._metadata.searchinfo.dispatch_dir except AttributeError: - raise RuntimeError('{}.metadata.searchinfo.dispatch_dir is undefined'.format(self.__class__.__name__)) + raise RuntimeError(f'{self.__class__.__name__}.metadata.searchinfo.dispatch_dir is undefined') debug(' tempfile.tempdir=%r', tempfile.tempdir) @@ -603,7 +587,7 @@ def _process_protocol_v1(self, argv, ifile, ofile): ifile = self._prepare_protocol_v1(argv, ifile, ofile) self._record_writer.write_record(dict( - (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in six.iteritems(self._configuration))) + (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in self._configuration.items())) self.finish() elif argv[1] == '__EXECUTE__': @@ -617,21 +601,21 @@ def _process_protocol_v1(self, argv, ifile, ofile): else: message = ( - 'Command {0} appears to be statically configured for search command protocol version 1 and static ' + f'Command {self.name} appears to be statically configured for search command protocol version 1 and static ' 'configuration is unsupported by splunklib.searchcommands. 
Please ensure that '
                'default/commands.conf contains this stanza:\n'
-                '[{0}]\n'
-                'filename = {1}\n'
+                f'[{self.name}]\n'
+                f'filename = {os.path.basename(argv[0])}\n'
                'enableheader = true\n'
                'outputheader = true\n'
                'requires_srinfo = true\n'
                'supports_getinfo = true\n'
                'supports_multivalues = true\n'
-                'supports_rawargs = true'.format(self.name, os.path.basename(argv[0])))
+                'supports_rawargs = true')
 
             raise RuntimeError(message)
 
         except (SyntaxError, ValueError) as error:
-            self.write_error(six.text_type(error))
+            self.write_error(str(error))
             self.flush()
             exit(0)
@@ -686,7 +670,7 @@ def _process_protocol_v2(self, argv, ifile, ofile):
             action = getattr(metadata, 'action', None)
 
             if action != 'getinfo':
-                raise RuntimeError('Expected getinfo action, not {}'.format(action))
+                raise RuntimeError(f'Expected getinfo action, not {action}')
 
             if len(body) > 0:
                 raise RuntimeError('Did not expect data for getinfo action')
@@ -706,7 +690,7 @@ def _process_protocol_v2(self, argv, ifile, ofile):
             try:
                 tempfile.tempdir = self._metadata.searchinfo.dispatch_dir
             except AttributeError:
-                raise RuntimeError('%s.metadata.searchinfo.dispatch_dir is undefined'.format(class_name))
+                raise RuntimeError(f'{class_name}.metadata.searchinfo.dispatch_dir is undefined')
 
             debug('  tempfile.tempdir=%r', tempfile.tempdir)
 
         except:
@@ -727,7 +711,7 @@ def _process_protocol_v2(self, argv, ifile, ofile):
 
         debug('Parsing arguments')
 
-        if args and type(args) == list:
+        if args and isinstance(args, list):
             for arg in args:
                 result = self._protocol_v2_option_parser(arg)
                 if len(result) == 1:
@@ -738,13 +722,13 @@ def _process_protocol_v2(self, argv, ifile, ofile):
                 try:
                     option = self.options[name]
                 except KeyError:
-                    self.write_error('Unrecognized option: {}={}'.format(name, value))
+                    self.write_error(f'Unrecognized option: {name}={value}')
                     error_count += 1
                     continue
                 try:
                     option.value = value
                 except ValueError:
-                    self.write_error('Illegal value: {}={}'.format(name, value))
+                    self.write_error(f'Illegal value: {name}={value}')
                     error_count += 1
                     continue
 
@@ -752,15 +736,15 @@ def _process_protocol_v2(self, argv, ifile, ofile):
 
         if missing is not None:
             if len(missing) == 1:
-                self.write_error('A value for "{}" is required'.format(missing[0]))
+                self.write_error(f'A value for "{missing[0]}" is required')
             else:
-                self.write_error('Values for these required options are missing: {}'.format(', '.join(missing)))
+                self.write_error(f'Values for these required options are missing: {", ".join(missing)}')
             error_count += 1
 
         if error_count > 0:
             exit(1)
 
-        debug('  command: %s', six.text_type(self))
+        debug('  command: %s', str(self))
 
         debug('Preparing for execution')
         self.prepare()
@@ -778,7 +762,7 @@ def _process_protocol_v2(self, argv, ifile, ofile):
                 setattr(info, attr, [arg for arg in getattr(info, attr) if not arg.startswith('record=')])
 
             metadata = MetadataEncoder().encode(self._metadata)
-            ifile.record('chunked 1.0,', six.text_type(len(metadata)), ',0\n', metadata)
+            ifile.record('chunked 1.0,', str(len(metadata)), ',0\n', metadata)
 
         if self.show_configuration:
             self.write_info(self.name + ' command configuration: ' + str(self._configuration))
@@ -888,25 +872,25 @@ def _as_binary_stream(ifile):
         try:
             return ifile.buffer
         except AttributeError as error:
-            raise RuntimeError('Failed to get underlying buffer: {}'.format(error))
+            raise RuntimeError(f'Failed to get underlying buffer: {error}')
 
     @staticmethod
     def _read_chunk(istream):
         # noinspection PyBroadException
-        assert isinstance(istream.read(0), six.binary_type), 'Stream must be binary'
+        assert
isinstance(istream.read(0), bytes), 'Stream must be binary' try: header = istream.readline() except Exception as error: - raise RuntimeError('Failed to read transport header: {}'.format(error)) + raise RuntimeError(f'Failed to read transport header: {error}') if not header: return None - match = SearchCommand._header.match(six.ensure_str(header)) + match = SearchCommand._header.match(splunklib.ensure_str(header)) if match is None: - raise RuntimeError('Failed to parse transport header: {}'.format(header)) + raise RuntimeError(f'Failed to parse transport header: {header}') metadata_length, body_length = match.groups() metadata_length = int(metadata_length) @@ -915,14 +899,14 @@ def _read_chunk(istream): try: metadata = istream.read(metadata_length) except Exception as error: - raise RuntimeError('Failed to read metadata of length {}: {}'.format(metadata_length, error)) + raise RuntimeError(f'Failed to read metadata of length {metadata_length}: {error}') decoder = MetadataDecoder() try: - metadata = decoder.decode(six.ensure_str(metadata)) + metadata = decoder.decode(splunklib.ensure_str(metadata)) except Exception as error: - raise RuntimeError('Failed to parse metadata of length {}: {}'.format(metadata_length, error)) + raise RuntimeError(f'Failed to parse metadata of length {metadata_length}: {error}') # if body_length <= 0: # return metadata, '' @@ -932,9 +916,9 @@ def _read_chunk(istream): if body_length > 0: body = istream.read(body_length) except Exception as error: - raise RuntimeError('Failed to read body of length {}: {}'.format(body_length, error)) + raise RuntimeError(f'Failed to read body of length {body_length}: {error}') - return metadata, six.ensure_str(body) + return metadata, splunklib.ensure_str(body) _header = re.compile(r'chunked\s+1.0\s*,\s*(\d+)\s*,\s*(\d+)\s*\n') @@ -949,16 +933,16 @@ def _read_csv_records(self, ifile): except StopIteration: return - mv_fieldnames = dict([(name, name[len('__mv_'):]) for name in fieldnames if name.startswith('__mv_')]) + mv_fieldnames = dict((name, name[len('__mv_'):]) for name in fieldnames if name.startswith('__mv_')) if len(mv_fieldnames) == 0: for values in reader: - yield OrderedDict(izip(fieldnames, values)) + yield OrderedDict(zip(fieldnames, values)) return for values in reader: record = OrderedDict() - for fieldname, value in izip(fieldnames, values): + for fieldname, value in zip(fieldnames, values): if fieldname.startswith('__mv_'): if len(value) > 0: record[mv_fieldnames[fieldname]] = self._decode_list(value) @@ -978,7 +962,7 @@ def _execute_v2(self, ifile, process): metadata, body = result action = getattr(metadata, 'action', None) if action != 'execute': - raise RuntimeError('Expected execute action, not {}'.format(action)) + raise RuntimeError(f'Expected execute action, not {action}') self._finished = getattr(metadata, 'finished', False) self._record_writer.is_flushed = False @@ -988,15 +972,15 @@ def _execute_v2(self, ifile, process): self._record_writer.write_chunk(finished=self._finished) def _execute_chunk_v2(self, process, chunk): - metadata, body = chunk + metadata, body = chunk - if len(body) <= 0 and not self._allow_empty_input: - raise ValueError( - "No records found to process. Set allow_empty_input=True in dispatch function to move forward " - "with empty records.") + if len(body) <= 0 and not self._allow_empty_input: + raise ValueError( + "No records found to process. 
Set allow_empty_input=True in dispatch function to move forward " + "with empty records.") - records = self._read_csv_records(StringIO(body)) - self._record_writer.write_records(process(records)) + records = self._read_csv_records(StringIO(body)) + self._record_writer.write_records(process(records)) def _report_unexpected_error(self): @@ -1008,7 +992,7 @@ def _report_unexpected_error(self): filename = origin.tb_frame.f_code.co_filename lineno = origin.tb_lineno - message = '{0} at "{1}", line {2:d} : {3}'.format(error_type.__name__, filename, lineno, error) + message = f'{error_type.__name__} at "{filename}", line {str(lineno)} : {error}' environment.splunklib_logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb))) self.write_error(message) @@ -1017,10 +1001,11 @@ def _report_unexpected_error(self): # region Types - class ConfigurationSettings(object): + class ConfigurationSettings: """ Represents the configuration settings common to all :class:`SearchCommand` classes. """ + def __init__(self, command): self.command = command @@ -1034,7 +1019,7 @@ def __repr__(self): """ definitions = type(self).configuration_setting_definitions - settings = imap( + settings = map( lambda setting: repr((setting.name, setting.__get__(self), setting.supporting_protocols)), definitions) return '[' + ', '.join(settings) + ']' @@ -1047,8 +1032,8 @@ def __str__(self): :return: String representation of this instance """ - #text = ', '.join(imap(lambda (name, value): name + '=' + json_encode_string(unicode(value)), self.iteritems())) - text = ', '.join(['{}={}'.format(name, json_encode_string(six.text_type(value))) for (name, value) in six.iteritems(self)]) + # text = ', '.join(imap(lambda (name, value): name + '=' + json_encode_string(unicode(value)), self.iteritems())) + text = ', '.join([f'{name}={json_encode_string(str(value))}' for (name, value) in self.items()]) return text # region Methods @@ -1072,24 +1057,25 @@ def fix_up(cls, command_class): def iteritems(self): definitions = type(self).configuration_setting_definitions version = self.command.protocol_version - return ifilter( - lambda name_value1: name_value1[1] is not None, imap( - lambda setting: (setting.name, setting.__get__(self)), ifilter( + return filter( + lambda name_value1: name_value1[1] is not None, map( + lambda setting: (setting.name, setting.__get__(self)), filter( lambda setting: setting.is_supported_by_protocol(version), definitions))) # N.B.: Does not use Python 3 dict view semantics - if not six.PY2: - items = iteritems - pass # endregion + items = iteritems - pass # endregion + # endregion + + # endregion SearchMetric = namedtuple('SearchMetric', ('elapsed_seconds', 'invocation_count', 'input_count', 'output_count')) -def dispatch(command_class, argv=sys.argv, input_file=sys.stdin, output_file=sys.stdout, module_name=None, allow_empty_input=True): +def dispatch(command_class, argv=sys.argv, input_file=sys.stdin, output_file=sys.stdout, module_name=None, + allow_empty_input=True): """ Instantiates and executes a search command class This function implements a `conditional script stanza `_ based on the value of diff --git a/splunklib/searchcommands/streaming_command.py b/splunklib/searchcommands/streaming_command.py index fa075edb..40d69cb6 100644 --- a/splunklib/searchcommands/streaming_command.py +++ b/splunklib/searchcommands/streaming_command.py @@ -14,10 +14,6 @@ # License for the specific language governing permissions and limitations # under the License. 
-from __future__ import absolute_import, division, print_function, unicode_literals - -from splunklib import six -from splunklib.six.moves import map as imap, filter as ifilter from .decorators import ConfigurationSetting from .search_command import SearchCommand @@ -171,7 +167,6 @@ def fix_up(cls, command): """ if command.stream == StreamingCommand.stream: raise AttributeError('No StreamingCommand.stream override') - return # TODO: Stop looking like a dictionary because we don't obey the semantics # N.B.: Does not use Python 2 dict copy semantics @@ -180,16 +175,15 @@ def iteritems(self): version = self.command.protocol_version if version == 1: if self.required_fields is None: - iteritems = ifilter(lambda name_value: name_value[0] != 'clear_required_fields', iteritems) + iteritems = filter(lambda name_value: name_value[0] != 'clear_required_fields', iteritems) else: - iteritems = ifilter(lambda name_value2: name_value2[0] != 'distributed', iteritems) + iteritems = filter(lambda name_value2: name_value2[0] != 'distributed', iteritems) if not self.distributed: - iteritems = imap( + iteritems = map( lambda name_value1: (name_value1[0], 'stateful') if name_value1[0] == 'type' else (name_value1[0], name_value1[1]), iteritems) return iteritems # N.B.: Does not use Python 3 dict view semantics - if not six.PY2: - items = iteritems + items = iteritems # endregion diff --git a/splunklib/searchcommands/validators.py b/splunklib/searchcommands/validators.py index 22f0e16b..ef460a4b 100644 --- a/splunklib/searchcommands/validators.py +++ b/splunklib/searchcommands/validators.py @@ -14,20 +14,17 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import, division, print_function, unicode_literals - -from json.encoder import encode_basestring_ascii as json_encode_string -from collections import namedtuple -from splunklib.six.moves import StringIO -from io import open import csv import os import re -from splunklib import six -from splunklib.six.moves import getcwd +from io import open, StringIO +from os import getcwd +from json.encoder import encode_basestring_ascii as json_encode_string +from collections import namedtuple -class Validator(object): + +class Validator: """ Base class for validators that check and format search command options. 
You must inherit from this class and override :code:`Validator.__call__` and
@@ -60,14 +57,16 @@ class Boolean(Validator):
 
     def __call__(self, value):
         if not (value is None or isinstance(value, bool)):
-            value = six.text_type(value).lower()
+            value = str(value).lower()
             if value not in Boolean.truth_values:
-                raise ValueError('Unrecognized truth value: {0}'.format(value))
+                raise ValueError(f'Unrecognized truth value: {value}')
             value = Boolean.truth_values[value]
         return value
 
     def format(self, value):
-        return None if value is None else 't' if value else 'f'
+        if value is None:
+            return None
+        return 't' if value else 'f'
 
 
 class Code(Validator):
@@ -93,11 +92,11 @@ def __call__(self, value):
         if value is None:
             return None
         try:
-            return Code.object(compile(value, 'string', self._mode), six.text_type(value))
+            return Code.object(compile(value, 'string', self._mode), str(value))
         except (SyntaxError, TypeError) as error:
             message = str(error)
 
-            six.raise_from(ValueError(message), error)
+            raise ValueError(message) from error
 
     def format(self, value):
         return None if value is None else value.source
@@ -113,9 +112,9 @@ class Fieldname(Validator):
 
     def __call__(self, value):
         if value is not None:
-            value = six.text_type(value)
+            value = str(value)
             if Fieldname.pattern.match(value) is None:
-                raise ValueError('Illegal characters in fieldname: {}'.format(value))
+                raise ValueError(f'Illegal characters in fieldname: {value}')
         return value
 
     def format(self, value):
@@ -136,7 +135,7 @@ def __call__(self, value):
         if value is None:
             return value
 
-        path = six.text_type(value)
+        path = str(value)
 
         if not os.path.isabs(path):
             path = os.path.join(self.directory, path)
@@ -144,8 +143,7 @@ def __call__(self, value):
         try:
             value = open(path, self.mode) if self.buffering is None else open(path, self.mode, self.buffering)
         except IOError as error:
-            raise ValueError('Cannot open {0} with mode={1} and buffering={2}: {3}'.format(
-                value, self.mode, self.buffering, error))
+            raise ValueError(f'Cannot open {value} with mode={self.mode} and buffering={self.buffering}: {error}')
 
         return value
 
@@ -163,42 +161,38 @@ class Integer(Validator):
 
     def __init__(self, minimum=None, maximum=None):
         if minimum is not None and maximum is not None:
             def check_range(value):
-                if not (minimum <= value <= maximum):
-                    raise ValueError('Expected integer in the range [{0},{1}], not {2}'.format(minimum, maximum, value))
-                return
+                if not minimum <= value <= maximum:
+                    raise ValueError(f'Expected integer in the range [{minimum},{maximum}], not {value}')
+
         elif minimum is not None:
             def check_range(value):
                 if value < minimum:
-                    raise ValueError('Expected integer in the range [{0},+∞], not {1}'.format(minimum, value))
-                return
+                    raise ValueError(f'Expected integer in the range [{minimum},+∞], not {value}')
+
         elif maximum is not None:
             def check_range(value):
                 if value > maximum:
-                    raise ValueError('Expected integer in the range [-∞,{0}], not {1}'.format(maximum, value))
-                return
+                    raise ValueError(f'Expected integer in the range [-∞,{maximum}], not {value}')
+
         else:
             def check_range(value):
                 return
 
         self.check_range = check_range
-        return
+
 
     def __call__(self, value):
         if value is None:
             return None
         try:
-            if six.PY2:
-                value = long(value)
-            else:
-                value = int(value)
+            value = int(value)
         except ValueError:
-            raise ValueError('Expected integer value, not {}'.format(json_encode_string(value)))
+            raise ValueError(f'Expected integer value, not {json_encode_string(value)}')
 
         self.check_range(value)
         return value
 
     def format(self, value):
-        return None if value is None else six.text_type(int(value))
+        return None if value is None else str(int(value))
 
 
 class Float(Validator):
@@ -208,25 +202,21 @@ class Float(Validator):
 
     def __init__(self, minimum=None, maximum=None):
         if minimum is not None and maximum is not None:
             def check_range(value):
-                if not (minimum <= value <= maximum):
-                    raise ValueError('Expected float in the range [{0},{1}], not {2}'.format(minimum, maximum, value))
-                return
+                if not minimum <= value <= maximum:
+                    raise ValueError(f'Expected float in the range [{minimum},{maximum}], not {value}')
         elif minimum is not None:
             def check_range(value):
                 if value < minimum:
-                    raise ValueError('Expected float in the range [{0},+∞], not {1}'.format(minimum, value))
-                return
+                    raise ValueError(f'Expected float in the range [{minimum},+∞], not {value}')
         elif maximum is not None:
             def check_range(value):
                 if value > maximum:
-                    raise ValueError('Expected float in the range [-∞,{0}], not {1}'.format(maximum, value))
-                return
+                    raise ValueError(f'Expected float in the range [-∞,{maximum}], not {value}')
         else:
             def check_range(value):
                 return
 
         self.check_range = check_range
-        return
+
six.text_type(int(value)) + return None if value is None else str(int(value)) class Float(Validator): @@ -208,25 +202,21 @@ class Float(Validator): def __init__(self, minimum=None, maximum=None): if minimum is not None and maximum is not None: def check_range(value): - if not (minimum <= value <= maximum): - raise ValueError('Expected float in the range [{0},{1}], not {2}'.format(minimum, maximum, value)) - return + if not minimum <= value <= maximum: + raise ValueError(f'Expected float in the range [{minimum},{maximum}], not {value}') elif minimum is not None: def check_range(value): if value < minimum: - raise ValueError('Expected float in the range [{0},+∞], not {1}'.format(minimum, value)) - return + raise ValueError(f'Expected float in the range [{minimum},+∞], not {value}') elif maximum is not None: def check_range(value): if value > maximum: - raise ValueError('Expected float in the range [-∞,{0}], not {1}'.format(maximum, value)) - return + raise ValueError(f'Expected float in the range [-∞,{maximum}], not {value}') else: def check_range(value): return - self.check_range = check_range - return + def __call__(self, value): if value is None: @@ -234,13 +224,13 @@ def __call__(self, value): try: value = float(value) except ValueError: - raise ValueError('Expected float value, not {}'.format(json_encode_string(value))) + raise ValueError(f'Expected float value, not {json_encode_string(value)}') self.check_range(value) return value def format(self, value): - return None if value is None else six.text_type(float(value)) + return None if value is None else str(float(value)) class Duration(Validator): @@ -265,7 +255,7 @@ def __call__(self, value): if len(p) == 3: result = 3600 * _unsigned(p[0]) + 60 * _60(p[1]) + _60(p[2]) except ValueError: - raise ValueError('Invalid duration value: {0}'.format(value)) + raise ValueError(f'Invalid duration value: {value}') return result @@ -302,7 +292,7 @@ class Dialect(csv.Dialect): def __init__(self, validator=None): if not (validator is None or isinstance(validator, Validator)): - raise ValueError('Expected a Validator instance or None for validator, not {}', repr(validator)) + raise ValueError(f'Expected a Validator instance or None for validator, not {repr(validator)}') self._validator = validator def __call__(self, value): @@ -322,7 +312,7 @@ def __call__(self, value): for index, item in enumerate(value): value[index] = self._validator(item) except ValueError as error: - raise ValueError('Could not convert item {}: {}'.format(index, error)) + raise ValueError(f'Could not convert item {index}: {error}') return value @@ -346,10 +336,10 @@ def __call__(self, value): if value is None: return None - value = six.text_type(value) + value = str(value) if value not in self.membership: - raise ValueError('Unrecognized value: {0}'.format(value)) + raise ValueError(f'Unrecognized value: {value}') return self.membership[value] @@ -362,19 +352,19 @@ class Match(Validator): """ def __init__(self, name, pattern, flags=0): - self.name = six.text_type(name) + self.name = str(name) self.pattern = re.compile(pattern, flags) def __call__(self, value): if value is None: return None - value = six.text_type(value) + value = str(value) if self.pattern.match(value) is None: - raise ValueError('Expected {}, not {}'.format(self.name, json_encode_string(value))) + raise ValueError(f'Expected {self.name}, not {json_encode_string(value)}') return value def format(self, value): - return None if value is None else six.text_type(value) + return None if value is None else str(value) 
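The running change in these validators.py hunks is that every option coercion now goes through the built-in str() and int() instead of six.text_type and Python 2's long(). A minimal usage sketch of the resulting behavior, assuming splunklib.searchcommands is importable (the values shown are illustrative, not taken from the tests):

    from splunklib.searchcommands import validators

    boolean = validators.Boolean()
    assert boolean('TRUE') is True            # truth values are matched case-insensitively via str(value).lower()
    assert boolean.format(False) == 'f'       # formats back to 't'/'f'

    integer = validators.Integer(minimum=1, maximum=100)
    assert integer('42') == 42                # str -> int coercion, then the range check
    try:
        integer(0)                            # out of range
    except ValueError as error:
        print(error)                          # Expected integer in the range [1,100], not 0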
class OptionName(Validator): @@ -385,13 +375,13 @@ class OptionName(Validator): def __call__(self, value): if value is not None: - value = six.text_type(value) + value = str(value) if OptionName.pattern.match(value) is None: - raise ValueError('Illegal characters in option name: {}'.format(value)) + raise ValueError(f'Illegal characters in option name: {value}') return value def format(self, value): - return None if value is None else six.text_type(value) + return None if value is None else str(value) class RegularExpression(Validator): @@ -402,9 +392,9 @@ def __call__(self, value): if value is None: return None try: - value = re.compile(six.text_type(value)) + value = re.compile(str(value)) except re.error as error: - raise ValueError('{}: {}'.format(six.text_type(error).capitalize(), value)) + raise ValueError(f'{str(error).capitalize()}: {value}') return value def format(self, value): @@ -421,9 +411,9 @@ def __init__(self, *args): def __call__(self, value): if value is None: return None - value = six.text_type(value) + value = str(value) if value not in self.membership: - raise ValueError('Unrecognized value: {}'.format(value)) + raise ValueError(f'Unrecognized value: {value}') return value def format(self, value): diff --git a/tests/searchcommands/chunked_data_stream.py b/tests/searchcommands/chunked_data_stream.py index 9c128ffb..39782c44 100644 --- a/tests/searchcommands/chunked_data_stream.py +++ b/tests/searchcommands/chunked_data_stream.py @@ -4,12 +4,11 @@ import json import splunklib.searchcommands.internals -from splunklib import six class Chunk: def __init__(self, version, meta, data): - self.version = six.ensure_str(version) + self.version = version self.meta = json.loads(meta) dialect = splunklib.searchcommands.internals.CsvDialect self.data = csv.DictReader(io.StringIO(data.decode("utf-8")), @@ -54,7 +53,7 @@ def read_chunk(self): def build_chunk(keyval, data=None): - metadata = six.ensure_binary(json.dumps(keyval), 'utf-8') + metadata = json.dumps(keyval).encode('utf-8') data_output = _build_data_csv(data) return b"chunked 1.0,%d,%d\n%s%s" % (len(metadata), len(data_output), metadata, data_output) @@ -87,7 +86,7 @@ def _build_data_csv(data): return b'' if isinstance(data, bytes): return data - csvout = splunklib.six.StringIO() + csvout = io.StringIO() headers = set() for datum in data: @@ -97,4 +96,4 @@ def _build_data_csv(data): writer.writeheader() for datum in data: writer.writerow(datum) - return six.ensure_binary(csvout.getvalue()) + return csvout.getvalue().encode('utf-8') diff --git a/tests/searchcommands/test_generator_command.py b/tests/searchcommands/test_generator_command.py index 9a56e8bb..af103977 100644 --- a/tests/searchcommands/test_generator_command.py +++ b/tests/searchcommands/test_generator_command.py @@ -11,6 +11,7 @@ class GeneratorTest(GeneratingCommand): def generate(self): for num in range(1, 10): yield {'_time': time.time(), 'event_index': num} + generator = GeneratorTest() in_stream = io.BytesIO() in_stream.write(chunky.build_getinfo_chunk()) @@ -39,15 +40,18 @@ def generate(self): assert expected.issubset(seen) assert finished_seen + def test_allow_empty_input_for_generating_command(): """ Passing allow_empty_input for generating command will cause an error """ + @Configuration() class GeneratorTest(GeneratingCommand): def generate(self): for num in range(1, 3): yield {"_index": num} + generator = GeneratorTest() in_stream = io.BytesIO() out_stream = io.BytesIO() @@ -57,6 +61,7 @@ def generate(self): except ValueError as error: assert str(error) 
== "allow_empty_input cannot be False for Generating Commands" + def test_all_fieldnames_present_for_generated_records(): @Configuration() class GeneratorTest(GeneratingCommand): diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py index 7df283c7..7e6e7ed6 100755 --- a/tests/searchcommands/test_search_command.py +++ b/tests/searchcommands/test_search_command.py @@ -27,20 +27,20 @@ import pytest -from splunklib import six +import splunklib from splunklib.searchcommands import Configuration, StreamingCommand from splunklib.searchcommands.decorators import ConfigurationSetting, Option from splunklib.searchcommands.search_command import SearchCommand from splunklib.client import Service -from splunklib.six import StringIO, BytesIO +from io import StringIO, BytesIO def build_command_input(getinfo_metadata, execute_metadata, execute_body): - input = (f'chunked 1.0,{len(six.ensure_binary(getinfo_metadata))},0\n{getinfo_metadata}' + - f'chunked 1.0,{len(six.ensure_binary(execute_metadata))},{len(six.ensure_binary(execute_body))}\n{execute_metadata}{execute_body}') + input = (f'chunked 1.0,{len(splunklib.ensure_binary(getinfo_metadata))},0\n{getinfo_metadata}' + + f'chunked 1.0,{len(splunklib.ensure_binary(execute_metadata))},{len(splunklib.ensure_binary(execute_body))}\n{execute_metadata}{execute_body}') - ifile = BytesIO(six.ensure_binary(input)) + ifile = BytesIO(splunklib.ensure_binary(input)) ifile = TextIOWrapper(ifile) @@ -135,7 +135,8 @@ def test_process_scpv1(self): result = BytesIO() self.assertRaises(SystemExit, command.process, argv, ofile=result) - six.assertRegex(self, result.getvalue().decode('UTF-8'), expected) + + splunklib.assertRegex(self, result.getvalue().decode('UTF-8'), expected) # TestCommand.process should return configuration settings on Getinfo probe @@ -286,7 +287,7 @@ def test_process_scpv1(self): command.process(argv, ifile, ofile=result) except SystemExit as error: self.assertNotEqual(error.code, 0) - six.assertRegex( + splunklib.assertRegex( self, result.getvalue().decode('UTF-8'), r'^error_message=RuntimeError at ".+", line \d+ : Testing\r\n\r\n$') @@ -311,7 +312,7 @@ def test_process_scpv1(self): except BaseException as error: self.fail(f'Expected no exception, but caught {type(error).__name__}: {error}') else: - six.assertRegex( + splunklib.assertRegex( self, result.getvalue().decode('UTF-8'), r'^\r\n' @@ -694,7 +695,7 @@ def test_process_scpv2(self): r'logging_configuration=\\\".+\\\" logging_level=\\\"WARNING\\\" record=\\\"f\\\" ' \ r'required_option_1=\\\"value_1\\\" required_option_2=\\\"value_2\\\" show_configuration=\\\"f\\\"\"\]\]\}' - six.assertRegex( + splunklib.assertRegex( self, result.getvalue().decode('utf-8'), r'^chunked 1.0,2,0\n' diff --git a/tests/searchcommands/test_searchcommands_app.py b/tests/searchcommands/test_searchcommands_app.py index 25435054..595ea8d0 100755 --- a/tests/searchcommands/test_searchcommands_app.py +++ b/tests/searchcommands/test_searchcommands_app.py @@ -39,8 +39,7 @@ import sys import pytest -from splunklib.six.moves import cStringIO as StringIO -from splunklib import six +from io import StringIO from tests.searchcommands import project_root @@ -85,7 +84,7 @@ def __init__(self, path): self._output_file = path + '.output' - if six.PY3 and os.path.isfile(self._output_file + '.py3'): + if os.path.isfile(self._output_file + '.py3'): self._output_file = self._output_file + '.py3' # Remove the "splunk cmd" portion @@ -115,7 +114,7 @@ def search(self): class Recordings: def 
__init__(self, name, action, phase, protocol_version):
-        basedir = Recordings._prefix + six.text_type(protocol_version)
+        basedir = Recordings._prefix + str(protocol_version)
 
         if not os.path.isdir(basedir):
             raise ValueError(
@@ -146,72 +145,72 @@ def setUp(self):
             self.skipTest("You must build the searchcommands_app by running " + build_command)
         TestCase.setUp(self)
 
-    @pytest.mark.skipif(six.PY3,
+    @pytest.mark.skipif(sys.version_info >= (3, 0),
                         reason="Python 2 does not treat Unicode as words for regex, so Python 3 has broken fixtures")
     def test_countmatches_as_unit(self):
         expected, output, errors, exit_status = self._run_command('countmatches', action='getinfo', protocol=1)
-        self.assertEqual(0, exit_status, msg=six.text_type(errors))
-        self.assertEqual('', errors, msg=six.text_type(errors))
+        self.assertEqual(0, exit_status, msg=str(errors))
+        self.assertEqual('', errors, msg=str(errors))
         self._compare_csv_files_time_sensitive(expected, output)
 
         expected, output, errors, exit_status = self._run_command('countmatches', action='execute', protocol=1)
-        self.assertEqual(0, exit_status, msg=six.text_type(errors))
+        self.assertEqual(0, exit_status, msg=str(errors))
-
-        self.assertEqual('', errors, msg=six.text_type(errors))
+        self.assertEqual('', errors, msg=str(errors))
         self._compare_csv_files_time_sensitive(expected, output)
 
         expected, output, errors, exit_status = self._run_command('countmatches')
-        self.assertEqual(0, exit_status, msg=six.text_type(errors))
-        self.assertEqual('', errors, msg=six.text_type(errors))
+        self.assertEqual(0, exit_status, msg=str(errors))
+        self.assertEqual('', errors, msg=str(errors))
         self._compare_chunks(expected, output)
 
     def test_generatehello_as_unit(self):
         expected, output, errors, exit_status = self._run_command('generatehello', action='getinfo', protocol=1)
-        self.assertEqual(0, exit_status, msg=six.text_type(errors))
-        self.assertEqual('', errors, msg=six.text_type(errors))
+        self.assertEqual(0, exit_status, msg=str(errors))
+        self.assertEqual('', errors, msg=str(errors))
         self._compare_csv_files_time_sensitive(expected, output)
 
         expected, output, errors, exit_status = self._run_command('generatehello', action='execute', protocol=1)
-        self.assertEqual(0, exit_status, msg=six.text_type(errors))
-        self.assertEqual('', errors, msg=six.text_type(errors))
+        self.assertEqual(0, exit_status, msg=str(errors))
+        self.assertEqual('', errors, msg=str(errors))
         self._compare_csv_files_time_insensitive(expected, output)
 
         expected, output, errors, exit_status = self._run_command('generatehello')
-        self.assertEqual(0, exit_status, msg=six.text_type(errors))
-        self.assertEqual('', errors, msg=six.text_type(errors))
+        self.assertEqual(0, exit_status, msg=str(errors))
+        self.assertEqual('', errors, msg=str(errors))
         self._compare_chunks(expected, output, time_sensitive=False)
 
     def test_sum_as_unit(self):
         expected, output, errors, exit_status = self._run_command('sum', action='getinfo', phase='reduce', protocol=1)
-        self.assertEqual(0, exit_status, msg=six.text_type(errors))
-        self.assertEqual('', errors, msg=six.text_type(errors))
+        self.assertEqual(0, exit_status, msg=str(errors))
+        self.assertEqual('', errors, msg=str(errors))
         self._compare_csv_files_time_sensitive(expected, output)
 
         expected, output, errors, exit_status = self._run_command('sum', action='getinfo', phase='map', protocol=1)
-        self.assertEqual(0, exit_status, msg=six.text_type(errors))
-        self.assertEqual('', errors, msg=six.text_type(errors))
+        self.assertEqual(0, exit_status, msg=str(errors))
+        self.assertEqual('',
errors, msg=str(errors)) self._compare_csv_files_time_sensitive(expected, output) expected, output, errors, exit_status = self._run_command('sum', action='execute', phase='map', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) + self.assertEqual(0, exit_status, msg=str(errors)) + self.assertEqual('', errors, msg=str(errors)) self._compare_csv_files_time_sensitive(expected, output) expected, output, errors, exit_status = self._run_command('sum', action='execute', phase='reduce', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) + self.assertEqual(0, exit_status, msg=str(errors)) + self.assertEqual('', errors, msg=str(errors)) self._compare_csv_files_time_sensitive(expected, output) expected, output, errors, exit_status = self._run_command('sum', phase='map') - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) + self.assertEqual(0, exit_status, msg=str(errors)) + self.assertEqual('', errors, msg=str(errors)) self._compare_chunks(expected, output) expected, output, errors, exit_status = self._run_command('sum', phase='reduce') - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) + self.assertEqual(0, exit_status, msg=str(errors)) + self.assertEqual('', errors, msg=str(errors)) self._compare_chunks(expected, output) def assertInfoEqual(self, output, expected): @@ -380,7 +379,7 @@ def _run_command(self, name, action=None, phase=None, protocol=2): finally: os.remove(uncompressed_file) - return six.ensure_str(expected), six.ensure_str(output), six.ensure_str(errors), process.returncode + return expected.decode('utf-8'), output.decode('utf-8'), errors.decode('utf-8'), process.returncode _Chunk = namedtuple('Chunk', 'metadata body') diff --git a/tests/searchcommands/test_streaming_command.py b/tests/searchcommands/test_streaming_command.py index 579c334c..afb2e8ca 100644 --- a/tests/searchcommands/test_streaming_command.py +++ b/tests/searchcommands/test_streaming_command.py @@ -4,7 +4,6 @@ from . 
import chunked_data_stream as chunky - def test_simple_streaming_command(): @Configuration() class TestStreamingCommand(StreamingCommand): diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index cc3dd6f2..7b815491 100755 --- a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -19,12 +19,9 @@ from unittest import main, TestCase import os -import re import sys import tempfile import pytest -from splunklib import six -from splunklib.six.moves import range from splunklib.searchcommands import validators @@ -50,7 +47,7 @@ def test_boolean(self): for value in truth_values: for variant in value, value.capitalize(), value.upper(): - s = six.text_type(variant) + s = str(variant) self.assertEqual(validator.__call__(s), truth_values[value]) self.assertIsNone(validator.__call__(None)) @@ -64,7 +61,7 @@ def test_duration(self): validator = validators.Duration() for seconds in range(0, 25 * 60 * 60, 59): - value = six.text_type(seconds) + value = str(seconds) self.assertEqual(validator(value), seconds) self.assertEqual(validator(validator.format(seconds)), seconds) value = '%d:%02d' % (seconds / 60, seconds % 60) @@ -157,11 +154,10 @@ def test_integer(self): validator = validators.Integer() def test(integer): - for s in str(integer), six.text_type(integer): - value = validator.__call__(s) - self.assertEqual(value, integer) - self.assertIsInstance(value, int) - self.assertEqual(validator.format(integer), six.text_type(integer)) + value = validator.__call__(integer) + self.assertEqual(value, integer) + self.assertIsInstance(value, int) + self.assertEqual(validator.format(integer), str(integer)) test(2 * minsize) test(minsize) @@ -206,11 +202,11 @@ def test(float_val): float_val = float(float_val) except ValueError: assert False - for s in str(float_val), six.text_type(float_val): - value = validator.__call__(s) - self.assertAlmostEqual(value, float_val) - self.assertIsInstance(value, float) - self.assertEqual(validator.format(float_val), six.text_type(float_val)) + + value = validator.__call__(float_val) + self.assertAlmostEqual(value, float_val) + self.assertIsInstance(value, float) + self.assertEqual(validator.format(float_val), str(float_val)) test(2 * minsize) test(minsize) diff --git a/tests/test_binding.py b/tests/test_binding.py index 9f8b4029..ce3f444f 100755 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -14,13 +14,11 @@ # License for the specific language governing permissions and limitations # under the License. 
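A pattern worth noting before the test_binding.py changes that follow: with six.ensure_str gone, bytes read off the wire are compared either after an explicit .decode('utf-8') or directly against bytes literals. A minimal sketch of the equivalence (values are illustrative):

    body = b'foo=bar'                          # raw bytes, e.g. read from a mock HTTP handler
    assert body.decode('utf-8') == 'foo=bar'   # decode first, then compare as str
    assert body == 'foo=bar'.encode('utf-8')   # or compare directly as bytes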
- -from io import BytesIO +from http import server as BaseHTTPServer +from io import BytesIO, StringIO from threading import Thread +from urllib.request import Request, urlopen -from splunklib.six.moves import BaseHTTPServer -from splunklib.six.moves.urllib.request import Request, urlopen -from splunklib.six.moves.urllib.error import HTTPError from xml.etree.ElementTree import XML import json @@ -29,12 +27,11 @@ import unittest import socket import ssl -import splunklib.six.moves.http_cookies +import splunklib from splunklib import binding from splunklib.binding import HTTPError, AuthenticationError, UrlEncoded from splunklib import data -from splunklib import six import pytest @@ -106,7 +103,7 @@ def test_read_partial(self): def test_readable(self): txt = "abcd" - response = binding.ResponseReader(six.StringIO(txt)) + response = binding.ResponseReader(StringIO(txt)) self.assertTrue(response.readable()) def test_readinto_bytearray(self): @@ -310,7 +307,7 @@ def test_socket_gethostbyname(self): class TestUnicodeConnect(BindingTestCase): def test_unicode_connect(self): opts = self.opts.kwargs.copy() - opts['host'] = six.text_type(opts['host']) + opts['host'] = str(opts['host']) context = binding.connect(**opts) # Just check to make sure the service is alive response = context.get("/services") @@ -699,7 +696,7 @@ def test_namespace(self): for kwargs, expected in tests: namespace = binding.namespace(**kwargs) - for k, v in six.iteritems(expected): + for k, v in list(expected.items()): self.assertEqual(namespace[k], v) def test_namespace_fails(self): @@ -752,11 +749,11 @@ def test_preexisting_token(self): self.context._abspath("some/path/to/post/to")).encode('utf-8')) socket.write(("Host: %s:%s\r\n" % \ (self.context.host, self.context.port)).encode('utf-8')) - socket.write(("Accept-Encoding: identity\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) socket.write(("Authorization: %s\r\n" % \ self.context.token).encode('utf-8')) socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write(("\r\n").encode('utf-8')) + socket.write("\r\n".encode('utf-8')) socket.close() def test_preexisting_token_sans_splunk(self): @@ -783,8 +780,8 @@ def test_preexisting_token_sans_splunk(self): socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) socket.write(("Authorization: %s\r\n" % \ self.context.token).encode('utf-8')) - socket.write(("X-Splunk-Input-Mode: Streaming\r\n").encode('utf-8')) - socket.write(("\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) socket.close() def test_connect_with_preexisting_token_sans_user_and_pass(self): @@ -815,8 +812,8 @@ class TestPostWithBodyParam(unittest.TestCase): def test_post(self): def handler(url, message, **kwargs): - assert six.ensure_str(url) == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" - assert six.ensure_str(message["body"]) == "testkey=testvalue" + assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" + assert message["body"] == b"testkey=testvalue" return splunklib.data.Record({ "status": 200, "headers": [], @@ -828,7 +825,7 @@ def handler(url, message, **kwargs): def test_post_with_params_and_body(self): def handler(url, message, **kwargs): assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar?extrakey=extraval" - assert six.ensure_str(message["body"]) == "testkey=testvalue" + assert message["body"] == b"testkey=testvalue" return 
splunklib.data.Record({ "status": 200, "headers": [], @@ -840,7 +837,7 @@ def handler(url, message, **kwargs): def test_post_with_params_and_no_body(self): def handler(url, message, **kwargs): assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" - assert six.ensure_str(message["body"]) == "extrakey=extraval" + assert message["body"] == b"extrakey=extraval" return splunklib.data.Record({ "status": 200, "headers": [], @@ -899,7 +896,7 @@ def test_post_with_body_urlencoded(self): def check_response(handler): length = int(handler.headers.get('content-length', 0)) body = handler.rfile.read(length) - assert six.ensure_str(body) == "foo=bar" + assert body.decode('utf-8') == "foo=bar" with MockServer(POST=check_response): ctx = binding.connect(port=9093, scheme='http', token="waffle") @@ -909,7 +906,7 @@ def test_post_with_body_string(self): def check_response(handler): length = int(handler.headers.get('content-length', 0)) body = handler.rfile.read(length) - assert six.ensure_str(handler.headers['content-type']) == 'application/json' + assert handler.headers['content-type'] == 'application/json' assert json.loads(body)["baz"] == "baf" with MockServer(POST=check_response): @@ -921,8 +918,8 @@ def test_post_with_body_dict(self): def check_response(handler): length = int(handler.headers.get('content-length', 0)) body = handler.rfile.read(length) - assert six.ensure_str(handler.headers['content-type']) == 'application/x-www-form-urlencoded' - assert six.ensure_str(body) == 'baz=baf&hep=cat' or six.ensure_str(body) == 'hep=cat&baz=baf' + assert handler.headers['content-type'] == 'application/x-www-form-urlencoded' + assert body.decode('utf-8') == 'baz=baf&hep=cat' or body.decode('utf-8') == 'hep=cat&baz=baf' with MockServer(POST=check_response): ctx = binding.connect(port=9093, scheme='http', token="waffle") diff --git a/tests/test_examples.py b/tests/test_examples.py index 304b6bad..52319bcb 100755 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -24,7 +24,6 @@ from tests import testlib from splunklib import client -from splunklib import six DIR_PATH = os.path.dirname(os.path.realpath(__file__)) EXAMPLES_PATH = os.path.join(DIR_PATH, '..', 'examples') @@ -32,9 +31,9 @@ def check_multiline(testcase, first, second, message=None): """Assert that two multi-line strings are equal.""" - testcase.assertTrue(isinstance(first, six.string_types), + testcase.assertTrue(isinstance(first, str), 'First argument is not a string') - testcase.assertTrue(isinstance(second, six.string_types), + testcase.assertTrue(isinstance(second, str), 'Second argument is not a string') # Unix-ize Windows EOL first = first.replace("\r", "") @@ -74,7 +73,7 @@ def setUp(self): # Ignore result, it might already exist run("index.py create sdk-tests") - @pytest.mark.skipif(six.PY3, reason="Async needs work to support Python 3") + @pytest.mark.skip(reason="Async needs work to support Python 3") def test_async(self): result = run("async/async.py sync") self.assertEqual(result, 0) diff --git a/utils/__init__.py b/utils/__init__.py index b1bb77a5..da1c5803 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -16,12 +16,13 @@ from __future__ import absolute_import from utils.cmdopts import * -from splunklib import six + def config(option, opt, value, parser): assert opt == "--config" parser.load(value) + # Default Splunk cmdline rules RULES_SPLUNK = { 'config': { @@ -30,7 +31,7 @@ def config(option, opt, value, parser): 'callback': config, 'type': "string", 'nargs': "1", - 'help': "Load options from 
config file" + 'help': "Load options from config file" }, 'scheme': { 'flags': ["--scheme"], @@ -40,30 +41,30 @@ def config(option, opt, value, parser): 'host': { 'flags': ["--host"], 'default': "localhost", - 'help': "Host name (default 'localhost')" + 'help': "Host name (default 'localhost')" }, - 'port': { + 'port': { 'flags': ["--port"], 'default': "8089", - 'help': "Port number (default 8089)" + 'help': "Port number (default 8089)" }, 'app': { - 'flags': ["--app"], + 'flags': ["--app"], 'help': "The app context (optional)" }, 'owner': { - 'flags': ["--owner"], + 'flags': ["--owner"], 'help': "The user context (optional)" }, 'username': { 'flags': ["--username"], 'default': None, - 'help': "Username to login with" + 'help': "Username to login with" }, 'password': { - 'flags': ["--password"], + 'flags': ["--password"], 'default': None, - 'help': "Password to login with" + 'help': "Password to login with" }, 'version': { 'flags': ["--version"], @@ -84,28 +85,30 @@ def config(option, opt, value, parser): FLAGS_SPLUNK = list(RULES_SPLUNK.keys()) + # value: dict, args: [(dict | list | str)*] def dslice(value, *args): """Returns a 'slice' of the given dictionary value containing only the requested keys. The keys can be requested in a variety of ways, as an arg list of keys, as a list of keys, or as a dict whose key(s) represent - the source keys and whose corresponding values represent the resulting - key(s) (enabling key rename), or any combination of the above.""" + the source keys and whose corresponding values represent the resulting + key(s) (enabling key rename), or any combination of the above.""" result = {} for arg in args: if isinstance(arg, dict): - for k, v in six.iteritems(arg): - if k in value: + for k, v in (arg.items()): + if k in value: result[v] = value[k] elif isinstance(arg, list): for k in arg: - if k in value: + if k in value: result[k] = value[k] else: - if arg in value: + if arg in value: result[arg] = value[arg] return result + def parse(argv, rules=None, config=None, **kwargs): """Parse the given arg vector with the default Splunk command rules.""" parser_ = parser(rules, **kwargs) @@ -113,8 +116,8 @@ def parse(argv, rules=None, config=None, **kwargs): parser_.loadenv(config) return parser_.parse(argv).result + def parser(rules=None, **kwargs): """Instantiate a parser with the default Splunk command rules.""" rules = RULES_SPLUNK if rules is None else dict(RULES_SPLUNK, **rules) return Parser(rules, **kwargs) - diff --git a/utils/cmdopts.py b/utils/cmdopts.py index b0cbb732..a3a715ef 100644 --- a/utils/cmdopts.py +++ b/utils/cmdopts.py @@ -14,8 +14,6 @@ """Command line utilities shared by command line tools & unit tests.""" -from __future__ import absolute_import -from __future__ import print_function from os import path from optparse import OptionParser import sys From 2c207d9b14f2c0d1ac5244acb898bc1e1e6abfef Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 16 May 2022 12:24:17 +0530 Subject: [PATCH 20/60] Update binding.py --- splunklib/binding.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index 6de146c4..5808ce09 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -297,8 +297,7 @@ def wrapper(self, *args, **kwargs): # an AuthenticationError and give up. with _handle_auth_error("Autologin failed."): self.login() - with _handle_auth_error("Autologin succeeded, but there was an auth error on next request. 
Something " - "is very wrong."): + with _handle_auth_error("Authentication Failed! If session token is used, it seems to have been expired."): return request_fun(self, *args, **kwargs) elif he.status == 401 and not self.autologin: raise AuthenticationError( From 0250984c1eaa413abe19c59b8d9dbc3cba468c03 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 16 May 2022 17:19:12 +0530 Subject: [PATCH 21/60] Update results.py --- splunklib/results.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/splunklib/results.py b/splunklib/results.py index 4a20b266..21c85c85 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -46,6 +46,8 @@ "JSONResultsReader" ] +import deprecation + class Message: """This class represents informational messages that Splunk interleaves in the results stream. @@ -149,7 +151,7 @@ def read(self, n=None): return response -#@deprecat("Use the JSONResultsReader function instead in conjuction with the 'output_mode' query param set to 'json'") +@deprecation.deprecated(details="Use the JSONResultsReader function instead in conjuction with the 'output_mode' query param set to 'json'") class ResultsReader: """This class returns dictionaries and Splunk messages from an XML results stream. From b8d5b9caf967db7a50389db3fc82a09f3b3cfbf8 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 16 May 2022 17:51:24 +0530 Subject: [PATCH 22/60] Update client.py --- splunklib/client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/splunklib/client.py b/splunklib/client.py index 0dc48564..f691db8b 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -701,6 +701,7 @@ def kvstore_owner(self, value): kvstore is refreshed, when the owner value is changed """ self._kvstore_owner = value + self.kvstore @property def kvstore(self): From ea0b34bb82e237b3d5ca47a14b3d2a89ceb92e58 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 17 May 2022 11:27:21 +0530 Subject: [PATCH 23/60] refactoring --- splunklib/client.py | 66 ++++++++++++-------------------- splunklib/modularinput/script.py | 11 +++--- splunklib/modularinput/utils.py | 3 +- 3 files changed, 31 insertions(+), 49 deletions(-) diff --git a/splunklib/client.py b/splunklib/client.py index f691db8b..4508a830 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -681,7 +681,7 @@ def splunk_version(self): :return: A ``tuple`` of ``integers``. """ if self._splunk_version is None: - self._splunk_version = tuple([int(p) for p in self.info['version'].split('.')]) + self._splunk_version = tuple(int(p) for p in self.info['version'].split('.')) return self._splunk_version @property @@ -977,11 +977,9 @@ def _load_atom_entry(self, response): elem = _load_atom(response, XNAME_ENTRY) if isinstance(elem, list): apps = [ele.entry.content.get('eai:appName') for ele in elem] - raise AmbiguousReferenceException( f"Fetch from server returned multiple entries for name '{elem[0].entry.title}' in apps {apps}.") - else: - return elem.entry + return elem.entry # Load the entity state record from the given response def _load_state(self, response): @@ -1022,8 +1020,7 @@ def _proper_namespace(self, owner=None, app=None, sharing=None): return (self.service.namespace['owner'], self.service.namespace['app'], self.service.namespace['sharing']) - else: - return owner, app, sharing + return owner, app, sharing def delete(self): owner, app, sharing = self._proper_namespace() @@ -1125,8 +1122,8 @@ def read(self, response): # In lower layers of the SDK, we end up trying to URL encode # text to be dispatched via HTTP. 
However, these links are already # URL encoded when they arrive, and we need to mark them as such. - unquoted_links = dict([(k, UrlEncoded(v, skip_encode=True)) - for k, v in list(results['links'].items())]) + unquoted_links = dict((k, UrlEncoded(v, skip_encode=True)) + for k, v in list(results['links'].items())) results['links'] = unquoted_links return results @@ -1728,8 +1725,7 @@ def __getitem__(self, key): except HTTPError as he: if he.status == 404: # No entity matching key raise KeyError(key) - else: - raise + raise def __contains__(self, key): # configs/conf-{name} never returns a 404. We have to post to properties/{name} @@ -2230,8 +2226,7 @@ def __getitem__(self, key): except HTTPError as he: if he.status == 404: # No entity matching kind and key raise KeyError((key, kind)) - else: - raise + raise else: # Iterate over all the kinds looking for matches. kind = None @@ -2243,22 +2238,19 @@ def __getitem__(self, key): entries = self._load_list(response) if len(entries) > 1: raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.") - elif len(entries) == 0: + if len(entries) == 0: pass - else: - if candidate is not None: # Already found at least one candidate - raise AmbiguousReferenceException( - f"Found multiple inputs named {key}, please specify a kind") - candidate = entries[0] + if candidate is not None: # Already found at least one candidate + raise AmbiguousReferenceException( + f"Found multiple inputs named {key}, please specify a kind") + candidate = entries[0] except HTTPError as he: if he.status == 404: pass # Just carry on to the next kind. - else: - raise + raise if candidate is None: raise KeyError(key) # Never found a match. - else: - return candidate + return candidate def __contains__(self, key): if isinstance(key, tuple) and len(key) == 2: @@ -2278,13 +2270,11 @@ def __contains__(self, key): entries = self._load_list(response) if len(entries) > 0: return True - else: - pass + pass except HTTPError as he: if he.status == 404: pass # Just carry on to the next kind. - else: - raise + raise return False def create(self, name, kind, **kwargs): @@ -2422,12 +2412,11 @@ def _get_kind_list(self, subpath=None): # The "tcp/ssl" endpoint is not a real input collection. if entry.title == 'all' or this_subpath == ['tcp', 'ssl']: continue - elif 'create' in [x.rel for x in entry.link]: + if 'create' in [x.rel for x in entry.link]: path = '/'.join(subpath + [entry.title]) kinds.append(path) - else: - subkinds = self._get_kind_list(subpath + [entry.title]) - kinds.extend(subkinds) + subkinds = self._get_kind_list(subpath + [entry.title]) + kinds.extend(subkinds) return kinds @property @@ -2471,10 +2460,9 @@ def kindpath(self, kind): """ if kind == 'tcp': return UrlEncoded('tcp/raw', skip_encode=True) - elif kind == 'splunktcp': + if kind == 'splunktcp': return UrlEncoded('tcp/cooked', skip_encode=True) - else: - return UrlEncoded(kind, skip_encode=True) + return UrlEncoded(kind, skip_encode=True) def list(self, *kinds, **kwargs): """Returns a list of inputs that are in the :class:`Inputs` collection. 
@@ -2569,8 +2557,7 @@ def list(self, *kinds, **kwargs): except HTTPError as e: if e.status == 404: continue # No inputs of this kind - else: - raise + raise entries = _load_atom_entries(response) if entries is None: continue # No inputs to process @@ -3111,15 +3098,13 @@ def __contains__(self, name): args = self.state.content['endpoints']['args'] if name in args: return True - else: - return Entity.__contains__(self, name) + return Entity.__contains__(self, name) def __getitem__(self, name): args = self.state.content['endpoint']['args'] if name in args: return args['item'] - else: - return Entity.__getitem__(self, name) + return Entity.__getitem__(self, name) @property def arguments(self): @@ -3283,8 +3268,7 @@ def suppressed(self): r = self._run_action("suppress") if r.suppressed == "1": return int(r.expiration) - else: - return 0 + return 0 def unsuppress(self): """Cancels suppression and makes this search run as scheduled. diff --git a/splunklib/modularinput/script.py b/splunklib/modularinput/script.py index 2cac0011..5df6d0fc 100644 --- a/splunklib/modularinput/script.py +++ b/splunklib/modularinput/script.py @@ -80,7 +80,7 @@ def run_script(self, args, event_writer, input_stream): event_writer.write_xml_document(scheme.to_xml()) return 0 - elif args[1].lower() == "--validate-arguments": + if args[1].lower() == "--validate-arguments": validation_definition = ValidationDefinition.parse(input_stream) try: self.validate_input(validation_definition) @@ -91,11 +91,10 @@ def run_script(self, args, event_writer, input_stream): event_writer.write_xml_document(root) return 1 - else: - err_string = "ERROR Invalid arguments to modular input script:" + ' '.join( - args) - event_writer._err.write(err_string) - return 1 + err_string = "ERROR Invalid arguments to modular input script:" + ' '.join( + args) + event_writer._err.write(err_string) + return 1 except Exception as e: event_writer.log(EventWriter.ERROR, str(e)) diff --git a/splunklib/modularinput/utils.py b/splunklib/modularinput/utils.py index 923dae04..6429c0a7 100644 --- a/splunklib/modularinput/utils.py +++ b/splunklib/modularinput/utils.py @@ -56,8 +56,7 @@ def parse_parameters(param_node): for mvp in param_node: parameters.append(mvp.text) return parameters - else: - raise ValueError(f"Invalid configuration scheme, {param_node.tag} tag unexpected.") + raise ValueError(f"Invalid configuration scheme, {param_node.tag} tag unexpected.") def parse_xml_data(parent_node, child_node_tag): data = {} From 079df7b133d7aa280ff4d2db3448a94679d6515a Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 17 May 2022 12:22:26 +0530 Subject: [PATCH 24/60] 2to3 code refactoring --- splunklib/client.py | 8 +++--- splunklib/results.py | 7 ++--- splunklib/searchcommands/decorators.py | 10 +++---- splunklib/searchcommands/eventing_command.py | 2 +- .../searchcommands/generating_command.py | 6 ++--- splunklib/searchcommands/search_command.py | 27 ++++++++++--------- splunklib/searchcommands/streaming_command.py | 7 +++-- 7 files changed, 32 insertions(+), 35 deletions(-) diff --git a/splunklib/client.py b/splunklib/client.py index 4508a830..48a8812c 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -977,6 +977,7 @@ def _load_atom_entry(self, response): elem = _load_atom(response, XNAME_ENTRY) if isinstance(elem, list): apps = [ele.entry.content.get('eai:appName') for ele in elem] + raise AmbiguousReferenceException( f"Fetch from server returned multiple entries for name '{elem[0].entry.title}' in apps {apps}.") return elem.entry @@ -2415,8 +2416,9 
@@ def _get_kind_list(self, subpath=None): if 'create' in [x.rel for x in entry.link]: path = '/'.join(subpath + [entry.title]) kinds.append(path) - subkinds = self._get_kind_list(subpath + [entry.title]) - kinds.extend(subkinds) + else: + subkinds = self._get_kind_list(subpath + [entry.title]) + kinds.extend(subkinds) return kinds @property @@ -3761,4 +3763,4 @@ def batch_save(self, *documents): data = json.dumps(documents) return json.loads( - self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) \ No newline at end of file diff --git a/splunklib/results.py b/splunklib/results.py index 21c85c85..86850710 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -206,10 +206,9 @@ def __init__(self, stream): def __iter__(self): return self - def next(self): + def __next__(self): return next(self._gen) - __next__ = next def _parse_results(self, stream): """Parse results and messages out of *stream*.""" @@ -318,11 +317,9 @@ def __init__(self, stream): def __iter__(self): return self - def next(self): + def __next__(self): return next(self._gen) - __next__ = next - def _parse_results(self, stream): """Parse results and messages out of *stream*.""" msg_type = None diff --git a/splunklib/searchcommands/decorators.py b/splunklib/searchcommands/decorators.py index 01159d50..47902969 100644 --- a/splunklib/searchcommands/decorators.py +++ b/splunklib/searchcommands/decorators.py @@ -191,7 +191,7 @@ def is_supported_by_protocol(version): if len(values) > 0: settings = sorted(list(values.items())) - settings = map(lambda n_v: f'{n_v[0]}={n_v[1]}', settings) + settings = [f'{n_v[0]}={n_v[1]}' for n_v in settings] raise AttributeError('Inapplicable configuration settings: ' + ', '.join(settings)) cls.configuration_setting_definitions = definitions @@ -416,21 +416,21 @@ def __init__(self, command): OrderedDict.__init__(self, ((option.name, item_class(command, option)) for (name, option) in definitions)) def __repr__(self): - text = 'Option.View([' + ','.join(map(lambda item: repr(item), self.values())) + '])' + text = 'Option.View([' + ','.join([repr(item) for item in list(self.values())]) + '])' return text def __str__(self): - text = ' '.join([str(item) for item in self.values() if item.is_set]) + text = ' '.join([str(item) for item in list(self.values()) if item.is_set]) return text # region Methods def get_missing(self): - missing = [item.name for item in self.values() if item.is_required and not item.is_set] + missing = [item.name for item in list(self.values()) if item.is_required and not item.is_set] return missing if len(missing) > 0 else None def reset(self): - for value in self.values(): + for value in list(self.values()): value.reset() # endregion diff --git a/splunklib/searchcommands/eventing_command.py b/splunklib/searchcommands/eventing_command.py index 20767a32..ab27d32e 100644 --- a/splunklib/searchcommands/eventing_command.py +++ b/splunklib/searchcommands/eventing_command.py @@ -137,7 +137,7 @@ def fix_up(cls, command): # N.B.: Does not use Python 2 dict copy semantics def iteritems(self): iteritems = SearchCommand.ConfigurationSettings.iteritems(self) - return map(lambda name_value: (name_value[0], 'events' if name_value[0] == 'type' else name_value[1]), iteritems) + return [(name_value[0], 'events' if name_value[0] == 'type' else name_value[1]) for name_value in iteritems] # N.B.: Does not use Python 3 dict view 
semantics diff --git a/splunklib/searchcommands/generating_command.py b/splunklib/searchcommands/generating_command.py index bf2527d9..139935b8 100644 --- a/splunklib/searchcommands/generating_command.py +++ b/splunklib/searchcommands/generating_command.py @@ -14,7 +14,6 @@ # License for the specific language governing permissions and limitations # under the License. -from __future__ import absolute_import, division, print_function, unicode_literals import sys from .decorators import ConfigurationSetting @@ -367,10 +366,9 @@ def iteritems(self): iteritems = SearchCommand.ConfigurationSettings.iteritems(self) version = self.command.protocol_version if version == 2: - iteritems = filter(lambda name_value1: name_value1[0] != 'distributed', iteritems) + iteritems = [name_value1 for name_value1 in iteritems if name_value1[0] != 'distributed'] if not self.distributed and self.type == 'streaming': - iteritems = map( - lambda name_value: (name_value[0], 'stateful') if name_value[0] == 'type' else (name_value[0], name_value[1]), iteritems) + iteritems = [(name_value[0], 'stateful') if name_value[0] == 'type' else (name_value[0], name_value[1]) for name_value in iteritems] return iteritems # N.B.: Does not use Python 3 dict view semantics diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index dd7a8989..0a1e0caa 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -290,7 +290,7 @@ def convert_value(value): except ValueError: return value - info = ObjectView(dict(map(lambda f_v: (convert_field(f_v[0]), convert_value(f_v[1])), zip(fields, values)))) + info = ObjectView(dict((convert_field(f_v[0]), convert_value(f_v[1])) for f_v in zip(fields, values))) try: count_map = info.countMap @@ -299,7 +299,7 @@ def convert_value(value): else: count_map = count_map.split(';') n = len(count_map) - info.countMap = dict(zip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2))) + info.countMap = dict(list(zip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2)))) try: msg_type = info.msgType @@ -307,7 +307,7 @@ def convert_value(value): except AttributeError: pass else: - messages = filter(lambda t_m: t_m[0] or t_m[1], zip(msg_type.split('\n'), msg_text.split('\n'))) + messages = [t_m for t_m in zip(msg_type.split('\n'), msg_text.split('\n')) if t_m[0] or t_m[1]] info.msg = [Message(message) for message in messages] del info.msgType @@ -449,7 +449,7 @@ def _map_metadata(self, argv): def _map(metadata_map): metadata = {} - for name, value in metadata_map.items(): + for name, value in list(metadata_map.items()): if isinstance(value, dict): value = _map(value) else: @@ -587,7 +587,8 @@ def _process_protocol_v1(self, argv, ifile, ofile): ifile = self._prepare_protocol_v1(argv, ifile, ofile) self._record_writer.write_record(dict( - (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in self._configuration.items())) + (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in + list(self._configuration.items()))) self.finish() elif argv[1] == '__EXECUTE__': @@ -937,7 +938,7 @@ def _read_csv_records(self, ifile): if len(mv_fieldnames) == 0: for values in reader: - yield OrderedDict(zip(fieldnames, values)) + yield OrderedDict(list(zip(fieldnames, values))) return for values in reader: @@ -1019,8 +1020,8 @@ def __repr__(self): """ definitions = type(self).configuration_setting_definitions - settings = map( - lambda setting: repr((setting.name, setting.__get__(self), 
setting.supporting_protocols)), definitions) + settings = [repr((setting.name, setting.__get__(self), setting.supporting_protocols)) for setting in + definitions] return '[' + ', '.join(settings) + ']' def __str__(self): @@ -1033,7 +1034,7 @@ def __str__(self): """ # text = ', '.join(imap(lambda (name, value): name + '=' + json_encode_string(unicode(value)), self.iteritems())) - text = ', '.join([f'{name}={json_encode_string(str(value))}' for (name, value) in self.items()]) + text = ', '.join([f'{name}={json_encode_string(str(value))}' for (name, value) in list(self.items())]) return text # region Methods @@ -1057,10 +1058,10 @@ def fix_up(cls, command_class): def iteritems(self): definitions = type(self).configuration_setting_definitions version = self.command.protocol_version - return filter( - lambda name_value1: name_value1[1] is not None, map( - lambda setting: (setting.name, setting.__get__(self)), filter( - lambda setting: setting.is_supported_by_protocol(version), definitions))) + return [name_value1 for name_value1 in [(setting.name, setting.__get__(self)) for setting in + [setting for setting in definitions if + setting.is_supported_by_protocol(version)]] if + name_value1[1] is not None] # N.B.: Does not use Python 3 dict view semantics diff --git a/splunklib/searchcommands/streaming_command.py b/splunklib/searchcommands/streaming_command.py index 40d69cb6..b3eb4375 100644 --- a/splunklib/searchcommands/streaming_command.py +++ b/splunklib/searchcommands/streaming_command.py @@ -175,12 +175,11 @@ def iteritems(self): version = self.command.protocol_version if version == 1: if self.required_fields is None: - iteritems = filter(lambda name_value: name_value[0] != 'clear_required_fields', iteritems) + iteritems = [name_value for name_value in iteritems if name_value[0] != 'clear_required_fields'] else: - iteritems = filter(lambda name_value2: name_value2[0] != 'distributed', iteritems) + iteritems = [name_value2 for name_value2 in iteritems if name_value2[0] != 'distributed'] if not self.distributed: - iteritems = map( - lambda name_value1: (name_value1[0], 'stateful') if name_value1[0] == 'type' else (name_value1[0], name_value1[1]), iteritems) + iteritems = [(name_value1[0], 'stateful') if name_value1[0] == 'type' else (name_value1[0], name_value1[1]) for name_value1 in iteritems] return iteritems # N.B.: Does not use Python 3 dict view semantics From 3af370d78c0bc7c65456bc197f2cd11358c91256 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 17 May 2022 13:53:43 +0530 Subject: [PATCH 25/60] test case fix --- splunklib/client.py | 12 ++++++++---- tests/test_input.py | 25 +++++++++++++------------ 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/splunklib/client.py b/splunklib/client.py index 48a8812c..ecd19948 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -1726,7 +1726,8 @@ def __getitem__(self, key): except HTTPError as he: if he.status == 404: # No entity matching key raise KeyError(key) - raise + else: + raise def __contains__(self, key): # configs/conf-{name} never returns a 404. We have to post to properties/{name} @@ -2248,7 +2249,8 @@ def __getitem__(self, key): except HTTPError as he: if he.status == 404: pass # Just carry on to the next kind. - raise + else: + raise if candidate is None: raise KeyError(key) # Never found a match. return candidate @@ -2275,7 +2277,8 @@ def __contains__(self, key): except HTTPError as he: if he.status == 404: pass # Just carry on to the next kind. 
- raise + else: + raise return False def create(self, name, kind, **kwargs): @@ -2559,7 +2562,8 @@ def list(self, *kinds, **kwargs): except HTTPError as e: if e.status == 404: continue # No inputs of this kind - raise + else: + raise entries = _load_atom_entries(response) if entries is None: continue # No inputs to process diff --git a/tests/test_input.py b/tests/test_input.py index f6d01b29..282ac70c 100755 --- a/tests/test_input.py +++ b/tests/test_input.py @@ -21,7 +21,6 @@ from splunklib import client - def highest_port(service, base_port, *kinds): """Find the first port >= base_port not in use by any input in kinds.""" highest_port = base_port @@ -45,7 +44,7 @@ def tearDown(self): def create_tcp_input(self, base_port, kind, **options): port = base_port - while True: # Find the next unbound port + while True: # Find the next unbound port try: input = self.service.inputs.create(str(port), kind, **options) return input @@ -66,7 +65,7 @@ def test_cannot_create_with_restrictToHost_in_name(self): ) def test_create_tcp_ports_with_restrictToHost(self): - for kind in ['tcp', 'splunktcp']: # Multiplexed UDP ports are not supported + for kind in ['tcp', 'splunktcp']: # Multiplexed UDP ports are not supported # Make sure we can create two restricted inputs on the same port boris = self.service.inputs.create(str(self.base_port), kind, restrictToHost='boris') natasha = self.service.inputs.create(str(self.base_port), kind, restrictToHost='natasha') @@ -101,7 +100,7 @@ def test_unrestricted_to_restricted_collision(self): unrestricted.delete() def test_update_restrictToHost_fails(self): - for kind in ['tcp', 'splunktcp']: # No UDP, since it's broken in Splunk + for kind in ['tcp', 'splunktcp']: # No UDP, since it's broken in Splunk boris = self.create_tcp_input(self.base_port, kind, restrictToHost='boris') self.assertRaises( @@ -171,21 +170,22 @@ def test_oneshot(self): def f(): index.refresh() - return int(index['totalEventCount']) == eventCount+4 + return int(index['totalEventCount']) == eventCount + 4 + self.assertEventuallyTrue(f, timeout=60) def test_oneshot_on_nonexistant_file(self): name = testlib.tmpname() self.assertRaises(HTTPError, - self.service.inputs.oneshot, name) + self.service.inputs.oneshot, name) class TestInput(testlib.SDKTestCase): def setUp(self): super().setUp() inputs = self.service.inputs - unrestricted_port = str(highest_port(self.service, 10000, 'tcp', 'splunktcp', 'udp')+1) - restricted_port = str(highest_port(self.service, int(unrestricted_port)+1, 'tcp', 'splunktcp')+1) + unrestricted_port = str(highest_port(self.service, 10000, 'tcp', 'splunktcp', 'udp') + 1) + restricted_port = str(highest_port(self.service, int(unrestricted_port) + 1, 'tcp', 'splunktcp') + 1) test_inputs = [{'kind': 'tcp', 'name': unrestricted_port, 'host': 'sdk-test'}, {'kind': 'udp', 'name': unrestricted_port, 'host': 'sdk-test'}, {'kind': 'tcp', 'name': 'boris:' + restricted_port, 'host': 'sdk-test'}] @@ -223,7 +223,7 @@ def test_lists_modular_inputs(self): self.uncheckedRestartSplunk() inputs = self.service.inputs - if ('abcd','test2') not in inputs: + if ('abcd', 'test2') not in inputs: inputs.create('abcd', 'test2', field1='boris') input = inputs['abcd', 'test2'] @@ -268,19 +268,19 @@ def test_update(self): @pytest.mark.skip('flaky') def test_delete(self): inputs = self.service.inputs - remaining = len(self._test_entities)-1 + remaining = len(self._test_entities) - 1 for input_entity in self._test_entities.values(): name = input_entity.name kind = input_entity.kind self.assertTrue(name in inputs) 
- self.assertTrue((name,kind) in inputs) + self.assertTrue((name, kind) in inputs) if remaining == 0: inputs.delete(name) self.assertFalse(name in inputs) else: if not name.startswith('boris'): self.assertRaises(client.AmbiguousReferenceException, - inputs.delete, name) + inputs.delete, name) self.service.inputs.delete(name, kind) self.assertFalse((name, kind) in inputs) self.assertRaises(client.HTTPError, @@ -290,4 +290,5 @@ def test_delete(self): if __name__ == "__main__": import unittest + unittest.main() From 3a26633decaa35e10503de653c17f49f12d57067 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 17 May 2022 14:09:45 +0530 Subject: [PATCH 26/60] sonar changes --- tests/test_kvstore_batch.py | 1 - tests/test_kvstore_conf.py | 1 - tests/test_kvstore_data.py | 5 ++--- tests/test_utils.py | 3 +-- 4 files changed, 3 insertions(+), 7 deletions(-) diff --git a/tests/test_kvstore_batch.py b/tests/test_kvstore_batch.py index 74a0c3b9..9c2f3afe 100755 --- a/tests/test_kvstore_batch.py +++ b/tests/test_kvstore_batch.py @@ -22,7 +22,6 @@ class KVStoreBatchTestCase(testlib.SDKTestCase): def setUp(self): super().setUp() - # self.service.namespace['owner'] = 'nobody' self.service.namespace['app'] = 'search' confs = self.service.kvstore if 'test' in confs: diff --git a/tests/test_kvstore_conf.py b/tests/test_kvstore_conf.py index eba8996f..beca1f69 100755 --- a/tests/test_kvstore_conf.py +++ b/tests/test_kvstore_conf.py @@ -20,7 +20,6 @@ class KVStoreConfTestCase(testlib.SDKTestCase): def setUp(self): super().setUp() - #self.service.namespace['owner'] = 'nobody' self.service.namespace['app'] = 'search' self.confs = self.service.kvstore if ('test' in self.confs): diff --git a/tests/test_kvstore_data.py b/tests/test_kvstore_data.py index 2f36032f..5627921f 100755 --- a/tests/test_kvstore_data.py +++ b/tests/test_kvstore_data.py @@ -23,7 +23,6 @@ class KVStoreDataTestCase(testlib.SDKTestCase): def setUp(self): super().setUp() - # self.service.namespace['owner'] = 'nobody' self.service.namespace['app'] = 'search' self.confs = self.service.kvstore if ('test' in self.confs): @@ -84,8 +83,8 @@ def test_params_data_type_conversion(self): self.assertEqual(len(data), 20) for x in range(20): self.assertEqual(data[x]['data'], 39 - x) - self.assertTrue(not 'ignore' in data[x]) - self.assertTrue(not '_key' in data[x]) + self.assertTrue('ignore' not in data[x]) + self.assertTrue('_key' not in data[x]) def tearDown(self): if ('test' in self.confs): diff --git a/tests/test_utils.py b/tests/test_utils.py index 52137f72..cd728cf6 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,5 @@ from tests import testlib - -from utils import * +from utils import dslice TEST_DICT = { 'username': 'admin', From 924a0ebdfc558084f9eeafd8d81f0ebfeab4ece1 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 17 May 2022 15:43:47 +0530 Subject: [PATCH 27/60] adding else after raise --- splunklib/binding.py | 14 +++++++++----- splunklib/client.py | 9 ++++++--- splunklib/results.py | 3 ++- 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index 5808ce09..d0b21b6b 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -235,7 +235,8 @@ def _handle_auth_error(msg): except HTTPError as he: if he.status == 401: raise AuthenticationError(msg, he) - raise + else: + raise def _authentication(request_fun): @@ -302,7 +303,8 @@ def wrapper(self, *args, **kwargs): elif he.status == 401 and not self.autologin: raise AuthenticationError( "Request failed: Session is not 
logged in.", he) - raise + else: + raise return wrapper @@ -921,7 +923,8 @@ def login(self): except HTTPError as he: if he.status == 401: raise AuthenticationError("Login failed.", he) - raise + else: + raise def logout(self): """Forgets the current session token, and cookies.""" @@ -1299,8 +1302,9 @@ def request(self, url, message, **kwargs): except Exception: if self.retries <= 0: raise - time.sleep(self.retryDelay) - self.retries -= 1 + else: + time.sleep(self.retryDelay) + self.retries -= 1 response = record(response) if 400 <= response.status: raise HTTPError(response) diff --git a/splunklib/client.py b/splunklib/client.py index ecd19948..99a2c778 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -1272,7 +1272,8 @@ def __getitem__(self, key): except HTTPError as he: if he.status == 404: # No entity matching key and namespace. raise KeyError(key) - raise + else: + raise def __iter__(self, **kwargs): """Iterate over the entities in the collection. @@ -1634,7 +1635,8 @@ def delete(self, name, **params): # KeyError. if he.status == 404: raise KeyError(f"No such entity {name}") - raise + else: + raise return self def get(self, name="", owner=None, app=None, sharing=None, **query): @@ -2228,7 +2230,8 @@ def __getitem__(self, key): except HTTPError as he: if he.status == 404: # No entity matching kind and key raise KeyError((key, kind)) - raise + else: + raise else: # Iterate over all the kinds looking for matches. kind = None diff --git a/splunklib/results.py b/splunklib/results.py index 86850710..2e11c549 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -268,7 +268,8 @@ def _parse_results(self, stream): # splunk that is described in __init__. if 'no element found' in pe.msg: return - raise + else: + raise class JSONResultsReader: From ed117bcc6ce2e62a04c92ec6480ec33b6c442d44 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 17 May 2022 15:53:42 +0530 Subject: [PATCH 28/60] 2to3 suggested changes --- tests/searchcommands/test_decorators.py | 12 ++++++------ tests/searchcommands/test_internals_v1.py | 8 ++++---- tests/searchcommands/test_internals_v2.py | 6 +++--- tests/test_conf.py | 2 +- tests/test_input.py | 10 +++++----- utils/__init__.py | 3 +-- 6 files changed, 20 insertions(+), 21 deletions(-) diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index ce0b811b..d258729c 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -356,9 +356,9 @@ def test_option(self): options.reset() missing = options.get_missing() - self.assertListEqual(missing, [option.name for option in options.values() if option.is_required]) - self.assertListEqual(presets, [str(option) for option in options.values() if option.value is not None]) - self.assertListEqual(presets, [str(option) for option in options.values() if str(option) != option.name + '=None']) + self.assertListEqual(missing, [option.name for option in list(options.values()) if option.is_required]) + self.assertListEqual(presets, [str(option) for option in list(options.values()) if option.value is not None]) + self.assertListEqual(presets, [str(option) for option in list(options.values()) if str(option) != option.name + '=None']) test_option_values = { validators.Boolean: ('0', 'non-boolean value'), @@ -375,7 +375,7 @@ def test_option(self): validators.RegularExpression: ('\\s+', '(poorly formed regular expression'), validators.Set: ('bar', 'non-existent set entry')} - for option in options.values(): + for option in 
list(options.values()): validator = option.validator if validator is None: @@ -434,9 +434,9 @@ def test_option(self): self.maxDiff = None tuplewrap = lambda x: x if isinstance(x, tuple) else (x,) - invert = lambda x: {v: k for k, v in x.items()} + invert = lambda x: {v: k for k, v in list(x.items())} - for x in command.options.values(): + for x in list(command.options.values()): # isinstance doesn't work for some reason if type(x.value).__name__ == 'Code': self.assertEqual(expected[x.name], x.value.source) diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py index b2ee99fc..a6a68840 100755 --- a/tests/searchcommands/test_internals_v1.py +++ b/tests/searchcommands/test_internals_v1.py @@ -55,7 +55,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, options) - for option in command.options.values(): + for option in list(command.options.values()): if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) continue @@ -72,7 +72,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, options + fieldnames) - for option in command.options.values(): + for option in list(command.options.values()): if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) continue @@ -87,7 +87,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, ['required_option=true'] + fieldnames) - for option in command.options.values(): + for option in list(command.options.values()): if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) @@ -282,7 +282,7 @@ def test_input_header(self): 'sentence': 'hello world!'} input_header = InputHeader() - text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', collection.items(), '') + '\n' + text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', list(collection.items()), '') + '\n' with closing(StringIO(text)) as input_file: input_header.read(input_file) diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index 091a816f..a4db4e03 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -205,7 +205,7 @@ def test_record_writer_with_random_data(self, save_recording=False): test_data['metrics'] = metrics - for name, metric in metrics.items(): + for name, metric in list(metrics.items()): writer.write_metric(name, metric) self.assertEqual(writer._chunk_count, 0) @@ -220,8 +220,8 @@ def test_record_writer_with_random_data(self, save_recording=False): self.assertListEqual(writer._inspector['messages'], messages) self.assertDictEqual( - dict(k_v for k_v in writer._inspector.items() if k_v[0].startswith('metric.')), - dict(('metric.' + k_v1[0], k_v1[1]) for k_v1 in metrics.items())) + dict(k_v for k_v in list(writer._inspector.items()) if k_v[0].startswith('metric.')), + dict(('metric.' 
+ k_v1[0], k_v1[1]) for k_v1 in list(metrics.items()))) writer.flush(finished=True) diff --git a/tests/test_conf.py b/tests/test_conf.py index e5429cfa..6b1f9b09 100755 --- a/tests/test_conf.py +++ b/tests/test_conf.py @@ -87,7 +87,7 @@ def test_confs(self): testlib.tmpname(): testlib.tmpname()} stanza.submit(values) stanza.refresh() - for key, value in values.items(): + for key, value in list(values.items()): self.assertTrue(key in stanza) self.assertEqual(value, stanza[key]) diff --git a/tests/test_input.py b/tests/test_input.py index 282ac70c..26943cd9 100755 --- a/tests/test_input.py +++ b/tests/test_input.py @@ -200,7 +200,7 @@ def setUp(self): def tearDown(self): super().tearDown() - for entity in self._test_entities.values(): + for entity in list(self._test_entities.values()): try: self.service.inputs.delete( kind=entity.kind, @@ -231,7 +231,7 @@ def test_lists_modular_inputs(self): def test_create(self): inputs = self.service.inputs - for entity in self._test_entities.values(): + for entity in list(self._test_entities.values()): self.check_entity(entity) self.assertTrue(isinstance(entity, client.Input)) @@ -242,7 +242,7 @@ def test_get_kind_list(self): def test_read(self): inputs = self.service.inputs - for this_entity in self._test_entities.values(): + for this_entity in list(self._test_entities.values()): kind, name = this_entity.kind, this_entity.name read_entity = inputs[name, kind] self.assertEqual(this_entity.kind, read_entity.kind) @@ -258,7 +258,7 @@ def test_read_indiviually(self): def test_update(self): inputs = self.service.inputs - for entity in self._test_entities.values(): + for entity in list(self._test_entities.values()): kind, name = entity.kind, entity.name kwargs = {'host': 'foo'} entity.update(**kwargs) @@ -269,7 +269,7 @@ def test_update(self): def test_delete(self): inputs = self.service.inputs remaining = len(self._test_entities) - 1 - for input_entity in self._test_entities.values(): + for input_entity in list(self._test_entities.values()): name = input_entity.name kind = input_entity.kind self.assertTrue(name in inputs) diff --git a/utils/__init__.py b/utils/__init__.py index 73f6138c..3a2b48de 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -14,7 +14,6 @@ """Utility module shared by the SDK unit tests.""" -from __future__ import absolute_import from utils.cmdopts import * @@ -96,7 +95,7 @@ def dslice(value, *args): result = {} for arg in args: if isinstance(arg, dict): - for k, v in (arg.items()): + for k, v in (list(arg.items())): if k in value: result[v] = value[k] elif isinstance(arg, list): From 468c92ae60de9ac799a05ca8091b09ca447ea83f Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 25 May 2022 14:12:32 +0530 Subject: [PATCH 29/60] Update Makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 2c56bbdb..9f1bbd8b 100644 --- a/Makefile +++ b/Makefile @@ -34,7 +34,7 @@ docs: .PHONY: test test: @echo "$(ATTN_COLOR)==> test $(NO_COLOR)" - @tox -e py39 + @tox -e py37,py39 .PHONY: test_specific test_specific: From c772272fd15fec4f21be7c0eccf13879376eafd7 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Fri, 3 Jun 2022 18:25:32 +0530 Subject: [PATCH 30/60] refractor changes --- splunklib/searchcommands/internals.py | 2 +- tests/testlib.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py index db8d363c..eadf2b05 100644 --- a/splunklib/searchcommands/internals.py +++ 
b/splunklib/searchcommands/internals.py @@ -20,7 +20,7 @@ import re import sys import warnings -import urllib +import urllib.parse from io import TextIOWrapper, StringIO from collections import deque, namedtuple from collections import OrderedDict diff --git a/tests/testlib.py b/tests/testlib.py index 667919b0..ac8a3e1e 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -25,7 +25,6 @@ # Run the test suite on the SDK without installing it. sys.path.insert(0, '../') -import splunklib.client as client from time import sleep from datetime import datetime, timedelta @@ -168,7 +167,7 @@ def install_app_from_collection(self, name): self.service.post("apps/local", **kwargs) except client.HTTPError as he: if he.status == 400: - raise IOError("App %s not found in app collection" % name) + raise IOError(f"App {name} not found in app collection") if self.service.restart_required: self.service.restart(120) self.installedApps.append(name) From ff3f8140a332b76fb8013d9edd0c4477bfd76056 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 12 Oct 2022 11:52:29 +0530 Subject: [PATCH 31/60] Update test.yml Test execution with Python v3.10 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1d570141..2b6f7ea6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,7 @@ jobs: matrix: os: - ubuntu-latest - python: [ 3.7, 3.9] + python: [ 3.7, 3.9, 3.10] splunk-version: - "8.2" - "latest" From 5094e0eaa3c15ce1e1ae487afaba5792d2d08437 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 12 Oct 2022 11:54:09 +0530 Subject: [PATCH 32/60] Update test.yml Test execution with Python v3.10.7 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2b6f7ea6..e9290b08 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,7 @@ jobs: matrix: os: - ubuntu-latest - python: [ 3.7, 3.9, 3.10] + python: [ 3.7, 3.9, 3.10.7] splunk-version: - "8.2" - "latest" From 917cf04b6e47f37ec571cd4a932223ec1a28e86d Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Thu, 13 Oct 2022 12:38:37 +0530 Subject: [PATCH 33/60] updated binding.py --- splunklib/__init__.py | 2 +- splunklib/binding.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/splunklib/__init__.py b/splunklib/__init__.py index f3b39891..11894e77 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -74,6 +74,6 @@ def assertRegex(self, *args, **kwargs): return getattr(self, "assertRegex")(*args, **kwargs) -__version_info__ = (1, 6, 19) +__version_info__ = (1, 7, 2) __version__ = ".".join(map(str, __version_info__)) diff --git a/splunklib/binding.py b/splunklib/binding.py index 66db9899..607da46e 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -533,9 +533,9 @@ def _auth_headers(self): if self.has_cookies(): return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))] elif self.basic and (self.username and self.password): - token = 'Basic %s' % b64encode(("%s:%s" % (self.username, self.password)).encode('utf-8')).decode('ascii') + token = f'Basic {b64encode(("%s:%s" % (self.username, self.password)).encode("utf-8")).decode("ascii")}' elif self.bearerToken: - token = 'Bearer %s' % self.bearerToken + token = f'Bearer {self.bearerToken}' elif self.token is _NoAuthenticationToken: token = [] else: @@ -543,7 +543,7 @@ def _auth_headers(self): if 
self.token.startswith('Splunk '): token = self.token else: - token = 'Splunk %s' % self.token + token = f'Splunk {self.token}' if token: header.append(("Authorization", token)) if self.get_cookies(): From 8cc4c210b15e6482b266250d3ed54f7ad5da3a11 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Fri, 4 Nov 2022 11:15:08 +0530 Subject: [PATCH 34/60] updated comments examples --- splunklib/binding.py | 2 +- splunklib/client.py | 24 ++++++++++++------------ splunklib/results.py | 14 +++++++------- tests/test_job.py | 20 ++++++++++---------- utils/cmdopts.py | 2 +- 5 files changed, 31 insertions(+), 31 deletions(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index 607da46e..2dfc6eab 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -271,7 +271,7 @@ def _authentication(request_fun): def f(): c.get("/services") return 42 - print _authentication(f) + print(_authentication(f)) """ @wraps(request_fun) diff --git a/splunklib/client.py b/splunklib/client.py index ad72b39e..a8c5ac34 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -54,7 +54,7 @@ are subclasses of :class:`Entity`. An ``Entity`` object has fields for its attributes, and methods that are specific to each kind of entity. For example:: - print my_app['author'] # Or: print my_app.author + print(my_app['author']) # Or: print(my_app.author) my_app.package() # Creates a compressed package of this application """ @@ -1393,7 +1393,7 @@ def __iter__(self, **kwargs): c = client.connect(...) saved_searches = c.saved_searches for entity in saved_searches: - print "Saved search named %s" % entity.name + print(f"Saved search named {entity.name}") """ for item in self.iter(**kwargs): @@ -2880,10 +2880,10 @@ def results(self, **query_params): for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results - print '%s: %s' % (result.type, result.message) + print(f'{result.type}: {result.message}') elif isinstance(result, dict): # Normal events are returned as dicts - print result + print(result) assert rr.is_preview == False Results are not available until the job has finished. If called on @@ -2924,14 +2924,14 @@ def preview(self, **query_params): for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results - print '%s: %s' % (result.type, result.message) + print(f'{result.type}: {result.message}') elif isinstance(result, dict): # Normal events are returned as dicts - print result + print(result) if rr.is_preview: - print "Preview of a running search job." + print("Preview of a running search job.") else: - print "Job is finished. Results are final." + print("Job is finished. 
Results are final.") This method makes one roundtrip to the server, plus at most two more if @@ -3097,10 +3097,10 @@ def export(self, query, **params): for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results - print '%s: %s' % (result.type, result.message) + print(f'{result.type}: {result.message}') elif isinstance(result, dict): # Normal events are returned as dicts - print result + print(result) assert rr.is_preview == False Running an export search is more efficient as it streams the results @@ -3153,10 +3153,10 @@ def oneshot(self, query, **params): for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results - print '%s: %s' % (result.type, result.message) + print(f'{result.type}: {result.message}') elif isinstance(result, dict): # Normal events are returned as dicts - print result + print(result) assert rr.is_preview == False The ``oneshot`` method makes a single roundtrip to the server (as opposed diff --git a/splunklib/results.py b/splunklib/results.py index 2e11c549..8420cf3d 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -29,7 +29,7 @@ reader = ResultsReader(result_stream) for item in reader: print(item) - print "Results are a preview: %s" % reader.is_preview + print(f"Results are a preview: {reader.is_preview}") """ from io import BufferedReader, BytesIO @@ -174,10 +174,10 @@ class ResultsReader: reader = results.ResultsReader(response) for result in reader: if isinstance(result, dict): - print "Result: %s" % result + print(f"Result: {result}") elif isinstance(result, results.Message): - print "Message: %s" % result - print "is_preview = %s " % reader.is_preview + print(f"Message: {result}") + print(f"is_preview = {reader.is_preview}") """ # Be sure to update the docstrings of client.Jobs.oneshot, @@ -292,10 +292,10 @@ class JSONResultsReader: reader = results.JSONResultsReader(response) for result in reader: if isinstance(result, dict): - print "Result: %s" % result + print(f"Result: {result}") elif isinstance(result, results.Message): - print "Message: %s" % result - print "is_preview = %s " % reader.is_preview + print(f"Message: {result}") + print(f"is_preview = {reader.is_preview}") """ # Be sure to update the docstrings of client.Jobs.oneshot, diff --git a/tests/test_job.py b/tests/test_job.py index ef4bd69a..bab74f65 100755 --- a/tests/test_job.py +++ b/tests/test_job.py @@ -85,10 +85,10 @@ def test_export_docstring_sample(self): for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results - pass #print '%s: %s' % (result.type, result.message) + pass #print(f'{result.type}: {result.message}') elif isinstance(result, dict): # Normal events are returned as dicts - pass #print result + pass #print(result) assert rr.is_preview == False def test_results_docstring_sample(self): @@ -101,10 +101,10 @@ def test_results_docstring_sample(self): for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results - pass #print '%s: %s' % (result.type, result.message) + pass #print(f'{result.type}: {result.message}') elif isinstance(result, dict): # Normal events are returned as dicts - pass #print result + pass #print(result) assert rr.is_preview == False def test_preview_docstring_sample(self): @@ -116,14 +116,14 @@ def test_preview_docstring_sample(self): for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results - pass 
#print '%s: %s' % (result.type, result.message) + pass #print(f'{result.type}: {result.message}') elif isinstance(result, dict): # Normal events are returned as dicts - pass #print result + pass #print(result) if rr.is_preview: - pass #print "Preview of a running search job." + pass #print("Preview of a running search job.") else: - pass #print "Job is finished. Results are final." + pass #print("Job is finished. Results are final.") def test_oneshot_docstring_sample(self): from splunklib import client @@ -133,10 +133,10 @@ def test_oneshot_docstring_sample(self): for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results - pass #print '%s: %s' % (result.type, result.message) + pass #print(f'{result.type}: {result.message}') elif isinstance(result, dict): # Normal events are returned as dicts - pass #print result + pass #print(result) assert rr.is_preview == False def test_normal_job_with_garbage_fails(self): diff --git a/utils/cmdopts.py b/utils/cmdopts.py index a3a715ef..e9fffb3b 100644 --- a/utils/cmdopts.py +++ b/utils/cmdopts.py @@ -23,7 +23,7 @@ # Print the given message to stderr, and optionally exit def error(message, exitcode = None): - print("Error: %s" % message, file=sys.stderr) + print(f"Error: {message}", file=sys.stderr) if exitcode is not None: sys.exit(exitcode) From bf40010ed603c1f35a72ea9b437f0c672024c8e3 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 20 Dec 2022 17:07:44 +0530 Subject: [PATCH 35/60] added access to finished flag within metadata --- splunklib/searchcommands/internals.py | 3 +++ splunklib/searchcommands/search_command.py | 4 +++- tests/test_utils.py | 5 +---- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py index eadf2b05..5389d283 100644 --- a/splunklib/searchcommands/internals.py +++ b/splunklib/searchcommands/internals.py @@ -438,6 +438,9 @@ class ObjectView: def __init__(self, dictionary): self.__dict__ = dictionary + def update(self, obj): + self.__dict__.update(obj.__dict__) + def __repr__(self): return repr(self.__dict__) diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index 0a1e0caa..084ebb4b 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -967,7 +967,9 @@ def _execute_v2(self, ifile, process): self._finished = getattr(metadata, 'finished', False) self._record_writer.is_flushed = False - + # metadata.update(self._metadata) + # self._metadata = metadata + self._metadata.update(metadata) self._execute_chunk_v2(process, result) self._record_writer.write_chunk(finished=self._finished) diff --git a/tests/test_utils.py b/tests/test_utils.py index eabf2cff..922d380f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -89,10 +89,7 @@ def checkFilePermissions(dir_path): path = os.path.join(dir_path, file) if os.path.isfile(path): permission = oct(os.stat(path).st_mode) - if sys.version_info >= (3, 0): - self.assertEqual(permission, '0o100644') - else : - self.assertEqual(permission, '0100644') + self.assertEqual(permission, '0o100644') else: checkFilePermissions(path) From 72372f9d37758aedb208de7431e7efca0db5848e Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 20 Dec 2022 17:10:40 +0530 Subject: [PATCH 36/60] Update internals.py --- splunklib/searchcommands/internals.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/splunklib/searchcommands/internals.py 
b/splunklib/searchcommands/internals.py index 5389d283..6bbec4b3 100644 --- a/splunklib/searchcommands/internals.py +++ b/splunklib/searchcommands/internals.py @@ -805,7 +805,8 @@ def write_metadata(self, configuration): metadata = chain(configuration.items(), (('inspector', self._inspector if self._inspector else None),)) self._write_chunk(metadata, '') - self.write('\n') + # Removed additional new line + # self.write('\n') self._clear() def write_metric(self, name, value): From 14cdb92d4a5e65398885a79d54af5c5869594255 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 21 Dec 2022 14:15:13 +0530 Subject: [PATCH 37/60] Update test_search_command.py --- tests/searchcommands/test_search_command.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py index c79e528e..0aadc8db 100755 --- a/tests/searchcommands/test_search_command.py +++ b/tests/searchcommands/test_search_command.py @@ -193,7 +193,7 @@ def test_process_scpv2(self): expected = ( 'chunked 1.0,68,0\n' - '{"inspector":{"messages":[["INFO","test command configuration: "]]}}\n' + '{"inspector":{"messages":[["INFO","test command configuration: "]]}}' 'chunked 1.0,17,32\n' '{"finished":true}test,__mv_test\r\n' 'data,\r\n' From 8c90d1e062ba65d0292d604a4611efda98592856 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 17 Jan 2023 14:01:10 +0530 Subject: [PATCH 38/60] refactoring - rearranged imports - refactored code based on Pylint recommendations --- CHANGELOG.md | 14 ++++++++++++++ splunklib/client.py | 8 ++++---- tests/modularinput/modularinput_testlib.py | 6 +++--- tests/modularinput/test_input_definition.py | 2 +- tests/searchcommands/test_decorators.py | 2 +- tests/searchcommands/test_internals_v1.py | 6 ++---- tests/test_app.py | 2 +- tests/test_binding.py | 4 ++-- tests/test_kvstore_batch.py | 4 +--- tests/test_kvstore_conf.py | 7 ++++--- tests/test_kvstore_data.py | 4 ++-- tests/test_modular_input.py | 2 +- tests/test_modular_input_kinds.py | 4 ++-- tests/test_results.py | 6 +++--- tests/test_saved_search.py | 12 ++++++------ tests/test_utils.py | 3 +-- 16 files changed, 48 insertions(+), 38 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ceda287..c1759f7d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Splunk Enterprise SDK for Python Changelog +## Version 2.0.0-beta + +### Feature updates +* `ensure_binary`, `ensure_str`, `ensure_text` and `assert_regex` utility methods have been migrated from `six.py` to `splunklib/__init__.py` + +### Major changes +* Removed Code specific to Python2 +* Removed six.py dependency +* Removed `__future__` imports +* Refactored & Updated `splunklib` and `tests` to utilise Python3 features +* Updated CI test matrix to run with Python versions - 3.7, 3.9 and 3.10.7 +* Refactored Code throwing `deprecation` warnings +* Refactored Code violating Pylint rules + ## Version 1.7.2 ### Minor changes diff --git a/splunklib/client.py b/splunklib/client.py index a8c5ac34..7dcdc253 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -692,7 +692,7 @@ def splunk_version(self): @property def splunk_instance(self): if self._instance_type is None : - splunk_info = self.info; + splunk_info = self.info if hasattr(splunk_info, 'instance_type') : self._instance_type = splunk_info['instance_type'] else: @@ -1823,7 +1823,7 @@ def __getitem__(self, key): # This screws up the default implementation of __getitem__ from Collection, which thinks # that multiple entities means a name 
collision, so we have to override it here. try: - response = self.get(key) + self.get(key) return ConfigurationFile(self.service, PATH_CONF % key, state={'title': key}) except HTTPError as he: if he.status == 404: # No entity matching key @@ -1835,7 +1835,7 @@ def __contains__(self, key): # configs/conf-{name} never returns a 404. We have to post to properties/{name} # in order to find out if a configuration exists. try: - response = self.get(key) + self.get(key) return True except HTTPError as he: if he.status == 404: # No entity matching key @@ -3617,7 +3617,7 @@ class Roles(Collection): Retrieve this collection using :meth:`Service.roles`.""" def __init__(self, service): - return Collection.__init__(self, service, PATH_ROLES, item=Role) + Collection.__init__(self, service, PATH_ROLES, item=Role) def __getitem__(self, key): return Collection.__getitem__(self, key.lower()) diff --git a/tests/modularinput/modularinput_testlib.py b/tests/modularinput/modularinput_testlib.py index d4846a40..96388fd6 100644 --- a/tests/modularinput/modularinput_testlib.py +++ b/tests/modularinput/modularinput_testlib.py @@ -14,10 +14,10 @@ # License for the specific language governing permissions and limitations # under the License. -# Utility file for unit tests, import common functions and modules -import unittest -import sys, os import io +import os +import sys +import unittest sys.path.insert(0, os.path.join('../../splunklib', '..')) diff --git a/tests/modularinput/test_input_definition.py b/tests/modularinput/test_input_definition.py index 520eafbc..efbbb5b0 100644 --- a/tests/modularinput/test_input_definition.py +++ b/tests/modularinput/test_input_definition.py @@ -70,7 +70,7 @@ def test_attempt_to_parse_malformed_input_definition_will_throw_exception(self): """Does malformed XML cause the expected exception.""" with self.assertRaises(ValueError): - found = InputDefinition.parse(data_open("data/conf_with_invalid_inputs.xml")) + InputDefinition.parse(data_open("data/conf_with_invalid_inputs.xml")) if __name__ == "__main__": diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index d258729c..dba5d7c1 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -283,7 +283,7 @@ def fix_up(cls, command_class): setattr(settings_instance, name, value) - self.assertIn(backing_field_name, settings_instance.__dict__), + self.assertIn(backing_field_name, settings_instance.__dict__) self.assertEqual(getattr(settings_instance, name), value) self.assertEqual(settings_instance.__dict__[backing_field_name], value) diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py index a6a68840..003cf829 100755 --- a/tests/searchcommands/test_internals_v1.py +++ b/tests/searchcommands/test_internals_v1.py @@ -17,8 +17,9 @@ from contextlib import closing from unittest import main, TestCase import os -import pytest +from io import StringIO, BytesIO from functools import reduce +import pytest from splunklib.searchcommands.internals import CommandLineParser, InputHeader, RecordWriterV1 from splunklib.searchcommands.decorators import Configuration, Option @@ -26,9 +27,6 @@ from splunklib.searchcommands.search_command import SearchCommand -from io import StringIO, BytesIO - - @pytest.mark.smoke class TestInternals(TestCase): diff --git a/tests/test_app.py b/tests/test_app.py index 39b68a08..d7984aff 100755 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -83,7 +83,7 @@ def test_update(self): def 
test_delete(self): name = testlib.tmpname() - app = self.service.apps.create(name) + self.service.apps.create(name) self.assertTrue(name in self.service.apps) self.service.apps.delete(name) self.assertFalse(name in self.service.apps) diff --git a/tests/test_binding.py b/tests/test_binding.py index c54dc3c8..caa4c5db 100755 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -501,7 +501,7 @@ def urllib2_insert_cookie_handler(url, message, **kwargs): # Mimic the insertion of 3rd party cookies into the response. # An example is "sticky session"/"insert cookie" persistence # of a load balancer for a SHC. - header_list = [(k, v) for k, v in response.info().items()] + header_list = list(response.info().items()) header_list.append(("Set-Cookie", "BIGipServer_splunk-shc-8089=1234567890.12345.0000; path=/; Httponly; Secure")) header_list.append(("Set-Cookie", "home_made=yummy")) @@ -633,7 +633,7 @@ def test_got_updated_cookie_with_get(self): def test_login_fails_with_bad_cookie(self): # We should get an error if using a bad cookie try: - new_context = binding.connect(**{"cookie": "bad=cookie"}) + binding.connect(**{"cookie": "bad=cookie"}) self.fail() except AuthenticationError as ae: self.assertEqual(str(ae), "Login failed.") diff --git a/tests/test_kvstore_batch.py b/tests/test_kvstore_batch.py index 9c2f3afe..10dfe142 100755 --- a/tests/test_kvstore_batch.py +++ b/tests/test_kvstore_batch.py @@ -16,8 +16,6 @@ from tests import testlib -from splunklib import client - class KVStoreBatchTestCase(testlib.SDKTestCase): def setUp(self): @@ -66,7 +64,7 @@ def test_insert_find_update_data(self): def tearDown(self): confs = self.service.kvstore - if ('test' in confs): + if 'test' in confs: confs['test'].delete() diff --git a/tests/test_kvstore_conf.py b/tests/test_kvstore_conf.py index beca1f69..bb44bf65 100755 --- a/tests/test_kvstore_conf.py +++ b/tests/test_kvstore_conf.py @@ -17,6 +17,7 @@ from tests import testlib from splunklib import client + class KVStoreConfTestCase(testlib.SDKTestCase): def setUp(self): super().setUp() @@ -43,7 +44,6 @@ def test_update_collection(self): self.assertEqual(self.confs['test']['accelerated_fields.ind1'], '{"a": 1}') self.confs['test'].delete() - def test_update_fields(self): self.confs.create('test') self.confs['test'].post(**{'field.a': 'number'}) @@ -52,7 +52,6 @@ def test_update_fields(self): self.assertEqual(self.confs['test']['field.a'], 'string') self.confs['test'].delete() - def test_create_unique_collection(self): self.confs.create('test') self.assertTrue('test' in self.confs) @@ -83,9 +82,11 @@ def test_create_accelerated_fields_fields(self): """ def tearDown(self): - if ('test' in self.confs): + if 'test' in self.confs: self.confs['test'].delete() + if __name__ == "__main__": import unittest + unittest.main() diff --git a/tests/test_kvstore_data.py b/tests/test_kvstore_data.py index 5627921f..4c1dd86d 100755 --- a/tests/test_kvstore_data.py +++ b/tests/test_kvstore_data.py @@ -55,7 +55,7 @@ def test_update_delete_data(self): self.assertEqual(len(self.col.query(query='{"num": 50}')), 0) def test_query_data(self): - if ('test1' in self.confs): + if 'test1' in self.confs: self.confs['test1'].delete() self.confs.create('test1') self.col = self.confs['test1'].data @@ -87,7 +87,7 @@ def test_params_data_type_conversion(self): self.assertTrue('_key' not in data[x]) def tearDown(self): - if ('test' in self.confs): + if 'test' in self.confs: self.confs['test'].delete() diff --git a/tests/test_modular_input.py b/tests/test_modular_input.py index 
688b26b6..526a676b 100755 --- a/tests/test_modular_input.py +++ b/tests/test_modular_input.py @@ -14,8 +14,8 @@ # License for the specific language governing permissions and limitations # under the License. -from tests import testlib import pytest +from tests import testlib @pytest.mark.smoke diff --git a/tests/test_modular_input_kinds.py b/tests/test_modular_input_kinds.py index 30380475..52c0c320 100755 --- a/tests/test_modular_input_kinds.py +++ b/tests/test_modular_input_kinds.py @@ -14,12 +14,12 @@ # License for the specific language governing permissions and limitations # under the License. +import pytest + from tests import testlib from splunklib import client -import pytest - class ModularInputKindTestCase(testlib.SDKTestCase): def setUp(self): diff --git a/tests/test_results.py b/tests/test_results.py index 03595124..d3350591 100755 --- a/tests/test_results.py +++ b/tests/test_results.py @@ -14,12 +14,12 @@ # License for the specific language governing permissions and limitations # under the License. +import io from io import BytesIO -from tests import testlib from time import sleep +from tests import testlib from splunklib import results -import io class ResultsTestCase(testlib.SDKTestCase): @@ -158,7 +158,7 @@ def test_read_raw_field_with_segmentation(self): def assert_parsed_results_equals(self, xml_text, expected_results): results_reader = results.ResultsReader(BytesIO(xml_text.encode('utf-8'))) - actual_results = [x for x in results_reader] + actual_results = list(results_reader) self.assertEqual(expected_results, actual_results) diff --git a/tests/test_saved_search.py b/tests/test_saved_search.py index 8d559bc5..d089939d 100755 --- a/tests/test_saved_search.py +++ b/tests/test_saved_search.py @@ -15,6 +15,7 @@ # under the License. import datetime +import pytest from tests import testlib import logging @@ -22,7 +23,6 @@ from splunklib import client -import pytest @pytest.mark.smoke @@ -75,9 +75,9 @@ def check_saved_search(self, saved_search): self.assertGreaterEqual(saved_search.suppressed, 0) self.assertGreaterEqual(saved_search['suppressed'], 0) is_scheduled = saved_search.content['is_scheduled'] - self.assertTrue(is_scheduled == '1' or is_scheduled == '0') + self.assertTrue(is_scheduled in ('1', '0')) is_visible = saved_search.content['is_visible'] - self.assertTrue(is_visible == '1' or is_visible == '0') + self.assertTrue(is_visible in ('1', '0')) def test_create(self): self.assertTrue(self.saved_search_name in self.service.saved_searches) @@ -157,13 +157,13 @@ def test_dispatch_with_options(self): def test_history(self): try: old_jobs = self.saved_search.history() - N = len(old_jobs) - logging.debug("Found %d jobs in saved search history", N) + num = len(old_jobs) + logging.debug("Found %d jobs in saved search history", num) job = self.saved_search.dispatch() while not job.is_ready(): sleep(0.1) history = self.saved_search.history() - self.assertEqual(len(history), N + 1) + self.assertEqual(len(history), num + 1) self.assertTrue(job.sid in [j.sid for j in history]) finally: job.cancel() diff --git a/tests/test_utils.py b/tests/test_utils.py index 922d380f..40ed5319 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,5 @@ import unittest import os -import sys from tests import testlib from utils import dslice @@ -77,7 +76,7 @@ def test_dslice_all_args(self): class FilePermissionTest(unittest.TestCase): def setUp(self): - super(FilePermissionTest, self).setUp() + super().setUp() # Check for any change in the default file permission(i.e 644) for all files 
within splunklib def test_filePermissions(self): From 0766ed8a299f6f2cbf095171604d88f855dff406 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 22 Feb 2023 11:46:56 +0530 Subject: [PATCH 39/60] Update client.py --- splunklib/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splunklib/client.py b/splunklib/client.py index 3cfd4aad..39751a97 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -3724,7 +3724,7 @@ def create(self, name, accelerated_fields={}, fields={}, **kwargs): for k, v in list(accelerated_fields.items()): if isinstance(v, dict): v = json.dumps(v) - kwargs['index.' + k] = v + kwargs['accelerated_fields.' + k] = v for k, v in list(fields.items()): kwargs['field.' + k] = v return self.post(name=name, **kwargs) From b003be23f5bf1356aa786e4aa6276281bd6bd63e Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Fri, 3 Mar 2023 14:58:54 +0530 Subject: [PATCH 40/60] update string formatting --- tests/searchcommands/test_decorators.py | 20 ++++++------- tests/test_app.py | 3 +- tests/test_binding.py | 38 +++++++++---------------- 3 files changed, 23 insertions(+), 38 deletions(-) diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index dba5d7c1..b38e200a 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -292,8 +292,7 @@ def fix_up(cls, command_class): new_configuration_settings_class(name, value) except Exception as error: self.assertIsInstance(error, ValueError, - 'Expected ValueError, not {}({}) for {}={}'.format(type(error).__name__, - error, name, repr(value))) + f'Expected ValueError, not {type(error).__name__}({error}) for {name}={repr(value)}') else: self.fail(f'Expected ValueError, not success for {name}={repr(value)}') @@ -352,8 +351,6 @@ def test_option(self): command = TestSearchCommand() options = command.options - #itervalues = lambda: options.values() - options.reset() missing = options.get_missing() self.assertListEqual(missing, [option.name for option in list(options.values()) if option.is_required]) @@ -452,14 +449,13 @@ def test_option(self): self.assertEqual(expected[x.name], x.value) expected = ( - 'foo="f" boolean="f" code="foo == \\"bar\\"" duration="24:59:59" fieldname="some.field_name" ' - 'file=' + json_encode_string(__file__) + ' float="99.9" integer="100" map="foo" match="123-45-6789" ' - 'optionname="some_option_name" record="f" regularexpression="\\\\s+" required_boolean="f" ' - 'required_code="foo == \\"bar\\"" required_duration="24:59:59" required_fieldname="some.field_name" ' - 'required_file=' + json_encode_string( - __file__) + ' required_float="99.9" required_integer="100" required_map="foo" ' - 'required_match="123-45-6789" required_optionname="some_option_name" required_regularexpression="\\\\s+" ' - 'required_set="bar" set="bar" show_configuration="f"') + 'foo="f" boolean="f" code="foo == \\"bar\\"" duration="24:59:59" fieldname="some.field_name" ' + 'file=' + json_encode_string(__file__) + ' float="99.9" integer="100" map="foo" match="123-45-6789" ' + 'optionname="some_option_name" record="f" regularexpression="\\\\s+" required_boolean="f" ' + 'required_code="foo == \\"bar\\"" required_duration="24:59:59" required_fieldname="some.field_name" ' + 'required_file=' + json_encode_string(__file__) + ' required_float="99.9" required_integer="100" required_map="foo" ' + 'required_match="123-45-6789" required_optionname="some_option_name" required_regularexpression="\\\\s+" ' + 'required_set="bar" set="bar" 
show_configuration="f"') observed = str(command.options) diff --git a/tests/test_app.py b/tests/test_app.py index d7984aff..e5288a76 100755 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -36,7 +36,8 @@ def setUp(self): self.app_name = testlib.tmpname() self.app = self.service.apps.create(self.app_name) logging.debug(f"Creating app {self.app_name}") - logging.debug(f"App {self.app_name} already exists. Skipping creation.") + else: + logging.debug(f"App {self.app_name} already exists. Skipping creation.") if self.service.restart_required: self.service.restart(120) diff --git a/tests/test_binding.py b/tests/test_binding.py index 847aed71..44c61023 100755 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -278,13 +278,10 @@ def test_post_with_get_arguments_to_receivers_stream(self): class TestSocket(BindingTestCase): def test_socket(self): socket = self.context.connect() - socket.write(("POST %s HTTP/1.1\r\n" % \ - self.context._abspath("some/path/to/post/to")).encode('utf-8')) - socket.write(("Host: %s:%s\r\n" % \ - (self.context.host, self.context.port)).encode('utf-8')) + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write(("Authorization: %s\r\n" % \ - self.context.token).encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) socket.write("\r\n".encode('utf-8')) socket.close() @@ -799,13 +796,10 @@ def test_preexisting_token(self): self.assertEqual(response.status, 200) socket = newContext.connect() - socket.write(("POST %s HTTP/1.1\r\n" % \ - self.context._abspath("some/path/to/post/to")).encode('utf-8')) - socket.write(("Host: %s:%s\r\n" % \ - (self.context.host, self.context.port)).encode('utf-8')) + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write(("Authorization: %s\r\n" % \ - self.context.token).encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) socket.write("\r\n".encode('utf-8')) socket.close() @@ -827,13 +821,10 @@ def test_preexisting_token_sans_splunk(self): self.assertEqual(response.status, 200) socket = newContext.connect() - socket.write(("POST %s HTTP/1.1\r\n" % \ - self.context._abspath("some/path/to/post/to")).encode('utf-8')) - socket.write(("Host: %s:%s\r\n" % \ - (self.context.host, self.context.port)).encode('utf-8')) + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write(("Authorization: %s\r\n" % \ - self.context.token).encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) socket.write("\r\n".encode('utf-8')) socket.close() @@ -850,13 +841,10 @@ def test_connect_with_preexisting_token_sans_user_and_pass(self): self.assertEqual(response.status, 200) socket = newContext.connect() - socket.write(("POST %s 
HTTP/1.1\r\n" % \ - self.context._abspath("some/path/to/post/to")).encode('utf-8')) - socket.write(("Host: %s:%s\r\n" % \ - (self.context.host, self.context.port)).encode('utf-8')) + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write(("Authorization: %s\r\n" % \ - self.context.token).encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) socket.write("\r\n".encode('utf-8')) socket.close() @@ -973,7 +961,7 @@ def check_response(handler): length = int(handler.headers.get('content-length', 0)) body = handler.rfile.read(length) assert handler.headers['content-type'] == 'application/x-www-form-urlencoded' - assert body.decode('utf-8') == 'baz=baf&hep=cat' or body.decode('utf-8') == 'hep=cat&baz=baf' + assert body.decode('utf-8') in ['baz=baf&hep=cat', 'hep=cat&baz=baf'] with MockServer(POST=check_response): ctx = binding.connect(port=9093, scheme='http', token="waffle") From 33544033e6ef6a157da888316736c199279f36ab Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Thu, 9 Mar 2023 11:13:01 +0530 Subject: [PATCH 41/60] Update test.yml Temporary change to Test compatibility with latest Python v3.11.2 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c71b58a2..a723f44a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,7 @@ jobs: matrix: os: - ubuntu-latest - python: [ 3.7, 3.9, 3.10.7] + python: [ 3.10.7, 3.11.2] splunk-version: - "8.1" - "8.2" From d18b735cb2ad4fbb17998fd306159a0d0b8587e0 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Thu, 9 Mar 2023 15:42:28 +0530 Subject: [PATCH 42/60] Copyright year updates Updated Copyright section with 2023 and few code refactoring --- setup.py | 2 +- sitecustomize.py | 2 +- splunklib/__init__.py | 4 ++-- splunklib/binding.py | 2 +- splunklib/client.py | 2 +- splunklib/data.py | 2 +- splunklib/modularinput/argument.py | 2 +- splunklib/modularinput/event.py | 2 +- splunklib/modularinput/event_writer.py | 2 +- splunklib/modularinput/input_definition.py | 2 +- splunklib/modularinput/scheme.py | 2 +- splunklib/modularinput/script.py | 2 +- splunklib/modularinput/utils.py | 2 +- splunklib/modularinput/validation_definition.py | 2 +- splunklib/results.py | 2 +- splunklib/searchcommands/__init__.py | 2 +- splunklib/searchcommands/decorators.py | 2 +- splunklib/searchcommands/environment.py | 2 +- splunklib/searchcommands/eventing_command.py | 2 +- splunklib/searchcommands/external_search_command.py | 4 ++-- splunklib/searchcommands/generating_command.py | 2 +- splunklib/searchcommands/internals.py | 4 ++-- splunklib/searchcommands/reporting_command.py | 2 +- splunklib/searchcommands/search_command.py | 4 +--- splunklib/searchcommands/streaming_command.py | 2 +- splunklib/searchcommands/validators.py | 2 +- tests/modularinput/modularinput_testlib.py | 2 +- tests/modularinput/test_event.py | 2 +- tests/modularinput/test_input_definition.py | 2 +- tests/modularinput/test_scheme.py | 2 +- tests/modularinput/test_validation_definition.py | 2 +- tests/searchcommands/__init__.py | 2 +- .../searchcommands/test_apps/eventing_app/bin/eventingcsc.py | 2 +- .../test_apps/generating_app/bin/generatingcsc.py | 2 +- 
.../test_apps/reporting_app/bin/reportingcsc.py | 2 +- .../test_apps/streaming_app/bin/streamingcsc.py | 2 +- tests/searchcommands/test_builtin_options.py | 2 +- tests/searchcommands/test_configuration_settings.py | 2 +- tests/searchcommands/test_csc_apps.py | 2 +- tests/searchcommands/test_decorators.py | 2 +- tests/searchcommands/test_internals_v1.py | 2 +- tests/searchcommands/test_internals_v2.py | 2 +- tests/searchcommands/test_search_command.py | 2 +- tests/searchcommands/test_validators.py | 2 +- tests/test_all.py | 2 +- tests/test_app.py | 2 +- tests/test_binding.py | 2 +- tests/test_collection.py | 2 +- tests/test_conf.py | 2 +- tests/test_data.py | 2 +- tests/test_event_type.py | 2 +- tests/test_fired_alert.py | 2 +- tests/test_index.py | 2 +- tests/test_input.py | 2 +- tests/test_job.py | 2 +- tests/test_logger.py | 2 +- tests/test_message.py | 2 +- tests/test_modular_input.py | 2 +- tests/test_modular_input_kinds.py | 2 +- tests/test_results.py | 2 +- tests/test_role.py | 2 +- tests/test_saved_search.py | 2 +- tests/test_service.py | 2 +- tests/test_storage_passwords.py | 2 +- tests/test_user.py | 2 +- tests/testlib.py | 2 +- utils/__init__.py | 2 +- utils/cmdopts.py | 2 +- 68 files changed, 71 insertions(+), 73 deletions(-) diff --git a/setup.py b/setup.py index 54856a28..1d558594 100755 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/sitecustomize.py b/sitecustomize.py index 21fbaebe..a6831050 100644 --- a/sitecustomize.py +++ b/sitecustomize.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/__init__.py b/splunklib/__init__.py index 886b40e3..90d93921 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain @@ -74,5 +74,5 @@ def assertRegex(self, *args, **kwargs): return getattr(self, "assertRegex")(*args, **kwargs) -__version_info__ = (1, 7, 3) +__version_info__ = (2, 0, 0) __version__ = ".".join(map(str, __version_info__)) diff --git a/splunklib/binding.py b/splunklib/binding.py index ceb89ad2..c630d6ab 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/client.py b/splunklib/client.py index 39751a97..774cc8dc 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
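The Authorization-header logic reformatted in PATCH 33 reduces to a small precedence chain. A standalone sketch of that chain (the helper name and the sample credentials are hypothetical, not SDK API):

    from base64 import b64encode

    def auth_header(username=None, password=None, bearer=None, token=None):
        # Precedence mirrors the updated _auth_headers: basic auth first,
        # then a bearer token, then a session token prefixed "Splunk ".
        if username and password:
            creds = b64encode(f"{username}:{password}".encode("utf-8")).decode("ascii")
            return f"Basic {creds}"
        if bearer:
            return f"Bearer {bearer}"
        if token:
            return token if token.startswith("Splunk ") else f"Splunk {token}"
        return None

    print(auth_header(username="admin", password="changeme"))
    # -> Basic YWRtaW46Y2hhbmdlbWU=
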
You may obtain diff --git a/splunklib/data.py b/splunklib/data.py index c889ff9b..69f6ad62 100644 --- a/splunklib/data.py +++ b/splunklib/data.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/modularinput/argument.py b/splunklib/modularinput/argument.py index f16ea99e..979331c4 100644 --- a/splunklib/modularinput/argument.py +++ b/splunklib/modularinput/argument.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/modularinput/event.py b/splunklib/modularinput/event.py index 6a9fba93..1ad738b8 100644 --- a/splunklib/modularinput/event.py +++ b/splunklib/modularinput/event.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/modularinput/event_writer.py b/splunklib/modularinput/event_writer.py index 5aa83d96..de721849 100644 --- a/splunklib/modularinput/event_writer.py +++ b/splunklib/modularinput/event_writer.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/modularinput/input_definition.py b/splunklib/modularinput/input_definition.py index c0e8e1ac..4a71e4c1 100644 --- a/splunklib/modularinput/input_definition.py +++ b/splunklib/modularinput/input_definition.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/modularinput/scheme.py b/splunklib/modularinput/scheme.py index e84ce00d..97cce0c6 100644 --- a/splunklib/modularinput/scheme.py +++ b/splunklib/modularinput/scheme.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/modularinput/script.py b/splunklib/modularinput/script.py index 5df6d0fc..54f8d57c 100644 --- a/splunklib/modularinput/script.py +++ b/splunklib/modularinput/script.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/modularinput/utils.py b/splunklib/modularinput/utils.py index 6429c0a7..a36d0522 100644 --- a/splunklib/modularinput/utils.py +++ b/splunklib/modularinput/utils.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
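PATCH 36 and PATCH 37 are two halves of one fix: the SCP v2 "chunked" header encodes the byte lengths of the metadata and payload, so dropping the stray newline in write_metadata changes the expected byte stream in test_search_command.py. A hypothetical framing helper (not SDK code) makes the arithmetic visible:

    def chunk(metadata: str, body: str) -> str:
        # Header format: chunked 1.0,<metadata bytes>,<body bytes>
        return (f"chunked 1.0,{len(metadata.encode())},{len(body.encode())}\n"
                f"{metadata}{body}")

    meta = '{"finished":true}'             # 17 bytes, as in the test
    body = "test,__mv_test\r\ndata,\r\n"   # 23 bytes in this sketch
    print(chunk(meta, body))               # -> chunked 1.0,17,23 ...
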
You may obtain diff --git a/splunklib/modularinput/validation_definition.py b/splunklib/modularinput/validation_definition.py index 0ad40e9e..3e5a5297 100644 --- a/splunklib/modularinput/validation_definition.py +++ b/splunklib/modularinput/validation_definition.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/results.py b/splunklib/results.py index 8420cf3d..22f5d70f 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -1,4 +1,4 @@ -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/__init__.py b/splunklib/searchcommands/__init__.py index 3d6fbea6..a6a4d955 100644 --- a/splunklib/searchcommands/__init__.py +++ b/splunklib/searchcommands/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/decorators.py b/splunklib/searchcommands/decorators.py index 47902969..a3ec7abf 100644 --- a/splunklib/searchcommands/decorators.py +++ b/splunklib/searchcommands/decorators.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/environment.py b/splunklib/searchcommands/environment.py index 2896df7b..7e5f27bf 100644 --- a/splunklib/searchcommands/environment.py +++ b/splunklib/searchcommands/environment.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/eventing_command.py b/splunklib/searchcommands/eventing_command.py index ab27d32e..4773ccfe 100644 --- a/splunklib/searchcommands/eventing_command.py +++ b/splunklib/searchcommands/eventing_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/external_search_command.py b/splunklib/searchcommands/external_search_command.py index 18fc2643..763fe4a5 100644 --- a/splunklib/searchcommands/external_search_command.py +++ b/splunklib/searchcommands/external_search_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
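The ObjectView.update method added in PATCH 35 is what lets _execute_v2 fold each incoming chunk's metadata into the command's existing metadata, so getinfo-time settings survive while per-chunk fields such as the finished flag are refreshed. The class in isolation, with a small demonstration appended:

    class ObjectView:
        def __init__(self, dictionary):
            self.__dict__ = dictionary

        def update(self, obj):
            # Overlay another view's attributes onto this one.
            self.__dict__.update(obj.__dict__)

        def __repr__(self):
            return repr(self.__dict__)

    base = ObjectView({"finished": False, "preview": True})
    base.update(ObjectView({"finished": True}))
    print(base)  # {'finished': True, 'preview': True}
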
You may obtain @@ -87,7 +87,7 @@ def execute(self): self._execute(self._path, self._argv, self._environ) except: error_type, error, tb = sys.exc_info() - message = 'Command execution failed: ' + str(error) + message = f'Command execution failed: {str(error)}' self._logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb))) sys.exit(1) diff --git a/splunklib/searchcommands/generating_command.py b/splunklib/searchcommands/generating_command.py index 139935b8..e57cfa87 100644 --- a/splunklib/searchcommands/generating_command.py +++ b/splunklib/searchcommands/generating_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py index 6bbec4b3..e2ccff41 100644 --- a/splunklib/searchcommands/internals.py +++ b/splunklib/searchcommands/internals.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain @@ -632,7 +632,7 @@ def _write_record(self, record): if value_t is bool: value = str(value.real) elif value_t is str: - value = str(value) + value = value elif isinstance(value, int) or value_t is float or value_t is complex: value = str(value) elif issubclass(value_t, (dict, list, tuple)): diff --git a/splunklib/searchcommands/reporting_command.py b/splunklib/searchcommands/reporting_command.py index 3551f4cd..cd5c8089 100644 --- a/splunklib/searchcommands/reporting_command.py +++ b/splunklib/searchcommands/reporting_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index 084ebb4b..5bfdc5d8 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain @@ -967,8 +967,6 @@ def _execute_v2(self, ifile, process): self._finished = getattr(metadata, 'finished', False) self._record_writer.is_flushed = False - # metadata.update(self._metadata) - # self._metadata = metadata self._metadata.update(metadata) self._execute_chunk_v2(process, result) diff --git a/splunklib/searchcommands/streaming_command.py b/splunklib/searchcommands/streaming_command.py index b3eb4375..dd0e4b41 100644 --- a/splunklib/searchcommands/streaming_command.py +++ b/splunklib/searchcommands/streaming_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright 2011-2015 Splunk, Inc. +# Copyright © 2011-2023 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
diff --git a/splunklib/searchcommands/validators.py b/splunklib/searchcommands/validators.py
index ef460a4b..7cc004d7 100644
--- a/splunklib/searchcommands/validators.py
+++ b/splunklib/searchcommands/validators.py
@@ -1,6 +1,6 @@
 # coding=utf-8
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/modularinput/modularinput_testlib.py b/tests/modularinput/modularinput_testlib.py
index 96388fd6..238760ab 100644
--- a/tests/modularinput/modularinput_testlib.py
+++ b/tests/modularinput/modularinput_testlib.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/modularinput/test_event.py b/tests/modularinput/test_event.py
index 278abb81..20cbafa4 100644
--- a/tests/modularinput/test_event.py
+++ b/tests/modularinput/test_event.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/modularinput/test_input_definition.py b/tests/modularinput/test_input_definition.py
index efbbb5b0..4aaf3c1d 100644
--- a/tests/modularinput/test_input_definition.py
+++ b/tests/modularinput/test_input_definition.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/modularinput/test_scheme.py b/tests/modularinput/test_scheme.py
index e38d81a5..303bf1de 100644
--- a/tests/modularinput/test_scheme.py
+++ b/tests/modularinput/test_scheme.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/modularinput/test_validation_definition.py b/tests/modularinput/test_validation_definition.py
index 43871c51..82a5bf15 100644
--- a/tests/modularinput/test_validation_definition.py
+++ b/tests/modularinput/test_validation_definition.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/__init__.py b/tests/searchcommands/__init__.py
index 0f260b58..ad42ad03 100644
--- a/tests/searchcommands/__init__.py
+++ b/tests/searchcommands/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright © 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py b/tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py
index d34aa14e..f3c32502 100644
--- a/tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py
+++ b/tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py b/tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py
index 2094ade7..6f2f72f9 100644
--- a/tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py
+++ b/tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright © 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py b/tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py
index 78be5775..f7b214b9 100644
--- a/tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py
+++ b/tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py b/tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py
index 348496cc..74401bba 100644
--- a/tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py
+++ b/tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright © 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_builtin_options.py b/tests/searchcommands/test_builtin_options.py
index 07a343ef..82c45299 100644
--- a/tests/searchcommands/test_builtin_options.py
+++ b/tests/searchcommands/test_builtin_options.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright © 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_configuration_settings.py b/tests/searchcommands/test_configuration_settings.py
index 65d0d3a4..0220244e 100644
--- a/tests/searchcommands/test_configuration_settings.py
+++ b/tests/searchcommands/test_configuration_settings.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright © 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_csc_apps.py b/tests/searchcommands/test_csc_apps.py
index b15574d1..98873ac7 100755
--- a/tests/searchcommands/test_csc_apps.py
+++ b/tests/searchcommands/test_csc_apps.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py
index b38e200a..93fbdcca 100755
--- a/tests/searchcommands/test_decorators.py
+++ b/tests/searchcommands/test_decorators.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright © 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py
index 003cf829..c01d8396 100755
--- a/tests/searchcommands/test_internals_v1.py
+++ b/tests/searchcommands/test_internals_v1.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py
index f5454977..ead7a796 100755
--- a/tests/searchcommands/test_internals_v2.py
+++ b/tests/searchcommands/test_internals_v2.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright © 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py
index 0aadc8db..ace24be1 100755
--- a/tests/searchcommands/test_search_command.py
+++ b/tests/searchcommands/test_search_command.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py
index 7b815491..cc6d1555 100755
--- a/tests/searchcommands/test_validators.py
+++ b/tests/searchcommands/test_validators.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_all.py b/tests/test_all.py
index e7421797..55b4d77f 100755
--- a/tests/test_all.py
+++ b/tests/test_all.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_app.py b/tests/test_app.py
index e5288a76..706aa749 100755
--- a/tests/test_app.py
+++ b/tests/test_app.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_binding.py b/tests/test_binding.py
index 44c61023..39bb34a7 100755
--- a/tests/test_binding.py
+++ b/tests/test_binding.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_collection.py b/tests/test_collection.py
index bf74e30c..a92e18ea 100755
--- a/tests/test_collection.py
+++ b/tests/test_collection.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_conf.py b/tests/test_conf.py
index 6b1f9b09..16dd08fb 100755
--- a/tests/test_conf.py
+++ b/tests/test_conf.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_data.py b/tests/test_data.py
index c6e54efe..c3bd3f7b 100755
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_event_type.py b/tests/test_event_type.py
index 9e495977..92358a12 100755
--- a/tests/test_event_type.py
+++ b/tests/test_event_type.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_fired_alert.py b/tests/test_fired_alert.py
index fb185dbe..c7f4e157 100755
--- a/tests/test_fired_alert.py
+++ b/tests/test_fired_alert.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_index.py b/tests/test_index.py
index fb876496..f577df3e 100755
--- a/tests/test_index.py
+++ b/tests/test_index.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_input.py b/tests/test_input.py
index 26943cd9..02f585bc 100755
--- a/tests/test_input.py
+++ b/tests/test_input.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_job.py b/tests/test_job.py
index bab74f65..6fd4b81c 100755
--- a/tests/test_job.py
+++ b/tests/test_job.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_logger.py b/tests/test_logger.py
index 0541d79a..8afd10cc 100755
--- a/tests/test_logger.py
+++ b/tests/test_logger.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_message.py b/tests/test_message.py
index 0c94402e..da041b45 100755
--- a/tests/test_message.py
+++ b/tests/test_message.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_modular_input.py b/tests/test_modular_input.py
index 526a676b..6473cdde 100755
--- a/tests/test_modular_input.py
+++ b/tests/test_modular_input.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_modular_input_kinds.py b/tests/test_modular_input_kinds.py
index 52c0c320..c780e41f 100755
--- a/tests/test_modular_input_kinds.py
+++ b/tests/test_modular_input_kinds.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_results.py b/tests/test_results.py
index d3350591..1454e733 100755
--- a/tests/test_results.py
+++ b/tests/test_results.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_role.py b/tests/test_role.py
index ca9f5009..d1294413 100755
--- a/tests/test_role.py
+++ b/tests/test_role.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_saved_search.py b/tests/test_saved_search.py
index d089939d..411d3bbc 100755
--- a/tests/test_saved_search.py
+++ b/tests/test_saved_search.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_service.py b/tests/test_service.py
index fb6e7730..6c035d58 100755
--- a/tests/test_service.py
+++ b/tests/test_service.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_storage_passwords.py b/tests/test_storage_passwords.py
index 578b4fb0..4e611066 100644
--- a/tests/test_storage_passwords.py
+++ b/tests/test_storage_passwords.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/test_user.py b/tests/test_user.py
index 38958814..a508c3d5 100755
--- a/tests/test_user.py
+++ b/tests/test_user.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/tests/testlib.py b/tests/testlib.py
index ac8a3e1e..f2ca1755 100644
--- a/tests/testlib.py
+++ b/tests/testlib.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/utils/__init__.py b/utils/__init__.py
index 3a2b48de..60e60530 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/utils/cmdopts.py b/utils/cmdopts.py
index e9fffb3b..1a8e9b02 100644
--- a/utils/cmdopts.py
+++ b/utils/cmdopts.py
@@ -1,4 +1,4 @@
-# Copyright 2011-2015 Splunk, Inc.
+# Copyright © 2011-2023 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain

From 2ee50001655c0a7a3d599e7da2b16f5521546403 Mon Sep 17 00:00:00 2001
From: Abhi Shah
Date: Mon, 7 Aug 2023 14:31:37 +0530
Subject: [PATCH 43/60] matrix update

---
 .github/workflows/test.yml | 2 +-
 tests/test_service.py      | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index bf948bfe..385cad7a 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -11,7 +11,7 @@ jobs:
       matrix:
         os:
           - ubuntu-latest
-        python: [ 3.10.7, 3.11.2]
+        python: [ 3.7, 3.9]
         splunk-version:
           - "8.1"
           - "8.2"
diff --git a/tests/test_service.py b/tests/test_service.py
index 9015754f..93744ccf 100755
--- a/tests/test_service.py
+++ b/tests/test_service.py
@@ -15,6 +15,7 @@
 # under the License.
 
 import unittest
+import pytest
 
 from tests import testlib
 from splunklib import client
From 21323ea12be5929682d517a4d1e524239b0456af Mon Sep 17 00:00:00 2001
From: akaila-splunk
Date: Mon, 21 Aug 2023 12:10:03 +0530
Subject: [PATCH 44/60] moved utility/helper functions to utils.py file

---
 CHANGELOG.md                                |  2 +-
 splunklib/__init__.py                       | 44 ---------------
 splunklib/modularinput/event.py             |  2 +-
 splunklib/modularinput/event_writer.py      |  2 +-
 splunklib/searchcommands/search_command.py  |  8 +--
 splunklib/utils.py                          | 60 +++++++++++++++++++++
 tests/searchcommands/test_search_command.py |  7 +--
 7 files changed, 72 insertions(+), 53 deletions(-)
 create mode 100644 splunklib/utils.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 404636be..02faca87 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,7 +3,7 @@
 ## Version 2.0.0-beta
 
 ### Feature updates
-* `ensure_binary`, `ensure_str`, `ensure_text` and `assert_regex` utility methods have been migrated from `six.py` to `splunklib/__init__.py`
+* `ensure_binary`, `ensure_str`, `ensure_text` and `assert_regex` utility methods have been migrated from `six.py` to `splunklib/utils.py`
 
 ### Major changes
 * Removed Code specific to Python2
diff --git a/splunklib/__init__.py b/splunklib/__init__.py
index 90d93921..035e0f81 100644
--- a/splunklib/__init__.py
+++ b/splunklib/__init__.py
@@ -30,49 +30,5 @@ def setup_logging(level, log_format=DEFAULT_LOG_FORMAT, date_format=DEFAULT_DATE
                         datefmt=date_format)
 
 
-def ensure_binary(s, encoding='utf-8', errors='strict'):
-    """
-    - `str` -> encoded to `bytes`
-    - `bytes` -> `bytes`
-    """
-    if isinstance(s, str):
-        return s.encode(encoding, errors)
-
-    if isinstance(s, bytes):
-        return s
-
-    raise TypeError(f"not expecting type '{type(s)}'")
-
-
-def ensure_str(s, encoding='utf-8', errors='strict'):
-    """
-    - `str` -> `str`
-    - `bytes` -> decoded to `str`
-    """
-    if isinstance(s, bytes):
-        return s.decode(encoding, errors)
-
-    if isinstance(s, str):
-        return s
-
-    raise TypeError(f"not expecting type '{type(s)}'")
-
-
-def ensure_text(s, encoding='utf-8', errors='strict'):
-    """
-    - `str` -> `str`
-    - `bytes` -> decoded to `str`
-    """
-    if isinstance(s, bytes):
-        return s.decode(encoding, errors)
-    if isinstance(s, str):
-        return s
-    raise TypeError(f"not expecting type '{type(s)}'")
-
-
-def assertRegex(self, *args, **kwargs):
-    return getattr(self, "assertRegex")(*args, **kwargs)
-
-
 __version_info__ = (2, 0, 0)
 __version__ = ".".join(map(str, __version_info__))
diff --git a/splunklib/modularinput/event.py b/splunklib/modularinput/event.py
index 1ad738b8..2e398cfa 100644
--- a/splunklib/modularinput/event.py
+++ b/splunklib/modularinput/event.py
@@ -15,7 +15,7 @@
 from io import TextIOBase
 import xml.etree.ElementTree as ET
 
-from splunklib import ensure_text
+from splunklib.utils import ensure_text
 
 
 class Event:
diff --git a/splunklib/modularinput/event_writer.py b/splunklib/modularinput/event_writer.py
index adc9195e..cfff8721 100644
--- a/splunklib/modularinput/event_writer.py
+++ b/splunklib/modularinput/event_writer.py
@@ -14,7 +14,7 @@
 
 import sys
 
-from splunklib import ensure_str
+from splunklib.utils import ensure_str
 
 from .event import ET
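Taken together, the hunks above only relocate the helpers; call sites change as in this illustrative fragment (not part of the patch itself):

    # New location, introduced by this patch:
    from splunklib.utils import ensure_binary, ensure_str

    # Old location, removed from splunklib/__init__.py above:
    # from splunklib import ensure_binary, ensure_str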
diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py
index 47504ed9..ab8be07c 100644
--- a/splunklib/searchcommands/search_command.py
+++ b/splunklib/searchcommands/search_command.py
@@ -34,6 +34,8 @@
 from urllib.parse import urlsplit
 from warnings import warn
 from xml.etree import ElementTree
+from splunklib.utils import ensure_str
+
 
 # Relative imports
 import splunklib
@@ -888,7 +890,7 @@ def _read_chunk(istream):
         if not header:
             return None
 
-        match = SearchCommand._header.match(splunklib.ensure_str(header))
+        match = SearchCommand._header.match(ensure_str(header))
 
         if match is None:
             raise RuntimeError(f'Failed to parse transport header: {header}')
@@ -905,7 +907,7 @@ def _read_chunk(istream):
         decoder = MetadataDecoder()
 
         try:
-            metadata = decoder.decode(splunklib.ensure_str(metadata))
+            metadata = decoder.decode(ensure_str(metadata))
         except Exception as error:
             raise RuntimeError(f'Failed to parse metadata of length {metadata_length}: {error}')
@@ -919,7 +921,7 @@ def _read_chunk(istream):
         except Exception as error:
             raise RuntimeError(f'Failed to read body of length {body_length}: {error}')
 
-        return metadata, splunklib.ensure_str(body,errors="replace")
+        return metadata, ensure_str(body,errors="replace")
diff --git a/splunklib/utils.py b/splunklib/utils.py
new file mode 100644
index 00000000..3bb80ca2
--- /dev/null
+++ b/splunklib/utils.py
@@ -0,0 +1,60 @@
+# Copyright © 2011-2023 Splunk, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"): you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""The **splunklib.utils** File for utility functions.
+"""
+
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+    """
+    - `str` -> encoded to `bytes`
+    - `bytes` -> `bytes`
+    """
+    if isinstance(s, str):
+        return s.encode(encoding, errors)
+
+    if isinstance(s, bytes):
+        return s
+
+    raise TypeError(f"not expecting type '{type(s)}'")
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+    """
+    - `str` -> `str`
+    - `bytes` -> decoded to `str`
+    """
+    if isinstance(s, bytes):
+        return s.decode(encoding, errors)
+
+    if isinstance(s, str):
+        return s
+
+    raise TypeError(f"not expecting type '{type(s)}'")
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+    """
+    - `str` -> `str`
+    - `bytes` -> decoded to `str`
+    """
+    if isinstance(s, bytes):
+        return s.decode(encoding, errors)
+    if isinstance(s, str):
+        return s
+    raise TypeError(f"not expecting type '{type(s)}'")
+
+
+def assertRegex(self, *args, **kwargs):
+    return getattr(self, "assertRegex")(*args, **kwargs)
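For reference, a minimal round trip through the helpers defined in the new splunklib/utils.py above; illustrative usage only, not part of the patch:

    from splunklib.utils import ensure_binary, ensure_str

    payload = ensure_binary("héllo")          # str -> b'h\xc3\xa9llo' (utf-8)
    assert ensure_str(payload) == "héllo"     # bytes -> str
    assert ensure_binary(payload) is payload  # bytes pass through unchanged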
diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py
index ace24be1..8ecb3fd7 100755
--- a/tests/searchcommands/test_search_command.py
+++ b/tests/searchcommands/test_search_command.py
@@ -32,15 +32,16 @@
 from splunklib.searchcommands.decorators import ConfigurationSetting, Option
 from splunklib.searchcommands.search_command import SearchCommand
 from splunklib.client import Service
+from splunklib.utils import ensure_binary
 
 from io import StringIO, BytesIO
 
 
 def build_command_input(getinfo_metadata, execute_metadata, execute_body):
-    input = (f'chunked 1.0,{len(splunklib.ensure_binary(getinfo_metadata))},0\n{getinfo_metadata}' +
-             f'chunked 1.0,{len(splunklib.ensure_binary(execute_metadata))},{len(splunklib.ensure_binary(execute_body))}\n{execute_metadata}{execute_body}')
+    input = (f'chunked 1.0,{len(ensure_binary(getinfo_metadata))},0\n{getinfo_metadata}' +
+             f'chunked 1.0,{len(ensure_binary(execute_metadata))},{len(ensure_binary(execute_body))}\n{execute_metadata}{execute_body}')
 
-    ifile = BytesIO(splunklib.ensure_binary(input))
+    ifile = BytesIO(ensure_binary(input))
 
     ifile = TextIOWrapper(ifile)
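The test helper above builds frames for the chunked external-command protocol that _read_chunk parses. A minimal sketch of that framing; pack_chunk is a hypothetical name, but the wire format matches the strings in the patch:

    import json

    def pack_chunk(metadata, body=""):
        # b"chunked 1.0,<metadata_length>,<body_length>\n" + metadata + body
        meta = json.dumps(metadata).encode("utf-8")
        data = body.encode("utf-8")
        return b"chunked 1.0,%d,%d\n%s%s" % (len(meta), len(data), meta, data)

    print(pack_chunk({"action": "getinfo"}))  # b'chunked 1.0,21,0\n{"action": "getinfo"}'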
From d8d7da456a5df71846e6e658b23e274fce6e4716 Mon Sep 17 00:00:00 2001
From: Abhi Shah
Date: Fri, 8 Sep 2023 12:52:57 +0530
Subject: [PATCH 45/60] updates

---
 .github/workflows/release.yml | 2 +-
 CHANGELOG.md                  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 22da468a..90ef8171 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -19,7 +19,7 @@ jobs:
       - name: Build package
         run: python setup.py sdist
       - name: Publish package to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.8.8
+        uses: pypa/gh-action-pypi-publish@v1.8.10
         with:
           user: __token__
           password: ${{ secrets.pypi_password }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 404636be..df87b0b4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,7 +10,7 @@
 * Removed six.py dependency
 * Removed `__future__` imports
 * Refactored & Updated `splunklib` and `tests` to utilise Python3 features
-* Updated CI test matrix to run with Python versions - 3.7, 3.9 and 3.10.7
+* Updated CI test matrix to run with Python versions - 3.7 and 3.9
 * Refactored Code throwing `deprecation` warnings
 * Refactored Code violating Pylint rules
 ## Version 1.7.4

From 54a6926f0cee6c035305b012dbb2b8eef8abffa8 Mon Sep 17 00:00:00 2001
From: akaila-splunk
Date: Wed, 8 Nov 2023 15:54:37 +0530
Subject: [PATCH 46/60] added check for user role

---
 splunklib/client.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/splunklib/client.py b/splunklib/client.py
index 774cc8dc..c8855944 100755
--- a/splunklib/client.py
+++ b/splunklib/client.py
@@ -3476,7 +3476,8 @@ def role_entities(self):
         :return: The list of roles.
         :rtype: ``list``
         """
-        return [self.service.roles[name] for name in self.content.roles]
+        all_role_names = [r.name for r in self.service.roles.list()]
+        return [self.service.roles[name] for name in self.content.roles if name in all_role_names]
 
 
 # Splunk automatically lowercases new user names so we need to match that
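The guarded lookup above, restated as a standalone sketch (service and user are assumed to be splunklib client objects; illustrative only): a user's content.roles can still name a role that was deleted server-side, and indexing service.roles[name] with such a stale name raises KeyError.

    def role_entities_safe(service, user):
        # Keep only role names that still exist on the server.
        existing = {role.name for role in service.roles.list()}
        return [service.roles[name] for name in user.content.roles
                if name in existing]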
From 07387789de61acc78abcf9caba906c0df9fea546 Mon Sep 17 00:00:00 2001
From: Abhi Shah
Date: Mon, 5 Feb 2024 15:27:54 +0530
Subject: [PATCH 47/60] Removed __test__ flags

---
 tests/searchcommands/test_decorators.py     | 2 --
 tests/searchcommands/test_internals_v2.py   | 3 ---
 tests/searchcommands/test_search_command.py | 3 ---
 3 files changed, 8 deletions(-)

diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py
index 93fbdcca..de2cf9a2 100755
--- a/tests/searchcommands/test_decorators.py
+++ b/tests/searchcommands/test_decorators.py
@@ -462,7 +462,5 @@ def test_option(self):
         self.assertEqual(observed, expected)
 
 
-TestSearchCommand.__test__ = False
-
 if __name__ == "__main__":
     main()
diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py
index ead7a796..29fbba97 100755
--- a/tests/searchcommands/test_internals_v2.py
+++ b/tests/searchcommands/test_internals_v2.py
@@ -409,9 +409,6 @@ def _run(self):
 # test.record()
 # test.playback()
 
-Test.__test__ = False
-TestRecorder.__test__ = False
-
 
 if __name__ == "__main__":
     main()
diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py
index 8ecb3fd7..69800c18 100755
--- a/tests/searchcommands/test_search_command.py
+++ b/tests/searchcommands/test_search_command.py
@@ -266,8 +266,5 @@ def test_process_scpv2(self):
 
 _package_directory = os.path.dirname(os.path.abspath(__file__))
 
-TestCommand.__test__ = False
-TestStreamingCommand.__test__ = False
-
 if __name__ == "__main__":
     main()

From a8ede1a1c05d63ac8998141689af0574b7c42ae9 Mon Sep 17 00:00:00 2001
From: Abhi Shah
Date: Wed, 7 Feb 2024 16:03:52 +0530
Subject: [PATCH 48/60] Updates as per the review/feedback comments

---
 CHANGELOG.md                                |  2 +-
 Makefile                                    |  6 +++---
 README.md                                   | 12 +++++------
 scripts/test_specific.sh                    |  2 +-
 splunklib/binding.py                        |  2 +-
 splunklib/client.py                         |  5 +----
 splunklib/modularinput/event.py             |  4 ++--
 splunklib/searchcommands/decorators.py      |  2 +-
 splunklib/searchcommands/internals.py       | 22 +++------------------
 splunklib/utils.py                          | 12 -----------
 tests/README.md                             |  6 ++----
 tests/searchcommands/chunked_data_stream.py | 11 ++++++-----
 tests/test_binding.py                       |  3 ++-
 tests/test_collection.py                    | 16 +++++++--------
 tests/test_job.py                           |  5 -----
 tests/testlib.py                            |  2 +-
 16 files changed, 38 insertions(+), 74 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 29ce30f6..ab20e262 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,7 +3,7 @@
 ## Version 2.0.0-beta
 
 ### Feature updates
-* `ensure_binary`, `ensure_str`, `ensure_text` and `assert_regex` utility methods have been migrated from `six.py` to `splunklib/utils.py`
+* `ensure_binary`, `ensure_str` and `assert_regex` utility methods have been migrated from `six.py` to `splunklib/utils.py`
 
 ### Major changes
 * Removed Code specific to Python2
diff --git a/Makefile b/Makefile
index 9f1bbd8b..58d53228 100644
--- a/Makefile
+++ b/Makefile
@@ -44,17 +44,17 @@ test_specific:
 .PHONY: test_smoke
 test_smoke:
 	@echo "$(ATTN_COLOR)==> test_smoke $(NO_COLOR)"
-	@tox -e py27,py37 -- -m smoke
+	@tox -e py37,py39 -- -m smoke
 
 .PHONY: test_no_app
 test_no_app:
 	@echo "$(ATTN_COLOR)==> test_no_app $(NO_COLOR)"
-	@tox -e py27,py37 -- -m "not app"
+	@tox -e py37,py39 -- -m "not app"
 
 .PHONY: test_smoke_no_app
 test_smoke_no_app:
 	@echo "$(ATTN_COLOR)==> test_smoke_no_app $(NO_COLOR)"
-	@tox -e py27,py37 -- -m "smoke and not app"
+	@tox -e py37,py39 -- -m "smoke and not app"
 
 .PHONY: env
 env:
diff --git a/README.md b/README.md
index c9bb8cbd..dd58b446 100644
--- a/README.md
+++ b/README.md
@@ -25,13 +25,13 @@ The Splunk Enterprise SDK for Python contains library code, and it's examples ar
 
 Here's what you need to get going with the Splunk Enterprise SDK for Python.
 
-* Python 2.7+ or Python 3.7.
+* Python 3.7 or Python 3.9.
 
-    The Splunk Enterprise SDK for Python has been tested with Python v2.7 and v3.7.
+    The Splunk Enterprise SDK for Python is compatible with python3 and has been tested with Python v3.7 and v3.9.
 
-* Splunk Enterprise 9.0 or 8.2
+* Splunk Enterprise 9.2 or 8.2
 
-    The Splunk Enterprise SDK for Python has been tested with Splunk Enterprise 9.0, 8.2 and 8.1
+    The Splunk Enterprise SDK for Python has been tested with Splunk Enterprise 9.2, 8.2 and 8.1
 
     If you haven't already installed Splunk Enterprise, download it
     [here](http://www.splunk.com/download). For more information, see the Splunk Enterprise [_Installation Manual_](https://docs.splunk.com/Documentation/Splunk/latest/Installation).
@@ -61,7 +61,7 @@ Install the sources you cloned from GitHub:
 You'll need `docker` and `docker-compose` to get up and running using this method.
 
 ```
-make up SPLUNK_VERSION=9.0
+make up SPLUNK_VERSION=9.2
 make wait_up
 make test
 make down
@@ -110,7 +110,7 @@ here is an example of .env file:
     # Access scheme (default: https)
     scheme=https
     # Your version of Splunk Enterprise
-    version=9.0
+    version=9.2
     # Bearer token for authentication
     #splunkToken=
     # Session key for authentication
diff --git a/scripts/test_specific.sh b/scripts/test_specific.sh
index b2890383..9f751530 100644
--- a/scripts/test_specific.sh
+++ b/scripts/test_specific.sh
@@ -1,4 +1,4 @@
 echo "To run a specific test:"
-echo "	tox -e py27,py37 [test_file_path]::[TestClassName]::[test_method]"
+echo "	tox -e py37,py39 [test_file_path]::[TestClassName]::[test_method]"
 echo "For Example, To run 'test_autologin' testcase from 'test_service.py' file run"
 echo "	tox -e py37 -- tests/test_service.py::ServiceTestCase::test_autologin"
diff --git a/splunklib/binding.py b/splunklib/binding.py
index 85b7038c..fcad0058 100644
--- a/splunklib/binding.py
+++ b/splunklib/binding.py
@@ -201,7 +201,7 @@ def __new__(self, val='', skip_encode=False, encode_slash=False):
             return str.__new__(self, val)
         if encode_slash:
             return str.__new__(self, parse.quote_plus(val))
-        # When subclassing str, just call str.__new__ method 
+        # When subclassing str, just call str.__new__ method
         # with your class and the value you want to have in the
         # new string.
         return str.__new__(self, parse.quote(val))
diff --git a/splunklib/client.py b/splunklib/client.py
index c8855944..97f8b3fe 100644
--- a/splunklib/client.py
+++ b/splunklib/client.py
@@ -1982,9 +1982,6 @@ def delete(self, username, realm=None):
         :return: The `StoragePassword` collection.
         :rtype: ``self``
         """
-        if self.service.namespace.owner == '-' or self.service.namespace.app == '-':
-            raise ValueError("app context must be specified when removing a password.")
-
         if realm is None:
             # This case makes the username optional, so
             # the full name can be passed in as realm.
@@ -3751,7 +3748,7 @@ def update_accelerated_field(self, name, value):
         :return: Result of POST request
         """
         kwargs = {}
-        kwargs['accelerated_fields.' + name] = value if isinstance(value, str) else json.dumps(value)
+        kwargs['accelerated_fields.' + name] = json.dumps(value) if isinstance(value, dict) else value
         return self.post(**kwargs)
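Hypothetical usage of the corrected update_accelerated_field shown above; the collection name 'mycoll' and the credentials are made up, and a reachable Splunk instance is assumed. Dict values describing an accelerated index are JSON-encoded, anything else is passed through:

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")
    coll = service.kvstore["mycoll"]
    coll.update_accelerated_field("by_user", {"user": 1})  # dict -> json.dumps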
diff --git a/splunklib/modularinput/event.py b/splunklib/modularinput/event.py
index 2e398cfa..dbd9d867 100644
--- a/splunklib/modularinput/event.py
+++ b/splunklib/modularinput/event.py
@@ -15,7 +15,7 @@
 from io import TextIOBase
 import xml.etree.ElementTree as ET
 
-from splunklib.utils import ensure_text
+from splunklib.utils import ensure_str
 
 
 class Event:
@@ -105,7 +105,7 @@ def write_to(self, stream):
             ET.SubElement(event, "done")
 
         if isinstance(stream, TextIOBase):
-            stream.write(ensure_text(ET.tostring(event)))
+            stream.write(ensure_str(ET.tostring(event)))
         else:
             stream.write(ET.tostring(event))
         stream.flush()
diff --git a/splunklib/searchcommands/decorators.py b/splunklib/searchcommands/decorators.py
index a3ec7abf..ae7ff6e5 100644
--- a/splunklib/searchcommands/decorators.py
+++ b/splunklib/searchcommands/decorators.py
@@ -15,7 +15,7 @@
 # under the License.
 
 
-from collections import OrderedDict  # must be python 2.7
+from collections import OrderedDict
 
 from inspect import getmembers, isclass, isfunction
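The event.py change above works because ET.tostring() returns bytes, while a TextIOBase stream (sys.stdout, for example) accepts only str; ensure_str performs the decode. Illustrative only:

    import sys
    import xml.etree.ElementTree as ET
    from splunklib.utils import ensure_str

    event = ET.Element("event")
    ET.SubElement(event, "data").text = "hello"
    sys.stdout.write(ensure_str(ET.tostring(event)))  # <event><data>hello</data></event>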
diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py
index e2ccff41..65d93d20 100644
--- a/splunklib/searchcommands/internals.py
+++ b/splunklib/searchcommands/internals.py
@@ -40,30 +40,15 @@ def set_binary_mode(fh):
     """ Helper method to set up binary mode for file handles.
     Emphasis being sys.stdin, sys.stdout, sys.stderr.
     For python3, we want to return .buffer
-    For python2+windows we want to set os.O_BINARY
     """
-    typefile = TextIOWrapper if sys.version_info >= (3, 0) else file
+    typefile = TextIOWrapper
     # check for file handle
     if not isinstance(fh, typefile):
         return fh
-    # check for python3 and buffer
-    if sys.version_info >= (3, 0) and hasattr(fh, 'buffer'):
+    # check for buffer
+    if hasattr(fh, 'buffer'):
         return fh.buffer
-    # check for python3
-    if sys.version_info >= (3, 0):
-        pass
-    # check for windows python2. SPL-175233 -- python3 stdout is already binary
-    elif sys.platform == 'win32':
-        # Work around the fact that on Windows '\n' is mapped to '\r\n'. The typical solution is to simply open files in
-        # binary mode, but stdout is already open, thus this hack. 'CPython' and 'PyPy' work differently. We assume that
-        # all other Python implementations are compatible with 'CPython'. This might or might not be a valid assumption.
-        from platform import python_implementation
-        implementation = python_implementation()
-        if implementation == 'PyPy':
-            return os.fdopen(fh.fileno(), 'wb', 0)
-        import msvcrt
-        msvcrt.setmode(fh.fileno(), os.O_BINARY)
     return fh
@@ -684,7 +669,6 @@ def _write_record(self, record):
             # We may be running under PyPy 2.5 which does not include the _json module
             _iterencode_json = JSONEncoder(separators=(',', ':')).iterencode
         else:
-            # Creating _iterencode_json this way yields a two-fold performance improvement on Python 2.7.9 and 2.7.10
             from json.encoder import encode_basestring_ascii
 
             @staticmethod
diff --git a/splunklib/utils.py b/splunklib/utils.py
index 3bb80ca2..2e974999 100644
--- a/splunklib/utils.py
+++ b/splunklib/utils.py
@@ -44,17 +44,5 @@ def ensure_str(s, encoding='utf-8', errors='strict'):
     raise TypeError(f"not expecting type '{type(s)}'")
 
 
-def ensure_text(s, encoding='utf-8', errors='strict'):
-    """
-    - `str` -> `str`
-    - `bytes` -> decoded to `str`
-    """
-    if isinstance(s, bytes):
-        return s.decode(encoding, errors)
-    if isinstance(s, str):
-        return s
-    raise TypeError(f"not expecting type '{type(s)}'")
-
-
 def assertRegex(self, *args, **kwargs):
     return getattr(self, "assertRegex")(*args, **kwargs)
diff --git a/tests/README.md b/tests/README.md
index 3da69c9f..da02228c 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -1,10 +1,8 @@
 # Splunk Test Suite
 
 The test suite uses Python's standard library and the built-in **unittest**
-library. If you're using Python 2.7 or Python 3.7, you're all set. However, if you are using
-Python 2.6, you'll also need to install the **unittest2** library to get the
-additional features that were added to Python 2.7 (just run `pip install
-unittest2` or `easy_install unittest2`).
+library. The Splunk Enterprise SDK for Python has been tested with Python v3.7
+and v3.9.
 
 To run the unit tests, open a command prompt in the **/splunk-sdk-python**
 directory and enter:
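Condensed, the set_binary_mode cleanup earlier in this patch reduces to returning the underlying binary buffer when one exists; a sketch under that assumption, not the verbatim SDK code:

    from io import TextIOWrapper

    def set_binary_mode(fh):
        if isinstance(fh, TextIOWrapper) and hasattr(fh, 'buffer'):
            return fh.buffer   # e.g. sys.stdout -> sys.stdout.buffer
        return fh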
diff --git a/tests/searchcommands/chunked_data_stream.py b/tests/searchcommands/chunked_data_stream.py
index 39782c44..d1ac5a5f 100644
--- a/tests/searchcommands/chunked_data_stream.py
+++ b/tests/searchcommands/chunked_data_stream.py
@@ -4,11 +4,12 @@
 import json
 
 import splunklib.searchcommands.internals
+from splunklib.utils import ensure_binary, ensure_str
 
 
 class Chunk:
     def __init__(self, version, meta, data):
-        self.version = version
+        self.version = ensure_str(version)
         self.meta = json.loads(meta)
         dialect = splunklib.searchcommands.internals.CsvDialect
         self.data = csv.DictReader(io.StringIO(data.decode("utf-8")),
@@ -20,9 +21,9 @@ def __init__(self, chunk_stream):
         self.chunk_stream = chunk_stream
 
     def __next__(self):
-        return next(self)
+        return self.next()
 
-    def __next__(self):
+    def next(self):
         try:
             return self.chunk_stream.read_chunk()
         except EOFError:
@@ -53,7 +54,7 @@ def read_chunk(self):
 
 
 def build_chunk(keyval, data=None):
-    metadata = json.dumps(keyval).encode('utf-8')
+    metadata = ensure_binary(json.dumps(keyval))
     data_output = _build_data_csv(data)
     return b"chunked 1.0,%d,%d\n%s%s" % (len(metadata), len(data_output), metadata, data_output)
@@ -96,4 +97,4 @@ def _build_data_csv(data):
     writer.writeheader()
     for datum in data:
         writer.writerow(datum)
-    return csvout.getvalue().encode('utf-8')
+    return ensure_binary(csvout.getvalue())
diff --git a/tests/test_binding.py b/tests/test_binding.py
index 4df87198..b226ef50 100755
--- a/tests/test_binding.py
+++ b/tests/test_binding.py
@@ -32,6 +32,7 @@
 from splunklib import binding
 from splunklib.binding import HTTPError, AuthenticationError, UrlEncoded
 from splunklib import data
+from splunklib.utils import ensure_str
 
 import pytest
 
@@ -963,7 +964,7 @@ def check_response(handler):
         length = int(handler.headers.get('content-length', 0))
         body = handler.rfile.read(length)
         assert handler.headers['content-type'] == 'application/x-www-form-urlencoded'
-        assert body.decode('utf-8') in ['baz=baf&hep=cat', 'hep=cat&baz=baf']
+        assert ensure_str(body) in ['baz=baf&hep=cat', 'hep=cat&baz=baf']
 
     with MockServer(POST=check_response):
         ctx = binding.connect(port=9093, scheme='http', token="waffle")
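On the chunked_data_stream change above: the class previously defined __next__ twice, so the first definition (which recursed via next(self)) was dead code; the patch keeps a single __next__ that delegates to an explicit next() method. The same shape, as a self-contained sketch:

    class ChunksIterator:
        def __init__(self, chunks):
            self._it = iter(chunks)

        def __iter__(self):
            return self

        def __next__(self):
            return self.next()   # delegate, as the patched class does

        def next(self):
            return next(self._it)

    print(list(ChunksIterator([1, 2, 3])))  # [1, 2, 3]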
diff --git a/tests/test_collection.py b/tests/test_collection.py
index a92e18ea..03ec54b2 100755
--- a/tests/test_collection.py
+++ b/tests/test_collection.py
@@ -80,7 +80,7 @@ def test_list(self):
                 logging.debug(f"No entities in collection {coll_name}; skipping test.")
             found = [ent.name for ent in coll.list()][:10]
             self.assertEqual(expected, found,
-                             msg=f'on {coll_name} (expected {expected}, found {found})')
+                             msg=f'on {coll_name} (expected: {expected}, found: {found})')
 
     def test_list_with_count(self):
         N = 5
@@ -111,7 +111,7 @@ def test_list_with_search(self):
             # TODO: DVPL-5868 - This should use a real search instead of *. Otherwise the test passes trivially.
             found = [ent.name for ent in coll.list(search="*")]
             self.assertEqual(expected, found,
-                             msg=f'on {coll_name} (expected {expected}, found {found})')
+                             msg=f'on {coll_name} (expected: {expected}, found: {found})')
 
     def test_list_with_sort_dir(self):
         for coll_name in collections:
@@ -127,7 +127,7 @@ def test_list_with_sort_dir(self):
 
             found = [ent.name for ent in coll.list(**found_kwargs)]
             self.assertEqual(sorted(expected), sorted(found),
-                             msg=f'on {coll_name} (expected {expected}, found {found})')
+                             msg=f'on {coll_name} (expected: {expected}, found: {found})')
 
     def test_list_with_sort_mode_auto(self):
         # The jobs collection requires special handling. The sort_dir kwarg is
@@ -151,7 +151,7 @@ def test_list_with_sort_mode_auto(self):
             else:
                 found = [ent.name for ent in coll.list()]
 
-            self.assertEqual(expected, found, msg=f'on {coll_name} (expected {expected}, found {found})')
+            self.assertEqual(expected, found, msg=f'on {coll_name} (expected: {expected}, found: {found})')
 
     def test_list_with_sort_mode_alpha_case(self):
         for coll_name in collections:
@@ -166,7 +166,7 @@ def test_list_with_sort_mode_alpha_case(self):
                 logging.debug(f"No entities in collection {coll_name}; skipping test.")
             expected = sorted(found)
             self.assertEqual(expected, found,
-                             msg=f'on {coll_name} (expected {expected}, found {found})')
+                             msg=f'on {coll_name} (expected: {expected}, found: {found})')
 
     def test_list_with_sort_mode_alpha(self):
         for coll_name in collections:
@@ -184,7 +184,7 @@ def test_list_with_sort_mode_alpha(self):
                 logging.debug(f"No entities in collection {coll_name}; skipping test.")
             expected = sorted(found, key=str.lower)
             self.assertEqual(expected, found,
-                             msg=f'on {coll_name} (expected {expected}, found {found})')
+                             msg=f'on {coll_name} (expected: {expected}, found: {found})')
 
     def test_iteration(self):
         for coll_name in collections:
@@ -197,7 +197,7 @@ def test_iteration(self):
             for ent in coll.iter(pagesize=max(int(total / 5.0), 1), count=10):
                 found.append(ent.name)
             self.assertEqual(expected, found,
-                             msg=f'on {coll_name} (expected {expected}, found {found})')
+                             msg=f'on {coll_name} (expected: {expected}, found: {found})')
 
     def test_paging(self):
         for coll_name in collections:
@@ -218,7 +218,7 @@ def test_paging(self):
                 found.extend([ent.name for ent in page])
                 logging.debug("Iterate: offset=%d/%d", offset, total)
             self.assertEqual(expected, found,
-                             msg=f'on {coll_name} (expected {expected}, found {found})')
+                             msg=f'on {coll_name} (expected: {expected}, found: {found})')
 
     def test_getitem_with_nonsense(self):
         for coll_name in collections:
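The recurring msg tweak above just labels both sides of the comparison so failures read unambiguously; a tiny illustration (not part of the patch):

    expected, found = ['a'], ['b']
    print(f"on apps (expected: {expected}, found: {found})")
    # -> on apps (expected: ['a'], found: ['b'])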
diff --git a/tests/test_job.py b/tests/test_job.py
index c94c8e45..8f3cef93 100755
--- a/tests/test_job.py
+++ b/tests/test_job.py
@@ -30,11 +30,6 @@
 
 import pytest
 
-# TODO: Determine if we should be importing ExpatError if ParseError is not available (e.g., on Python 2.6)
-# There's code below that now catches SyntaxError instead of ParseError. Should we be catching ExpathError instead?
-
-# from xml.etree.ElementTree import ParseError
-
 
 class TestUtilities(testlib.SDKTestCase):
     def test_service_search(self):
diff --git a/tests/testlib.py b/tests/testlib.py
index f2ca1755..79ace526 100644
--- a/tests/testlib.py
+++ b/tests/testlib.py
@@ -55,7 +55,7 @@ def to_bool(x):
         return True
     if x == '0':
         return False
-    raise ValueError("Not a boolean value: %s", x)
+    raise ValueError(f"Not a boolean value: {x}")
 
 
 def tmpname():

From bad91276c33d6d05832c766ac3b0976edb36274f Mon Sep 17 00:00:00 2001
From: Abhi Shah
Date: Sat, 17 Feb 2024 14:43:08 +0530
Subject: [PATCH 49/60] Copyright year updates

---
 docs/conf.py | 2 +-
 scripts/build-env.py | 2 +-
 setup.py | 2 +-
 sitecustomize.py | 2 +-
 splunklib/__init__.py | 2 +-
 splunklib/binding.py | 2 +-
 splunklib/client.py | 2 +-
 splunklib/data.py | 2 +-
 splunklib/modularinput/argument.py | 2 +-
 splunklib/modularinput/event.py | 2 +-
 splunklib/modularinput/event_writer.py | 2 +-
 splunklib/modularinput/input_definition.py | 2 +-
 splunklib/modularinput/scheme.py | 2 +-
 splunklib/modularinput/script.py | 2 +-
 splunklib/modularinput/utils.py | 2 +-
 splunklib/modularinput/validation_definition.py | 2 +-
 splunklib/results.py | 2 +-
 splunklib/searchcommands/__init__.py | 2 +-
 splunklib/searchcommands/decorators.py | 2 +-
 splunklib/searchcommands/environment.py | 2 +-
 splunklib/searchcommands/eventing_command.py | 2 +-
 splunklib/searchcommands/external_search_command.py | 2 +-
 splunklib/searchcommands/generating_command.py | 2 +-
 splunklib/searchcommands/internals.py | 2 +-
 splunklib/searchcommands/reporting_command.py | 2 +-
 splunklib/searchcommands/search_command.py | 2 +-
 splunklib/searchcommands/streaming_command.py | 2 +-
 splunklib/searchcommands/validators.py | 2 +-
 splunklib/utils.py | 2 +-
 tests/modularinput/modularinput_testlib.py | 2 +-
 tests/modularinput/test_event.py | 2 +-
 tests/modularinput/test_input_definition.py | 2 +-
 tests/modularinput/test_scheme.py | 2 +-
 tests/modularinput/test_validation_definition.py | 2 +-
 tests/searchcommands/__init__.py | 2 +-
 tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py | 2 +-
 tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py | 2 +-
 tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py | 2 +-
 tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py | 2 +-
 tests/searchcommands/test_builtin_options.py | 2 +-
 tests/searchcommands/test_configuration_settings.py | 2 +-
 tests/searchcommands/test_csc_apps.py | 2 +-
 tests/searchcommands/test_decorators.py | 2 +-
 tests/searchcommands/test_internals_v1.py | 2 +-
 tests/searchcommands/test_internals_v2.py | 2 +-
 tests/searchcommands/test_search_command.py | 2 +-
 tests/searchcommands/test_validators.py | 2 +-
 tests/test_app.py | 2 +-
 tests/test_binding.py | 2 +-
 tests/test_collection.py | 2 +-
 tests/test_conf.py | 2 +-
 tests/test_data.py | 2 +-
 tests/test_event_type.py | 2 +-
 tests/test_fired_alert.py | 2 +-
 tests/test_index.py | 2 +-
 tests/test_input.py | 2 +-
 tests/test_job.py | 2 +-
 tests/test_kvstore_batch.py | 2 +-
 tests/test_kvstore_data.py | 2 +-
 tests/test_logger.py | 2 +-
 tests/test_message.py | 2 +-
 tests/test_modular_input.py | 2 +-
 tests/test_modular_input_kinds.py | 2 +-
 tests/test_results.py | 2 +-
 tests/test_role.py | 2 +-
 tests/test_saved_search.py | 2 +-
 tests/test_service.py | 2 +-
 tests/test_storage_passwords.py | 2 +-
 tests/test_user.py | 2 +-
 tests/testlib.py | 2 +-
 utils/__init__.py | 2 +-
 utils/cmdopts.py | 2 +-
 72 files changed, 72 insertions(+), 72 deletions(-)
diff --git a/docs/conf.py b/docs/conf.py
index 5c358631..6b8bfe1d 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -43,7 +43,7 @@
 
 # General information about the project.
 project = u'Splunk SDK for Python'
-copyright = u'2021, Splunk Inc'
+copyright = u'2024, Splunk Inc'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
diff --git a/scripts/build-env.py b/scripts/build-env.py
index e1a153d4..fcf55ae1 100644
--- a/scripts/build-env.py
+++ b/scripts/build-env.py
@@ -1,4 +1,4 @@
-# Copyright 2011-2020 Splunk, Inc.
+# Copyright 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/setup.py b/setup.py
index 1d558594..62725534 100755
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/sitecustomize.py b/sitecustomize.py
index a6831050..eb94c154 100644
--- a/sitecustomize.py
+++ b/sitecustomize.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/__init__.py b/splunklib/__init__.py
index 035e0f81..2613d284 100644
--- a/splunklib/__init__.py
+++ b/splunklib/__init__.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/binding.py b/splunklib/binding.py
index fcad0058..43ac2d48 100644
--- a/splunklib/binding.py
+++ b/splunklib/binding.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/client.py b/splunklib/client.py
index 97f8b3fe..c0df2600 100644
--- a/splunklib/client.py
+++ b/splunklib/client.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/data.py b/splunklib/data.py
index 69f6ad62..32fbb522 100644
--- a/splunklib/data.py
+++ b/splunklib/data.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/modularinput/argument.py b/splunklib/modularinput/argument.py
index 979331c4..ec643875 100644
--- a/splunklib/modularinput/argument.py
+++ b/splunklib/modularinput/argument.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/modularinput/event.py b/splunklib/modularinput/event.py
index dbd9d867..7ee7266a 100644
--- a/splunklib/modularinput/event.py
+++ b/splunklib/modularinput/event.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/modularinput/event_writer.py b/splunklib/modularinput/event_writer.py
index cfff8721..c048a5b5 100644
--- a/splunklib/modularinput/event_writer.py
+++ b/splunklib/modularinput/event_writer.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/modularinput/input_definition.py b/splunklib/modularinput/input_definition.py
index 4a71e4c1..190192f7 100644
--- a/splunklib/modularinput/input_definition.py
+++ b/splunklib/modularinput/input_definition.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/modularinput/scheme.py b/splunklib/modularinput/scheme.py
index 97cce0c6..a3b08682 100644
--- a/splunklib/modularinput/scheme.py
+++ b/splunklib/modularinput/scheme.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/modularinput/script.py b/splunklib/modularinput/script.py
index 54f8d57c..e912d737 100644
--- a/splunklib/modularinput/script.py
+++ b/splunklib/modularinput/script.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/modularinput/utils.py b/splunklib/modularinput/utils.py
index a36d0522..dad73dd0 100644
--- a/splunklib/modularinput/utils.py
+++ b/splunklib/modularinput/utils.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/modularinput/validation_definition.py b/splunklib/modularinput/validation_definition.py
index 3e5a5297..b71e1e7c 100644
--- a/splunklib/modularinput/validation_definition.py
+++ b/splunklib/modularinput/validation_definition.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/results.py b/splunklib/results.py
index 22f5d70f..30476c84 100644
--- a/splunklib/results.py
+++ b/splunklib/results.py
@@ -1,4 +1,4 @@
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/searchcommands/__init__.py b/splunklib/searchcommands/__init__.py
index a6a4d955..94dbbda9 100644
--- a/splunklib/searchcommands/__init__.py
+++ b/splunklib/searchcommands/__init__.py
@@ -1,6 +1,6 @@
 # coding=utf-8
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/searchcommands/decorators.py b/splunklib/searchcommands/decorators.py
index ae7ff6e5..b475d26e 100644
--- a/splunklib/searchcommands/decorators.py
+++ b/splunklib/searchcommands/decorators.py
@@ -1,6 +1,6 @@
 # coding=utf-8
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/searchcommands/environment.py b/splunklib/searchcommands/environment.py
index 7e5f27bf..35f1deaf 100644
--- a/splunklib/searchcommands/environment.py
+++ b/splunklib/searchcommands/environment.py
@@ -1,6 +1,6 @@
 # coding=utf-8
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/searchcommands/eventing_command.py b/splunklib/searchcommands/eventing_command.py
index 4773ccfe..d42d056d 100644
--- a/splunklib/searchcommands/eventing_command.py
+++ b/splunklib/searchcommands/eventing_command.py
@@ -1,6 +1,6 @@
 # coding=utf-8
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/searchcommands/external_search_command.py b/splunklib/searchcommands/external_search_command.py
index 763fe4a5..ef05c88b 100644
--- a/splunklib/searchcommands/external_search_command.py
+++ b/splunklib/searchcommands/external_search_command.py
@@ -1,6 +1,6 @@
 # coding=utf-8
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/searchcommands/generating_command.py b/splunklib/searchcommands/generating_command.py
index e57cfa87..36b014c3 100644
--- a/splunklib/searchcommands/generating_command.py
+++ b/splunklib/searchcommands/generating_command.py
@@ -1,6 +1,6 @@
 # coding=utf-8
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License. You may obtain
diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py
index 65d93d20..962d8c8b 100644
--- a/splunklib/searchcommands/internals.py
+++ b/splunklib/searchcommands/internals.py
@@ -1,6 +1,6 @@
 # coding=utf-8
 #
-# Copyright © 2011-2023 Splunk, Inc.
+# Copyright © 2011-2024 Splunk, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"): you may
 # not use this file except in compliance with the License.
You may obtain diff --git a/splunklib/searchcommands/reporting_command.py b/splunklib/searchcommands/reporting_command.py index cd5c8089..5df3dc7e 100644 --- a/splunklib/searchcommands/reporting_command.py +++ b/splunklib/searchcommands/reporting_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index ab8be07c..ee4c7180 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/streaming_command.py b/splunklib/searchcommands/streaming_command.py index dd0e4b41..e2a3a407 100644 --- a/splunklib/searchcommands/streaming_command.py +++ b/splunklib/searchcommands/streaming_command.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/searchcommands/validators.py b/splunklib/searchcommands/validators.py index 7cc004d7..ccaebca0 100644 --- a/splunklib/searchcommands/validators.py +++ b/splunklib/searchcommands/validators.py @@ -1,6 +1,6 @@ # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/splunklib/utils.py b/splunklib/utils.py index 2e974999..db9c3126 100644 --- a/splunklib/utils.py +++ b/splunklib/utils.py @@ -1,4 +1,4 @@ -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/modularinput/modularinput_testlib.py b/tests/modularinput/modularinput_testlib.py index 238760ab..4bf0df13 100644 --- a/tests/modularinput/modularinput_testlib.py +++ b/tests/modularinput/modularinput_testlib.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/modularinput/test_event.py b/tests/modularinput/test_event.py index 20cbafa4..4039d8e2 100644 --- a/tests/modularinput/test_event.py +++ b/tests/modularinput/test_event.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
You may obtain diff --git a/tests/modularinput/test_input_definition.py b/tests/modularinput/test_input_definition.py index 4aaf3c1d..93601b35 100644 --- a/tests/modularinput/test_input_definition.py +++ b/tests/modularinput/test_input_definition.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/modularinput/test_scheme.py b/tests/modularinput/test_scheme.py index 303bf1de..7e7ddcc9 100644 --- a/tests/modularinput/test_scheme.py +++ b/tests/modularinput/test_scheme.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/modularinput/test_validation_definition.py b/tests/modularinput/test_validation_definition.py index 82a5bf15..1b71a220 100644 --- a/tests/modularinput/test_validation_definition.py +++ b/tests/modularinput/test_validation_definition.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/__init__.py b/tests/searchcommands/__init__.py index ad42ad03..41d8d066 100644 --- a/tests/searchcommands/__init__.py +++ b/tests/searchcommands/__init__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py b/tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py index f3c32502..fafbe46f 100644 --- a/tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py +++ b/tests/searchcommands/test_apps/eventing_app/bin/eventingcsc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py b/tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py index 6f2f72f9..4fe3e765 100644 --- a/tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py +++ b/tests/searchcommands/test_apps/generating_app/bin/generatingcsc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
You may obtain diff --git a/tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py b/tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py index f7b214b9..477f5fe2 100644 --- a/tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py +++ b/tests/searchcommands/test_apps/reporting_app/bin/reportingcsc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py b/tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py index 74401bba..8ee2c91e 100644 --- a/tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py +++ b/tests/searchcommands/test_apps/streaming_app/bin/streamingcsc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_builtin_options.py b/tests/searchcommands/test_builtin_options.py index 82c45299..174baed0 100644 --- a/tests/searchcommands/test_builtin_options.py +++ b/tests/searchcommands/test_builtin_options.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_configuration_settings.py b/tests/searchcommands/test_configuration_settings.py index 0220244e..171a3616 100644 --- a/tests/searchcommands/test_configuration_settings.py +++ b/tests/searchcommands/test_configuration_settings.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_csc_apps.py b/tests/searchcommands/test_csc_apps.py index 2b35306d..64b03dc3 100755 --- a/tests/searchcommands/test_csc_apps.py +++ b/tests/searchcommands/test_csc_apps.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index de2cf9a2..be9c3d3c 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
You may obtain diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py index c01d8396..e408271b 100755 --- a/tests/searchcommands/test_internals_v1.py +++ b/tests/searchcommands/test_internals_v1.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index 29fbba97..c3122b3e 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py index 69800c18..c8fe7d80 100755 --- a/tests/searchcommands/test_search_command.py +++ b/tests/searchcommands/test_search_command.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index cc6d1555..80149aa6 100755 --- a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # coding=utf-8 # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_app.py b/tests/test_app.py index 706aa749..35be3814 100755 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_binding.py b/tests/test_binding.py index b226ef50..5f967c80 100755 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_collection.py b/tests/test_collection.py index 03ec54b2..ec641a6d 100755 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_conf.py b/tests/test_conf.py index 16dd08fb..40c3f0f2 100755 --- a/tests/test_conf.py +++ b/tests/test_conf.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_data.py b/tests/test_data.py index c3bd3f7b..b2bd0158 100755 --- a/tests/test_data.py +++ b/tests/test_data.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_event_type.py b/tests/test_event_type.py index fdf3235c..c50e1ea3 100755 --- a/tests/test_event_type.py +++ b/tests/test_event_type.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_fired_alert.py b/tests/test_fired_alert.py index c7f4e157..9d16fddc 100755 --- a/tests/test_fired_alert.py +++ b/tests/test_fired_alert.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_index.py b/tests/test_index.py index f577df3e..2582934b 100755 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_input.py b/tests/test_input.py index 02f585bc..f97ca4b4 100755 --- a/tests/test_input.py +++ b/tests/test_input.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_job.py b/tests/test_job.py index 8f3cef93..a276e212 100755 --- a/tests/test_job.py +++ b/tests/test_job.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_kvstore_batch.py b/tests/test_kvstore_batch.py index 10dfe142..a17a3e9d 100755 --- a/tests/test_kvstore_batch.py +++ b/tests/test_kvstore_batch.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright 2011-2020 Splunk, Inc. +# Copyright 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_kvstore_data.py b/tests/test_kvstore_data.py index 4c1dd86d..5860f6fc 100755 --- a/tests/test_kvstore_data.py +++ b/tests/test_kvstore_data.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright 2011-2020 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
You may obtain diff --git a/tests/test_logger.py b/tests/test_logger.py index 8afd10cc..46623e36 100755 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_message.py b/tests/test_message.py index da041b45..29f6a869 100755 --- a/tests/test_message.py +++ b/tests/test_message.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_modular_input.py b/tests/test_modular_input.py index 6473cdde..50a49d23 100755 --- a/tests/test_modular_input.py +++ b/tests/test_modular_input.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_modular_input_kinds.py b/tests/test_modular_input_kinds.py index c780e41f..1304f269 100755 --- a/tests/test_modular_input_kinds.py +++ b/tests/test_modular_input_kinds.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_results.py b/tests/test_results.py index 1454e733..bde1c4ab 100755 --- a/tests/test_results.py +++ b/tests/test_results.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_role.py b/tests/test_role.py index d1294413..2c087603 100755 --- a/tests/test_role.py +++ b/tests/test_role.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_saved_search.py b/tests/test_saved_search.py index 411d3bbc..a78d9420 100755 --- a/tests/test_saved_search.py +++ b/tests/test_saved_search.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_service.py b/tests/test_service.py index 93744ccf..6433b56b 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
You may obtain diff --git a/tests/test_storage_passwords.py b/tests/test_storage_passwords.py index 4e611066..bda832dd 100644 --- a/tests/test_storage_passwords.py +++ b/tests/test_storage_passwords.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/test_user.py b/tests/test_user.py index a508c3d5..e20b9694 100755 --- a/tests/test_user.py +++ b/tests/test_user.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/tests/testlib.py b/tests/testlib.py index 79ace526..c3109e24 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/utils/__init__.py b/utils/__init__.py index 60e60530..b6c45565 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -1,4 +1,4 @@ -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain diff --git a/utils/cmdopts.py b/utils/cmdopts.py index 1a8e9b02..63cdfb1d 100644 --- a/utils/cmdopts.py +++ b/utils/cmdopts.py @@ -1,4 +1,4 @@ -# Copyright © 2011-2023 Splunk, Inc. +# Copyright © 2011-2024 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. You may obtain From 621232a847a3abec49c7529bba7f4164b69a44e6 Mon Sep 17 00:00:00 2001 From: maszyk99 Date: Mon, 4 Mar 2024 15:59:34 +0100 Subject: [PATCH 50/60] Remove conversion of iterables into list objects --- splunklib/data.py | 6 +++--- splunklib/searchcommands/decorators.py | 8 ++++---- tests/searchcommands/test_decorators.py | 12 ++++++------ tests/searchcommands/test_internals_v1.py | 6 +++--- tests/searchcommands/test_internals_v2.py | 6 +++--- tests/test_conf.py | 2 +- tests/test_input.py | 10 +++++----- 7 files changed, 25 insertions(+), 25 deletions(-) diff --git a/splunklib/data.py b/splunklib/data.py index 32fbb522..34f3ffac 100644 --- a/splunklib/data.py +++ b/splunklib/data.py @@ -97,7 +97,7 @@ def load(text, match=None): def load_attrs(element): if not hasattrs(element): return None attrs = record() - for key, value in list(element.attrib.items()): + for key, value in element.attrib.items(): attrs[key] = value return attrs @@ -126,7 +126,7 @@ def load_elem(element, nametable=None): return name, attrs # Both attrs & value are complex, so merge the two dicts, resolving collisions. 
collision_keys = [] - for key, val in list(attrs.items()): + for key, val in attrs.items(): if key in value and key in collision_keys: value[key].append(val) elif key in value and key not in collision_keys: @@ -242,7 +242,7 @@ def __getitem__(self, key): return dict.__getitem__(self, key) key += self.sep result = record() - for k, v in list(self.items()): + for k, v in self.items(): if not k.startswith(key): continue suffix = k[len(key):] diff --git a/splunklib/searchcommands/decorators.py b/splunklib/searchcommands/decorators.py index b475d26e..1393d789 100644 --- a/splunklib/searchcommands/decorators.py +++ b/splunklib/searchcommands/decorators.py @@ -416,21 +416,21 @@ def __init__(self, command): OrderedDict.__init__(self, ((option.name, item_class(command, option)) for (name, option) in definitions)) def __repr__(self): - text = 'Option.View([' + ','.join([repr(item) for item in list(self.values())]) + '])' + text = 'Option.View([' + ','.join([repr(item) for item in self.values()]) + '])' return text def __str__(self): - text = ' '.join([str(item) for item in list(self.values()) if item.is_set]) + text = ' '.join([str(item) for item in self.values() if item.is_set]) return text # region Methods def get_missing(self): - missing = [item.name for item in list(self.values()) if item.is_required and not item.is_set] + missing = [item.name for item in self.values() if item.is_required and not item.is_set] return missing if len(missing) > 0 else None def reset(self): - for value in list(self.values()): + for value in self.values(): value.reset() # endregion diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index be9c3d3c..3cc571dd 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -353,9 +353,9 @@ def test_option(self): options.reset() missing = options.get_missing() - self.assertListEqual(missing, [option.name for option in list(options.values()) if option.is_required]) - self.assertListEqual(presets, [str(option) for option in list(options.values()) if option.value is not None]) - self.assertListEqual(presets, [str(option) for option in list(options.values()) if str(option) != option.name + '=None']) + self.assertListEqual(missing, [option.name for option in options.values() if option.is_required]) + self.assertListEqual(presets, [str(option) for option in options.values() if option.value is not None]) + self.assertListEqual(presets, [str(option) for option in options.values() if str(option) != option.name + '=None']) test_option_values = { validators.Boolean: ('0', 'non-boolean value'), @@ -372,7 +372,7 @@ def test_option(self): validators.RegularExpression: ('\\s+', '(poorly formed regular expression'), validators.Set: ('bar', 'non-existent set entry')} - for option in list(options.values()): + for option in options.values(): validator = option.validator if validator is None: @@ -431,9 +431,9 @@ def test_option(self): self.maxDiff = None tuplewrap = lambda x: x if isinstance(x, tuple) else (x,) - invert = lambda x: {v: k for k, v in list(x.items())} + invert = lambda x: {v: k for k, v in x.items()} - for x in list(command.options.values()): + for x in command.options.values(): # isinstance doesn't work for some reason if type(x.value).__name__ == 'Code': self.assertEqual(expected[x.name], x.value.source) diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py index e408271b..bea5c618 100755 --- a/tests/searchcommands/test_internals_v1.py +++ 
b/tests/searchcommands/test_internals_v1.py @@ -53,7 +53,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, options) - for option in list(command.options.values()): + for option in command.options.values(): if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) continue @@ -70,7 +70,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, options + fieldnames) - for option in list(command.options.values()): + for option in command.options.values(): if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) continue @@ -85,7 +85,7 @@ def fix_up(cls, command_class): pass command = TestCommandLineParserCommand() CommandLineParser.parse(command, ['required_option=true'] + fieldnames) - for option in list(command.options.values()): + for option in command.options.values(): if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', 'show_configuration']: self.assertFalse(option.is_set) diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index c3122b3e..722aaae2 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -157,7 +157,7 @@ def test_record_writer_with_random_data(self, save_recording=False): test_data['metrics'] = metrics - for name, metric in list(metrics.items()): + for name, metric in metrics.items(): writer.write_metric(name, metric) self.assertEqual(writer._chunk_count, 0) @@ -172,8 +172,8 @@ def test_record_writer_with_random_data(self, save_recording=False): self.assertListEqual(writer._inspector['messages'], messages) self.assertDictEqual( - dict(k_v for k_v in list(writer._inspector.items()) if k_v[0].startswith('metric.')), - dict(('metric.' + k_v1[0], k_v1[1]) for k_v1 in list(metrics.items()))) + dict(k_v for k_v in writer._inspector.items() if k_v[0].startswith('metric.')), + dict(('metric.' 
+ k_v1[0], k_v1[1]) for k_v1 in metrics.items())) writer.flush(finished=True) diff --git a/tests/test_conf.py b/tests/test_conf.py index 40c3f0f2..b00dbcc9 100755 --- a/tests/test_conf.py +++ b/tests/test_conf.py @@ -87,7 +87,7 @@ def test_confs(self): testlib.tmpname(): testlib.tmpname()} stanza.submit(values) stanza.refresh() - for key, value in list(values.items()): + for key, value in values.items(): self.assertTrue(key in stanza) self.assertEqual(value, stanza[key]) diff --git a/tests/test_input.py b/tests/test_input.py index f97ca4b4..53436f73 100755 --- a/tests/test_input.py +++ b/tests/test_input.py @@ -200,7 +200,7 @@ def setUp(self): def tearDown(self): super().tearDown() - for entity in list(self._test_entities.values()): + for entity in self._test_entities.values(): try: self.service.inputs.delete( kind=entity.kind, @@ -231,7 +231,7 @@ def test_lists_modular_inputs(self): def test_create(self): inputs = self.service.inputs - for entity in list(self._test_entities.values()): + for entity in self._test_entities.values(): self.check_entity(entity) self.assertTrue(isinstance(entity, client.Input)) @@ -242,7 +242,7 @@ def test_get_kind_list(self): def test_read(self): inputs = self.service.inputs - for this_entity in list(self._test_entities.values()): + for this_entity in self._test_entities.values(): kind, name = this_entity.kind, this_entity.name read_entity = inputs[name, kind] self.assertEqual(this_entity.kind, read_entity.kind) @@ -258,7 +258,7 @@ def test_read_indiviually(self): def test_update(self): inputs = self.service.inputs - for entity in list(self._test_entities.values()): + for entity in self._test_entities.values(): kind, name = entity.kind, entity.name kwargs = {'host': 'foo'} entity.update(**kwargs) @@ -269,7 +269,7 @@ def test_update(self): def test_delete(self): inputs = self.service.inputs remaining = len(self._test_entities) - 1 - for input_entity in list(self._test_entities.values()): + for input_entity in self._test_entities.values(): name = input_entity.name kind = input_entity.kind self.assertTrue(name in inputs) From 608288105d6ce8be7c7dc64e12e2f2241ac937d0 Mon Sep 17 00:00:00 2001 From: maszyk99 Date: Mon, 4 Mar 2024 16:01:02 +0100 Subject: [PATCH 51/60] Remove pass statement --- splunklib/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/splunklib/client.py b/splunklib/client.py index c0df2600..28024d50 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -2376,7 +2376,6 @@ def __contains__(self, key): entries = self._load_list(response) if len(entries) > 0: return True - pass except HTTPError as he: if he.status == 404: pass # Just carry on to the next kind. 
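PATCH 50 and PATCH 51 above are behavior-preserving cleanups. The substance of PATCH 50: in Python 3, ``dict.items()``, ``dict.keys()``, and ``dict.values()`` return live iterable views, so wrapping them in ``list()`` just to loop over them builds a pointless copy. A minimal sketch of the distinction (illustrative only, not taken from any of these patches)::

    d = {'a': 1, 'b': 2}

    # Before: list(d.items()) materializes a throwaway copy of the items.
    for k, v in list(d.items()):
        print(k, v)

    # After: the live view is iterated directly; no copy is made.
    for k, v in d.items():
        print(k, v)

    # A list() snapshot is still needed when the dict is mutated mid-loop:
    for k in list(d):
        del d[k]  # safe only because list(d) captured the keys up front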
From 9053482a3ef97e9d04684fec9d4117f1e40f6394 Mon Sep 17 00:00:00 2001 From: maszyk99 Date: Mon, 4 Mar 2024 16:02:54 +0100 Subject: [PATCH 52/60] Remove unneeded string format usage --- splunklib/searchcommands/search_command.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index ee4c7180..55e67b61 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -739,7 +739,7 @@ def _process_protocol_v2(self, argv, ifile, ofile): if missing is not None: if len(missing) == 1: - self.write_error(f'A value for "{missing[0]}" is required'.format()) + self.write_error(f'A value for "{missing[0]}" is required') else: self.write_error(f'Values for these required options are missing: {", ".join(missing)}') error_count += 1 From 1d1cc207f131682e02773f54df80960def228245 Mon Sep 17 00:00:00 2001 From: maszyk99 <157725801+maszyk99@users.noreply.github.com> Date: Thu, 7 Mar 2024 18:20:58 +0100 Subject: [PATCH 53/60] Pass correct args to logger Co-authored-by: Iuri Chaer --- splunklib/searchcommands/external_search_command.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splunklib/searchcommands/external_search_command.py b/splunklib/searchcommands/external_search_command.py index ef05c88b..a8929f8d 100644 --- a/splunklib/searchcommands/external_search_command.py +++ b/splunklib/searchcommands/external_search_command.py @@ -120,7 +120,7 @@ def _execute(path, argv=None, environ=None): raise ValueError(f'Cannot find command on path: {path}') path = found - logger.debug(f'starting command="{path}", arguments={path}') + logger.debug(f'starting command="{path}", arguments={argv}') def terminate(signal_number): sys.exit(f'External search command is terminating on receipt of signal={signal_number}.') From d805a7a4ee74a82607d64b2a2b33d4a44bf33d4c Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 13 Mar 2024 16:40:17 +0530 Subject: [PATCH 54/60] updated code - based on feedback updated code --- splunklib/binding.py | 2994 +++---- splunklib/client.py | 7815 ++++++++++--------- splunklib/searchcommands/search_command.py | 2286 +++--- tests/searchcommands/chunked_data_stream.py | 200 +- tests/searchcommands/test_internals_v1.py | 686 +- tests/test_binding.py | 1950 ++--- tests/testlib.py | 522 +- 7 files changed, 8227 insertions(+), 8226 deletions(-) mode change 100755 => 100644 tests/searchcommands/test_internals_v1.py mode change 100755 => 100644 tests/test_binding.py diff --git a/splunklib/binding.py b/splunklib/binding.py index 43ac2d48..7437fc2b 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -1,1497 +1,1497 @@ -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -#     http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""The **splunklib.binding** module provides a low-level binding interface to the -`Splunk REST API <http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTcontents>`_.
- -This module handles the wire details of calling the REST API, such as -authentication tokens, prefix paths, URL encoding, and so on. Actual path -segments, ``GET`` and ``POST`` arguments, and the parsing of responses is left -to the user. - -If you want a friendlier interface to the Splunk REST API, use the -:mod:`splunklib.client` module. -""" - -import io -import json -import logging -import socket -import ssl -import time -from base64 import b64encode -from contextlib import contextmanager -from datetime import datetime -from functools import wraps -from io import BytesIO -from urllib import parse -from http import client -from http.cookies import SimpleCookie -from xml.etree.ElementTree import XML, ParseError -from splunklib.data import record -from splunklib import __version__ - - -logger = logging.getLogger(__name__) - -__all__ = [ - "AuthenticationError", - "connect", - "Context", - "handler", - "HTTPError", - "UrlEncoded", - "_encode", - "_make_cookie_header", - "_NoAuthenticationToken", - "namespace" -] - -SENSITIVE_KEYS = ['Authorization', 'Cookie', 'action.email.auth_password', 'auth', 'auth_password', 'clear_password', 'clientId', - 'crc-salt', 'encr_password', 'oldpassword', 'passAuth', 'password', 'session', 'suppressionKey', - 'token'] - -# If you change these, update the docstring -# on _authority as well. -DEFAULT_HOST = "localhost" -DEFAULT_PORT = "8089" -DEFAULT_SCHEME = "https" - - -def _log_duration(f): - @wraps(f) - def new_f(*args, **kwargs): - start_time = datetime.now() - val = f(*args, **kwargs) - end_time = datetime.now() - logger.debug("Operation took %s", end_time - start_time) - return val - - return new_f - - -def mask_sensitive_data(data): - ''' - Masked sensitive fields data for logging purpose - ''' - if not isinstance(data, dict): - try: - data = json.loads(data) - except Exception as ex: - return data - - # json.loads will return "123"(str) as 123(int), so return the data if it's not 'dict' type - if not isinstance(data, dict): - return data - mdata = {} - for k, v in data.items(): - if k in SENSITIVE_KEYS: - mdata[k] = "******" - else: - mdata[k] = mask_sensitive_data(v) - return mdata - - -def _parse_cookies(cookie_str, dictionary): - """Tries to parse any key-value pairs of cookies in a string, - then updates the the dictionary with any key-value pairs found. - - **Example**:: - - dictionary = {} - _parse_cookies('my=value', dictionary) - # Now the following is True - dictionary['my'] == 'value' - - :param cookie_str: A string containing "key=value" pairs from an HTTP "Set-Cookie" header. - :type cookie_str: ``str`` - :param dictionary: A dictionary to update with any found key-value pairs. - :type dictionary: ``dict`` - """ - parsed_cookie = SimpleCookie(cookie_str) - for cookie in list(parsed_cookie.values()): - dictionary[cookie.key] = cookie.coded_value - - -def _make_cookie_header(cookies): - """ - Takes a list of 2-tuples of key-value pairs of - cookies, and returns a valid HTTP ``Cookie`` - header. - - **Example**:: - - header = _make_cookie_header([("key", "value"), ("key_2", "value_2")]) - # Now the following is True - header == "key=value; key_2=value_2" - - :param cookies: A list of 2-tuples of cookie key-value pairs. - :type cookies: ``list`` of 2-tuples - :return: ``str` An HTTP header cookie string. 
- :rtype: ``str`` - """ - return "; ".join(f"{key}={value}" for key, value in cookies) - - -# Singleton values to eschew None -class _NoAuthenticationToken: - """The value stored in a :class:`Context` or :class:`splunklib.client.Service` - class that is not logged in. - - If a ``Context`` or ``Service`` object is created without an authentication - token, and there has not yet been a call to the ``login`` method, the token - field of the ``Context`` or ``Service`` object is set to - ``_NoAuthenticationToken``. - - Likewise, after a ``Context`` or ``Service`` object has been logged out, the - token is set to this value again. - """ - - -class UrlEncoded(str): - """This class marks URL-encoded strings. - It should be considered an SDK-private implementation detail. - - Manually tracking whether strings are URL encoded can be difficult. Avoid - calling ``urllib.quote`` to replace special characters with escapes. When - you receive a URL-encoded string, *do* use ``urllib.unquote`` to replace - escapes with single characters. Then, wrap any string you want to use as a - URL in ``UrlEncoded``. Note that because the ``UrlEncoded`` class is - idempotent, making multiple calls to it is OK. - - ``UrlEncoded`` objects are identical to ``str`` objects (including being - equal if their contents are equal) except when passed to ``UrlEncoded`` - again. - - ``UrlEncoded`` removes the ``str`` type support for interpolating values - with ``%`` (doing that raises a ``TypeError``). There is no reliable way to - encode values this way, so instead, interpolate into a string, quoting by - hand, and call ``UrlEncode`` with ``skip_encode=True``. - - **Example**:: - - import urllib - UrlEncoded(f'{scheme}://{urllib.quote(host)}', skip_encode=True) - - If you append ``str`` strings and ``UrlEncoded`` strings, the result is also - URL encoded. - - **Example**:: - - UrlEncoded('ab c') + 'de f' == UrlEncoded('ab cde f') - 'ab c' + UrlEncoded('de f') == UrlEncoded('ab cde f') - """ - - def __new__(self, val='', skip_encode=False, encode_slash=False): - if isinstance(val, UrlEncoded): - # Don't urllib.quote something already URL encoded. - return val - if skip_encode: - return str.__new__(self, val) - if encode_slash: - return str.__new__(self, parse.quote_plus(val)) - # When subclassing str, just call str.__new__ method - # with your class and the value you want to have in the - # new string. - return str.__new__(self, parse.quote(val)) - - def __add__(self, other): - """self + other - - If *other* is not a ``UrlEncoded``, URL encode it before - adding it. - """ - if isinstance(other, UrlEncoded): - return UrlEncoded(str.__add__(self, other), skip_encode=True) - - return UrlEncoded(str.__add__(self, parse.quote(other)), skip_encode=True) - - def __radd__(self, other): - """other + self - - If *other* is not a ``UrlEncoded``, URL _encode it before - adding it. - """ - if isinstance(other, UrlEncoded): - return UrlEncoded(str.__radd__(self, other), skip_encode=True) - - return UrlEncoded(str.__add__(parse.quote(other), self), skip_encode=True) - - def __mod__(self, fields): - """Interpolation into ``UrlEncoded``s is disabled. - - If you try to write ``UrlEncoded("%s") % "abc", will get a - ``TypeError``. - """ - raise TypeError("Cannot interpolate into a UrlEncoded object.") - - def __repr__(self): - return f"UrlEncoded({repr(parse.unquote(str(self)))})" - - -@contextmanager -def _handle_auth_error(msg): - """Handle re-raising HTTP authentication errors as something clearer. 
- - If an ``HTTPError`` is raised with status 401 (access denied) in - the body of this context manager, re-raise it as an - ``AuthenticationError`` instead, with *msg* as its message. - - This function adds no round trips to the server. - - :param msg: The message to be raised in ``AuthenticationError``. - :type msg: ``str`` - - **Example**:: - - with _handle_auth_error("Your login failed."): - ... # make an HTTP request - """ - try: - yield - except HTTPError as he: - if he.status == 401: - raise AuthenticationError(msg, he) - else: - raise - - -def _authentication(request_fun): - """Decorator to handle autologin and authentication errors. - - *request_fun* is a function taking no arguments that needs to - be run with this ``Context`` logged into Splunk. - - ``_authentication``'s behavior depends on whether the - ``autologin`` field of ``Context`` is set to ``True`` or - ``False``. If it's ``False``, then ``_authentication`` - aborts if the ``Context`` is not logged in, and raises an - ``AuthenticationError`` if an ``HTTPError`` of status 401 is - raised in *request_fun*. If it's ``True``, then - ``_authentication`` will try at all sensible places to - log in before issuing the request. - - If ``autologin`` is ``False``, ``_authentication`` makes - one roundtrip to the server if the ``Context`` is logged in, - or zero if it is not. If ``autologin`` is ``True``, it's less - deterministic, and may make at most three roundtrips (though - that would be a truly pathological case). - - :param request_fun: A function of no arguments encapsulating - the request to make to the server. - - **Example**:: - - import splunklib.binding as binding - c = binding.connect(..., autologin=True) - c.logout() - def f(): - c.get("/services") - return 42 - print(_authentication(f)) - """ - - @wraps(request_fun) - def wrapper(self, *args, **kwargs): - if self.token is _NoAuthenticationToken and not self.has_cookies(): - # Not yet logged in. - if self.autologin and self.username and self.password: - # This will throw an uncaught - # AuthenticationError if it fails. - self.login() - else: - # Try the request anyway without authentication. - # Most requests will fail. Some will succeed, such as - # 'GET server/info'. - with _handle_auth_error("Request aborted: not logged in."): - return request_fun(self, *args, **kwargs) - try: - # Issue the request - return request_fun(self, *args, **kwargs) - except HTTPError as he: - if he.status == 401 and self.autologin: - # Authentication failed. Try logging in, and then - # rerunning the request. If either step fails, throw - # an AuthenticationError and give up. - with _handle_auth_error("Autologin failed."): - self.login() - with _handle_auth_error("Authentication Failed! If session token is used, it seems to have been expired."): - return request_fun(self, *args, **kwargs) - elif he.status == 401 and not self.autologin: - raise AuthenticationError( - "Request failed: Session is not logged in.", he) - else: - raise - - return wrapper - - -def _authority(scheme=DEFAULT_SCHEME, host=DEFAULT_HOST, port=DEFAULT_PORT): - """Construct a URL authority from the given *scheme*, *host*, and *port*. - - Named in accordance with RFC2396_, which defines URLs as:: - - <scheme>://<authority><path>?<query> - - .. 
_RFC2396: http://www.ietf.org/rfc/rfc2396.txt - - So ``https://localhost:8000/a/b/b?boris=hilda`` would be parsed as:: - - scheme := https - authority := localhost:8000 - path := /a/b/c - query := boris=hilda - - :param scheme: URL scheme (the default is "https") - :type scheme: "http" or "https" - :param host: The host name (the default is "localhost") - :type host: string - :param port: The port number (the default is 8089) - :type port: integer - :return: The URL authority. - :rtype: UrlEncoded (subclass of ``str``) - - **Example**:: - - _authority() == "https://localhost:8089" - - _authority(host="splunk.utopia.net") == "https://splunk.utopia.net:8089" - - _authority(host="2001:0db8:85a3:0000:0000:8a2e:0370:7334") == \ - "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089" - - _authority(scheme="http", host="splunk.utopia.net", port="471") == \ - "http://splunk.utopia.net:471" - - """ - # check if host is an IPv6 address and not enclosed in [ ] - if ':' in host and not (host.startswith('[') and host.endswith(']')): - # IPv6 addresses must be enclosed in [ ] in order to be well - # formed. - host = '[' + host + ']' - return UrlEncoded(f"{scheme}://{host}:{port}", skip_encode=True) - - -# kwargs: sharing, owner, app -def namespace(sharing=None, owner=None, app=None, **kwargs): - """This function constructs a Splunk namespace. - - Every Splunk resource belongs to a namespace. The namespace is specified by - the pair of values ``owner`` and ``app`` and is governed by a ``sharing`` mode. - The possible values for ``sharing`` are: "user", "app", "global" and "system", - which map to the following combinations of ``owner`` and ``app`` values: - - "user" => {owner}, {app} - - "app" => nobody, {app} - - "global" => nobody, {app} - - "system" => nobody, system - - "nobody" is a special user name that basically means no user, and "system" - is the name reserved for system resources. - - "-" is a wildcard that can be used for both ``owner`` and ``app`` values and - refers to all users and all apps, respectively. - - In general, when you specify a namespace you can specify any combination of - these three values and the library will reconcile the triple, overriding the - provided values as appropriate. - - Finally, if no namespacing is specified the library will make use of the - ``/services`` branch of the REST API, which provides a namespaced view of - Splunk resources equivelent to using ``owner={currentUser}`` and - ``app={defaultApp}``. - - The ``namespace`` function returns a representation of the namespace from - reconciling the values you provide. It ignores any keyword arguments other - than ``owner``, ``app``, and ``sharing``, so you can provide ``dicts`` of - configuration information without first having to extract individual keys. - - :param sharing: The sharing mode (the default is "user"). - :type sharing: "system", "global", "app", or "user" - :param owner: The owner context (the default is "None"). - :type owner: ``string`` - :param app: The app context (the default is "None"). - :type app: ``string`` - :returns: A :class:`splunklib.data.Record` containing the reconciled - namespace. 
- - **Example**:: - - import splunklib.binding as binding - n = binding.namespace(sharing="user", owner="boris", app="search") - n = binding.namespace(sharing="global", app="search") - """ - if sharing in ["system"]: - return record({'sharing': sharing, 'owner': "nobody", 'app': "system"}) - if sharing in ["global", "app"]: - return record({'sharing': sharing, 'owner': "nobody", 'app': app}) - if sharing in ["user", None]: - return record({'sharing': sharing, 'owner': owner, 'app': app}) - raise ValueError("Invalid value for argument: 'sharing'") - - -class Context: - """This class represents a context that encapsulates a splunkd connection. - - The ``Context`` class encapsulates the details of HTTP requests, - authentication, a default namespace, and URL prefixes to simplify access to - the REST API. - - After creating a ``Context`` object, you must call its :meth:`login` - method before you can issue requests to splunkd. Or, use the :func:`connect` - function to create an already-authenticated ``Context`` object. You can - provide a session token explicitly (the same token can be shared by multiple - ``Context`` objects) to provide authentication. - - :param host: The host name (the default is "localhost"). - :type host: ``string`` - :param port: The port number (the default is 8089). - :type port: ``integer`` - :param scheme: The scheme for accessing the service (the default is "https"). - :type scheme: "https" or "http" - :param verify: Enable (True) or disable (False) SSL verification for https connections. - :type verify: ``Boolean`` - :param sharing: The sharing mode for the namespace (the default is "user"). - :type sharing: "global", "system", "app", or "user" - :param owner: The owner context of the namespace (optional, the default is "None"). - :type owner: ``string`` - :param app: The app context of the namespace (optional, the default is "None"). - :type app: ``string`` - :param token: A session token. When provided, you don't need to call :meth:`login`. - :type token: ``string`` - :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. - This parameter is only supported for Splunk 6.2+. - :type cookie: ``string`` - :param username: The Splunk account username, which is used to - authenticate the Splunk instance. - :type username: ``string`` - :param password: The password for the Splunk account. - :type password: ``string`` - :param splunkToken: Splunk authentication token - :type splunkToken: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param retires: Number of retries for each HTTP connection (optional, the default is 0). - NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER AND BLOCK THE - CURRENT THREAD WHILE RETRYING. - :type retries: ``int`` - :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s). - :type retryDelay: ``int`` (in seconds) - :param handler: The HTTP request handler (optional). - :returns: A ``Context`` instance. - - **Example**:: - - import splunklib.binding as binding - c = binding.Context(username="boris", password="natasha", ...) 
- c.login() - # Or equivalently - c = binding.connect(username="boris", password="natasha") - # Or if you already have a session token - c = binding.Context(token="atg232342aa34324a") - # Or if you already have a valid cookie - c = binding.Context(cookie="splunkd_8089=...") - """ - - def __init__(self, handler=None, **kwargs): - self.http = HttpLib(handler, kwargs.get("verify", False), key_file=kwargs.get("key_file"), - cert_file=kwargs.get("cert_file"), context=kwargs.get("context"), - # Default to False for backward compat - retries=kwargs.get("retries", 0), retryDelay=kwargs.get("retryDelay", 10)) - self.token = kwargs.get("token", _NoAuthenticationToken) - if self.token is None: # In case someone explicitly passes token=None - self.token = _NoAuthenticationToken - self.scheme = kwargs.get("scheme", DEFAULT_SCHEME) - self.host = kwargs.get("host", DEFAULT_HOST) - self.port = int(kwargs.get("port", DEFAULT_PORT)) - self.authority = _authority(self.scheme, self.host, self.port) - self.namespace = namespace(**kwargs) - self.username = kwargs.get("username", "") - self.password = kwargs.get("password", "") - self.basic = kwargs.get("basic", False) - self.bearerToken = kwargs.get("splunkToken", "") - self.autologin = kwargs.get("autologin", False) - self.additional_headers = kwargs.get("headers", []) - - # Store any cookies in the self.http._cookies dict - if "cookie" in kwargs and kwargs['cookie'] not in [None, _NoAuthenticationToken]: - _parse_cookies(kwargs["cookie"], self.http._cookies) - - def get_cookies(self): - """Gets the dictionary of cookies from the ``HttpLib`` member of this instance. - - :return: Dictionary of cookies stored on the ``self.http``. - :rtype: ``dict`` - """ - return self.http._cookies - - def has_cookies(self): - """Returns true if the ``HttpLib`` member of this instance has auth token stored. - - :return: ``True`` if there is auth token present, else ``False`` - :rtype: ``bool`` - """ - auth_token_key = "splunkd_" - return any(auth_token_key in key for key in list(self.get_cookies().keys())) - - # Shared per-context request headers - @property - def _auth_headers(self): - """Headers required to authenticate a request. - - Assumes your ``Context`` already has a authentication token or - cookie, either provided explicitly or obtained by logging - into the Splunk instance. - - :returns: A list of 2-tuples containing key and value - """ - header = [] - if self.has_cookies(): - return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))] - elif self.basic and (self.username and self.password): - token = f'Basic {b64encode(("%s:%s" % (self.username, self.password)).encode("utf-8")).decode("ascii")}' - elif self.bearerToken: - token = f'Bearer {self.bearerToken}' - elif self.token is _NoAuthenticationToken: - token = [] - else: - # Ensure the token is properly formatted - if self.token.startswith('Splunk '): - token = self.token - else: - token = f'Splunk {self.token}' - if token: - header.append(("Authorization", token)) - if self.get_cookies(): - header.append(("Cookie", _make_cookie_header(list(self.get_cookies().items())))) - - return header - - def connect(self): - """Returns an open connection (socket) to the Splunk instance. - - This method is used for writing bulk events to an index or similar tasks - where the overhead of opening a connection multiple times would be - prohibitive. - - :returns: A socket. - - **Example**:: - - import splunklib.binding as binding - c = binding.connect(...) 
- socket = c.connect() - socket.write("POST %s HTTP/1.1\\r\\n" % "some/path/to/post/to") - socket.write("Host: %s:%s\\r\\n" % (c.host, c.port)) - socket.write("Accept-Encoding: identity\\r\\n") - socket.write("Authorization: %s\\r\\n" % c.token) - socket.write("X-Splunk-Input-Mode: Streaming\\r\\n") - socket.write("\\r\\n") - """ - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - if self.scheme == "https": - sock = ssl.wrap_socket(sock) - sock.connect((socket.gethostbyname(self.host), self.port)) - return sock - - @_authentication - @_log_duration - def delete(self, path_segment, owner=None, app=None, sharing=None, **query): - """Performs a DELETE operation at the REST path segment with the given - namespace and query. - - This method is named to match the HTTP method. ``delete`` makes at least - one round trip to the server, one additional round trip for each 303 - status returned, and at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - If *owner*, *app*, and *sharing* are omitted, this method uses the - default :class:`Context` namespace. All other keyword arguments are - included in the URL as query parameters. - - :raises AuthenticationError: Raised when the ``Context`` object is not - logged in. - :raises HTTPError: Raised when an error occurred in a GET operation from - *path_segment*. - :param path_segment: A REST path segment. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :param query: All other keyword arguments, which are used as query - parameters. - :type query: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - c = binding.connect(...) - c.delete('saved/searches/boris') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '1786'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 16:53:06 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'OK', - 'status': 200} - c.delete('nonexistant/path') # raises HTTPError - c.logout() - c.delete('apps/local') # raises AuthenticationError - """ - path = self.authority + self._abspath(path_segment, owner=owner, - app=app, sharing=sharing) - logger.debug("DELETE request to %s (body: %s)", path, mask_sensitive_data(query)) - response = self.http.delete(path, self._auth_headers, **query) - return response - - @_authentication - @_log_duration - def get(self, path_segment, owner=None, app=None, headers=None, sharing=None, **query): - """Performs a GET operation from the REST path segment with the given - namespace and query. - - This method is named to match the HTTP method. ``get`` makes at least - one round trip to the server, one additional round trip for each 303 - status returned, and at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - If *owner*, *app*, and *sharing* are omitted, this method uses the - default :class:`Context` namespace. All other keyword arguments are - included in the URL as query parameters. 
- - :raises AuthenticationError: Raised when the ``Context`` object is not - logged in. - :raises HTTPError: Raised when an error occurred in a GET operation from - *path_segment*. - :param path_segment: A REST path segment. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :param query: All other keyword arguments, which are used as query - parameters. - :type query: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - c = binding.connect(...) - c.get('apps/local') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '26208'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 16:30:35 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'OK', - 'status': 200} - c.get('nonexistant/path') # raises HTTPError - c.logout() - c.get('apps/local') # raises AuthenticationError - """ - if headers is None: - headers = [] - - path = self.authority + self._abspath(path_segment, owner=owner, - app=app, sharing=sharing) - logger.debug("GET request to %s (body: %s)", path, mask_sensitive_data(query)) - all_headers = headers + self.additional_headers + self._auth_headers - response = self.http.get(path, all_headers, **query) - return response - - @_authentication - @_log_duration - def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, **query): - """Performs a POST operation from the REST path segment with the given - namespace and query. - - This method is named to match the HTTP method. ``post`` makes at least - one round trip to the server, one additional round trip for each 303 - status returned, and at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - If *owner*, *app*, and *sharing* are omitted, this method uses the - default :class:`Context` namespace. All other keyword arguments are - included in the URL as query parameters. - - Some of Splunk's endpoints, such as ``receivers/simple`` and - ``receivers/stream``, require unstructured data in the POST body - and all metadata passed as GET-style arguments. If you provide - a ``body`` argument to ``post``, it will be used as the POST - body, and all other keyword arguments will be passed as - GET-style arguments in the URL. - - :raises AuthenticationError: Raised when the ``Context`` object is not - logged in. - :raises HTTPError: Raised when an error occurred in a GET operation from - *path_segment*. - :param path_segment: A REST path segment. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param query: All other keyword arguments, which are used as query - parameters. 
- :param body: Parameters to be used in the post body. If specified, - any parameters in the query will be applied to the URL instead of - the body. If a dict is supplied, the key-value pairs will be form - encoded. If a string is supplied, the body will be passed through - in the request unchanged. - :type body: ``dict`` or ``str`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - c = binding.connect(...) - c.post('saved/searches', name='boris', - search='search * earliest=-1m | head 1') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '10455'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 16:46:06 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'Created', - 'status': 201} - c.post('nonexistant/path') # raises HTTPError - c.logout() - # raises AuthenticationError: - c.post('saved/searches', name='boris', - search='search * earliest=-1m | head 1') - """ - if headers is None: - headers = [] - - path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - - logger.debug("POST request to %s (body: %s)", path, mask_sensitive_data(query)) - all_headers = headers + self.additional_headers + self._auth_headers - response = self.http.post(path, all_headers, **query) - return response - - @_authentication - @_log_duration - def request(self, path_segment, method="GET", headers=None, body={}, - owner=None, app=None, sharing=None): - """Issues an arbitrary HTTP request to the REST path segment. - - This method is named to match ``httplib.request``. This function - makes a single round trip to the server. - - If *owner*, *app*, and *sharing* are omitted, this method uses the - default :class:`Context` namespace. All other keyword arguments are - included in the URL as query parameters. - - :raises AuthenticationError: Raised when the ``Context`` object is not - logged in. - :raises HTTPError: Raised when an error occurred in a GET operation from - *path_segment*. - :param path_segment: A REST path segment. - :type path_segment: ``string`` - :param method: The HTTP method to use (optional). - :type method: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param body: Content of the HTTP request (optional). - :type body: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - c = binding.connect(...) 
- c.request('saved/searches', method='GET') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '46722'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 17:24:19 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'OK', - 'status': 200} - c.request('nonexistant/path', method='GET') # raises HTTPError - c.logout() - c.get('apps/local') # raises AuthenticationError - """ - if headers is None: - headers = [] - - path = self.authority \ - + self._abspath(path_segment, owner=owner, - app=app, sharing=sharing) - - all_headers = headers + self.additional_headers + self._auth_headers - logger.debug("%s request to %s (headers: %s, body: %s)", - method, path, str(mask_sensitive_data(dict(all_headers))), mask_sensitive_data(body)) - if body: - body = _encode(**body) - - if method == "GET": - path = path + UrlEncoded('?' + body, skip_encode=True) - message = {'method': method, - 'headers': all_headers} - else: - message = {'method': method, - 'headers': all_headers, - 'body': body} - else: - message = {'method': method, - 'headers': all_headers} - - response = self.http.request(path, message) - - return response - - def login(self): - """Logs into the Splunk instance referred to by the :class:`Context` - object. - - Unless a ``Context`` is created with an explicit authentication token - (probably obtained by logging in from a different ``Context`` object) - you must call :meth:`login` before you can issue requests. - The authentication token obtained from the server is stored in the - ``token`` field of the ``Context`` object. - - :raises AuthenticationError: Raised when login fails. - :returns: The ``Context`` object, so you can chain calls. - - **Example**:: - - import splunklib.binding as binding - c = binding.Context(...).login() - # Then issue requests... - """ - - if self.has_cookies() and \ - (not self.username and not self.password): - # If we were passed session cookie(s), but no username or - # password, then login is a nop, since we're automatically - # logged in. - return - - if self.token is not _NoAuthenticationToken and \ - (not self.username and not self.password): - # If we were passed a session token, but no username or - # password, then login is a nop, since we're automatically - # logged in. - return - - if self.basic and (self.username and self.password): - # Basic auth mode requested, so this method is a nop as long - # as credentials were passed in. - return - - if self.bearerToken: - # Bearer auth mode requested, so this method is a nop as long - # as authentication token was passed in. 
- return - # Only try to get a token and updated cookie if username & password are specified - try: - response = self.http.post( - self.authority + self._abspath("/services/auth/login"), - username=self.username, - password=self.password, - headers=self.additional_headers, - cookie="1") # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header - - body = response.body.read() - session = XML(body).findtext("./sessionKey") - self.token = f"Splunk {session}" - return self - except HTTPError as he: - if he.status == 401: - raise AuthenticationError("Login failed.", he) - else: - raise - - def logout(self): - """Forgets the current session token, and cookies.""" - self.token = _NoAuthenticationToken - self.http._cookies = {} - return self - - def _abspath(self, path_segment, - owner=None, app=None, sharing=None): - """Qualifies *path_segment* into an absolute path for a URL. - - If *path_segment* is already absolute, returns it unchanged. - If *path_segment* is relative, then qualifies it with either - the provided namespace arguments or the ``Context``'s default - namespace. Any forbidden characters in *path_segment* are URL - encoded. This function has no network activity. - - Named to be consistent with RFC2396_. - - .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt - - :param path_segment: A relative or absolute URL path segment. - :type path_segment: ``string`` - :param owner, app, sharing: Components of a namespace (defaults - to the ``Context``'s namespace if all - three are omitted) - :type owner, app, sharing: ``string`` - :return: A ``UrlEncoded`` (a subclass of ``str``). - :rtype: ``string`` - - **Example**:: - - import splunklib.binding as binding - c = binding.connect(owner='boris', app='search', sharing='user') - c._abspath('/a/b/c') == '/a/b/c' - c._abspath('/a/b c/d') == '/a/b%20c/d' - c._abspath('apps/local/search') == \ - '/servicesNS/boris/search/apps/local/search' - c._abspath('apps/local/search', sharing='system') == \ - '/servicesNS/nobody/system/apps/local/search' - url = c.authority + c._abspath('apps/local/sharing') - """ - skip_encode = isinstance(path_segment, UrlEncoded) - # If path_segment is absolute, escape all forbidden characters - # in it and return it. - if path_segment.startswith('/'): - return UrlEncoded(path_segment, skip_encode=skip_encode) - - # path_segment is relative, so we need a namespace to build an - # absolute path. - if owner or app or sharing: - ns = namespace(owner=owner, app=app, sharing=sharing) - else: - ns = self.namespace - - # If no app or owner are specified, then use the /services - # endpoint. Otherwise, use /servicesNS with the specified - # namespace. If only one of app and owner is specified, use - # '-' for the other. - if ns.app is None and ns.owner is None: - return UrlEncoded(f"/services/{path_segment}", skip_encode=skip_encode) - - oname = "nobody" if ns.owner is None else ns.owner - aname = "system" if ns.app is None else ns.app - path = UrlEncoded(f"/servicesNS/{oname}/{aname}/{path_segment}", skip_encode=skip_encode) - return path - - -def connect(**kwargs): - """This function returns an authenticated :class:`Context` object. - - This function is a shorthand for calling :meth:`Context.login`. - - This function makes one round trip to the server. - - :param host: The host name (the default is "localhost"). - :type host: ``string`` - :param port: The port number (the default is 8089). - :type port: ``integer`` - :param scheme: The scheme for accessing the service (the default is "https"). 
- :type scheme: "https" or "http" - :param owner: The owner context of the namespace (the default is "None"). - :type owner: ``string`` - :param app: The app context of the namespace (the default is "None"). - :type app: ``string`` - :param sharing: The sharing mode for the namespace (the default is "user"). - :type sharing: "global", "system", "app", or "user" - :param token: The current session token (optional). Session tokens can be - shared across multiple service instances. - :type token: ``string`` - :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. - This parameter is only supported for Splunk 6.2+. - :type cookie: ``string`` - :param username: The Splunk account username, which is used to - authenticate the Splunk instance. - :type username: ``string`` - :param password: The password for the Splunk account. - :type password: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param autologin: When ``True``, automatically tries to log in again if the - session terminates. - :type autologin: ``Boolean`` - :return: An initialized :class:`Context` instance. - - **Example**:: - - import splunklib.binding as binding - c = binding.connect(...) - response = c.get("apps/local") - """ - c = Context(**kwargs) - c.login() - return c - - -# Note: the error response schema supports multiple messages but we only -# return the first, although we do return the body so that an exception -# handler that wants to read multiple messages can do so. -class HTTPError(Exception): - """This exception is raised for HTTP responses that return an error.""" - - def __init__(self, response, _message=None): - status = response.status - reason = response.reason - body = response.body.read() - try: - detail = XML(body).findtext("./messages/msg") - except ParseError: - detail = body - detail_formatted = "" if detail is None else f" -- {detail}" - message = f"HTTP {status} {reason}{detail_formatted}" - Exception.__init__(self, _message or message) - self.status = status - self.reason = reason - self.headers = response.headers - self.body = body - self._response = response - - -class AuthenticationError(HTTPError): - """Raised when a login request to Splunk fails. - - If your username was unknown or you provided an incorrect password - in a call to :meth:`Context.login` or :meth:`splunklib.client.Service.login`, - this exception is raised. - """ - - def __init__(self, message, cause): - # Put the body back in the response so that HTTPError's constructor can - # read it again. - cause._response.body = BytesIO(cause.body) - - HTTPError.__init__(self, cause._response, message) - - -# -# The HTTP interface used by the Splunk binding layer abstracts the underlying -# HTTP library using request & response 'messages' which are implemented as -# dictionaries with the following structure: -# -# # HTTP request message (only method required) -# request { -# method : str, -# headers? : [(str, str)*], -# body? : str, -# } -# -# # HTTP response message (all keys present) -# response { -# status : int, -# reason : str, -# headers : [(str, str)*], -# body : file, -# } -# - -# Encode the given kwargs as a query string. This wrapper will also _encode -# a list value as a sequence of assignments to the corresponding arg name, -# for example an argument such as 'foo=[1,2,3]' will be encoded as -# 'foo=1&foo=2&foo=3'. 
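
To make the comment above concrete, here is a quick sketch of the flattening
performed by the ``_encode`` helper defined just below (the argument names are
illustrative)::

    _encode(count=1, fields=['a', 'b'])   # -> 'count=1&fields=a&fields=b'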
-def _encode(**kwargs): - items = [] - for key, value in list(kwargs.items()): - if isinstance(value, list): - items.extend([(key, item) for item in value]) - else: - items.append((key, value)) - return parse.urlencode(items) - - -# Crack the given url into (scheme, host, port, path) -def _spliturl(url): - parsed_url = parse.urlparse(url) - host = parsed_url.hostname - port = parsed_url.port - path = '?'.join((parsed_url.path, parsed_url.query)) if parsed_url.query else parsed_url.path - # Strip brackets if its an IPv6 address - if host.startswith('[') and host.endswith(']'): host = host[1:-1] - if port is None: port = DEFAULT_PORT - return parsed_url.scheme, host, port, path - - -# Given an HTTP request handler, this wrapper objects provides a related -# family of convenience methods built using that handler. -class HttpLib: - """A set of convenient methods for making HTTP calls. - - ``HttpLib`` provides a general :meth:`request` method, and :meth:`delete`, - :meth:`post`, and :meth:`get` methods for the three HTTP methods that Splunk - uses. - - By default, ``HttpLib`` uses Python's built-in ``httplib`` library, - but you can replace it by passing your own handling function to the - constructor for ``HttpLib``. - - The handling function should have the type: - - ``handler(`url`, `request_dict`) -> response_dict`` - - where `url` is the URL to make the request to (including any query and - fragment sections) as a dictionary with the following keys: - - - method: The method for the request, typically ``GET``, ``POST``, or ``DELETE``. - - - headers: A list of pairs specifying the HTTP headers (for example: ``[('key': value), ...]``). - - - body: A string containing the body to send with the request (this string - should default to ''). - - and ``response_dict`` is a dictionary with the following keys: - - - status: An integer containing the HTTP status code (such as 200 or 404). - - - reason: The reason phrase, if any, returned by the server. - - - headers: A list of pairs containing the response headers (for example, ``[('key': value), ...]``). - - - body: A stream-like object supporting ``read(size=None)`` and ``close()`` - methods to get the body of the response. - - The response dictionary is returned directly by ``HttpLib``'s methods with - no further processing. By default, ``HttpLib`` calls the :func:`handler` function - to get a handler function. - - If using the default handler, SSL verification can be disabled by passing verify=False. - """ - - def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None, context=None, retries=0, - retryDelay=10): - if custom_handler is None: - self.handler = handler(verify=verify, key_file=key_file, cert_file=cert_file, context=context) - else: - self.handler = custom_handler - self._cookies = {} - self.retries = retries - self.retryDelay = retryDelay - - def delete(self, url, headers=None, **kwargs): - """Sends a DELETE request to a URL. - - :param url: The URL. - :type url: ``string`` - :param headers: A list of pairs specifying the headers for the HTTP - response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). - :type headers: ``list`` - :param kwargs: Additional keyword arguments (optional). These arguments - are interpreted as the query part of the URL. The order of keyword - arguments is not preserved in the request, but the keywords and - their arguments will be URL encoded. - :type kwargs: ``dict`` - :returns: A dictionary describing the response (see :class:`HttpLib` for - its structure). 
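
The handler contract described in the ``HttpLib`` docstring above makes it easy
to wrap the default handler. A minimal sketch, using only the ``handler`` and
``HttpLib`` names defined in this module (the logging is illustrative)::

    import splunklib.binding as binding

    default_handler = binding.handler(timeout=30)

    def logging_handler(url, message, **kwargs):
        # Log the method and URL, then delegate to the default handler.
        print(message.get('method', 'GET'), url)
        return default_handler(url, message, **kwargs)

    http = binding.HttpLib(logging_handler)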
- :rtype: ``dict`` - """ - if headers is None: headers = [] - if kwargs: - # url is already a UrlEncoded. We have to manually declare - # the query to be encoded or it will get automatically URL - # encoded by being appended to url. - url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) - message = { - 'method': "DELETE", - 'headers': headers, - } - return self.request(url, message) - - def get(self, url, headers=None, **kwargs): - """Sends a GET request to a URL. - - :param url: The URL. - :type url: ``string`` - :param headers: A list of pairs specifying the headers for the HTTP - response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). - :type headers: ``list`` - :param kwargs: Additional keyword arguments (optional). These arguments - are interpreted as the query part of the URL. The order of keyword - arguments is not preserved in the request, but the keywords and - their arguments will be URL encoded. - :type kwargs: ``dict`` - :returns: A dictionary describing the response (see :class:`HttpLib` for - its structure). - :rtype: ``dict`` - """ - if headers is None: headers = [] - if kwargs: - # url is already a UrlEncoded. We have to manually declare - # the query to be encoded or it will get automatically URL - # encoded by being appended to url. - url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) - return self.request(url, {'method': "GET", 'headers': headers}) - - def post(self, url, headers=None, **kwargs): - """Sends a POST request to a URL. - - :param url: The URL. - :type url: ``string`` - :param headers: A list of pairs specifying the headers for the HTTP - response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). - :type headers: ``list`` - :param kwargs: Additional keyword arguments (optional). If the argument - is ``body``, the value is used as the body for the request, and the - keywords and their arguments will be URL encoded. If there is no - ``body`` keyword argument, all the keyword arguments are encoded - into the body of the request in the format ``x-www-form-urlencoded``. - :type kwargs: ``dict`` - :returns: A dictionary describing the response (see :class:`HttpLib` for - its structure). - :rtype: ``dict`` - """ - if headers is None: headers = [] - - # We handle GET-style arguments and an unstructured body. This is here - # to support the receivers/stream endpoint. - if 'body' in kwargs: - # We only use application/x-www-form-urlencoded if there is no other - # Content-Type header present. This can happen in cases where we - # send requests as application/json, e.g. for KV Store. - if len([x for x in headers if x[0].lower() == "content-type"]) == 0: - headers.append(("Content-Type", "application/x-www-form-urlencoded")) - - body = kwargs.pop('body') - if isinstance(body, dict): - body = _encode(**body).encode('utf-8') - if len(kwargs) > 0: - url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) - else: - body = _encode(**kwargs).encode('utf-8') - message = { - 'method': "POST", - 'headers': headers, - 'body': body - } - return self.request(url, message) - - def request(self, url, message, **kwargs): - """Issues an HTTP request to a URL. - - :param url: The URL. - :type url: ``string`` - :param message: A dictionary with the format as described in - :class:`HttpLib`. - :type message: ``dict`` - :param kwargs: Additional keyword arguments (optional). These arguments - are passed unchanged to the handler. 
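
The Content-Type handling in ``post`` above means a JSON body passes through
unchanged when an explicit header is supplied. A minimal sketch (the ``url``
value is a placeholder)::

    import json
    import splunklib.binding as binding

    http = binding.HttpLib()
    http.post(url, headers=[('Content-Type', 'application/json')],
              body=json.dumps({'name': 'example'}))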
- :type kwargs: ``dict`` - :returns: A dictionary describing the response (see :class:`HttpLib` for - its structure). - :rtype: ``dict`` - """ - while True: - try: - response = self.handler(url, message, **kwargs) - break - except Exception: - if self.retries <= 0: - raise - else: - time.sleep(self.retryDelay) - self.retries -= 1 - response = record(response) - if 400 <= response.status: - raise HTTPError(response) - - # Update the cookie with any HTTP request - # Initially, assume list of 2-tuples - key_value_tuples = response.headers - # If response.headers is a dict, get the key-value pairs as 2-tuples - # this is the case when using urllib2 - if isinstance(response.headers, dict): - key_value_tuples = list(response.headers.items()) - for key, value in key_value_tuples: - if key.lower() == "set-cookie": - _parse_cookies(value, self._cookies) - - return response - - -# Converts an httplib response into a file-like object. -class ResponseReader(io.RawIOBase): - """This class provides a file-like interface for :class:`httplib` responses. - - The ``ResponseReader`` class is intended to be a layer to unify the different - types of HTTP libraries used with this SDK. This class also provides a - preview of the stream and a few useful predicates. - """ - - # For testing, you can use a StringIO as the argument to - # ``ResponseReader`` instead of an ``httplib.HTTPResponse``. It - # will work equally well. - def __init__(self, response, connection=None): - self._response = response - self._connection = connection - self._buffer = b'' - - def __str__(self): - return str(self.read(), 'UTF-8') - - @property - def empty(self): - """Indicates whether there is any more data in the response.""" - return self.peek(1) == b"" - - def peek(self, size): - """Nondestructively retrieves a given number of characters. - - The next :meth:`read` operation behaves as though this method was never - called. - - :param size: The number of characters to retrieve. - :type size: ``integer`` - """ - c = self.read(size) - self._buffer = self._buffer + c - return c - - def close(self): - """Closes this response.""" - if self._connection: - self._connection.close() - self._response.close() - - def read(self, size=None): - """Reads a given number of characters from the response. - - :param size: The number of characters to read, or "None" to read the - entire response. - :type size: ``integer`` or "None" - - """ - r = self._buffer - self._buffer = b'' - if size is not None: - size -= len(r) - r = r + self._response.read(size) - return r - - def readable(self): - """ Indicates that the response reader is readable.""" - return True - - def readinto(self, byte_array): - """ Read data into a byte array, upto the size of the byte array. - - :param byte_array: A byte array/memory view to pour bytes into. - :type byte_array: ``bytearray`` or ``memoryview`` - - """ - max_size = len(byte_array) - data = self.read(max_size) - bytes_read = len(data) - byte_array[:bytes_read] = data - return bytes_read - - -def handler(key_file=None, cert_file=None, timeout=None, verify=False, context=None): - """This class returns an instance of the default HTTP request handler using - the values you provide. - - :param `key_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing your private key (optional). - :type key_file: ``string`` - :param `cert_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing a certificate chain file (optional). 
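
A short sketch of the ``ResponseReader`` preview behavior described above (it
assumes ``response`` came from an authenticated ``Context`` request)::

    reader = response.body          # a ResponseReader
    head = reader.peek(4)           # look ahead without consuming
    assert reader.read(4) == head   # peeked bytes come back on the next read
    if reader.empty:
        reader.close()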
- :type cert_file: ``string`` - :param `timeout`: The request time-out period, in seconds (optional). - :type timeout: ``integer`` or "None" - :param `verify`: Set to False to disable SSL verification on https connections. - :type verify: ``Boolean`` - :param `context`: The SSLContext that can is used with the HTTPSConnection when verify=True is enabled and context is specified - :type context: ``SSLContext` - """ - - def connect(scheme, host, port): - kwargs = {} - if timeout is not None: kwargs['timeout'] = timeout - if scheme == "http": - return client.HTTPConnection(host, port, **kwargs) - if scheme == "https": - if key_file is not None: kwargs['key_file'] = key_file - if cert_file is not None: kwargs['cert_file'] = cert_file - - if not verify: - kwargs['context'] = ssl._create_unverified_context() - elif context: - # verify is True in elif branch and context is not None - kwargs['context'] = context - - return client.HTTPSConnection(host, port, **kwargs) - raise ValueError(f"unsupported scheme: {scheme}") - - def request(url, message, **kwargs): - scheme, host, port, path = _spliturl(url) - body = message.get("body", "") - head = { - "Content-Length": str(len(body)), - "Host": host, - "User-Agent": "splunk-sdk-python/%s" % __version__, - "Accept": "*/*", - "Connection": "Close", - } # defaults - for key, value in message["headers"]: - head[key] = value - method = message.get("method", "GET") - - connection = connect(scheme, host, port) - is_keepalive = False - try: - connection.request(method, path, body, head) - if timeout is not None: - connection.sock.settimeout(timeout) - response = connection.getresponse() - is_keepalive = "keep-alive" in response.getheader("connection", default="close").lower() - finally: - if not is_keepalive: - connection.close() - - return { - "status": response.status, - "reason": response.reason, - "headers": response.getheaders(), - "body": ResponseReader(response, connection if is_keepalive else None), - } - - return request +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""The **splunklib.binding** module provides a low-level binding interface to the +`Splunk REST API `_. + +This module handles the wire details of calling the REST API, such as +authentication tokens, prefix paths, URL encoding, and so on. Actual path +segments, ``GET`` and ``POST`` arguments, and the parsing of responses is left +to the user. + +If you want a friendlier interface to the Splunk REST API, use the +:mod:`splunklib.client` module. 
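+
+A minimal round trip through this module looks roughly like the following
+sketch (the credentials are placeholders)::
+
+    import splunklib.binding as binding
+    context = binding.connect(username="admin", password="changeme")
+    response = context.get("apps/local")
+    print(response.status)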
+""" + +import io +import json +import logging +import socket +import ssl +import time +from base64 import b64encode +from contextlib import contextmanager +from datetime import datetime +from functools import wraps +from io import BytesIO +from urllib import parse +from http import client +from http.cookies import SimpleCookie +from xml.etree.ElementTree import XML, ParseError +from splunklib.data import record +from splunklib import __version__ + + +logger = logging.getLogger(__name__) + +__all__ = [ + "AuthenticationError", + "connect", + "Context", + "handler", + "HTTPError", + "UrlEncoded", + "_encode", + "_make_cookie_header", + "_NoAuthenticationToken", + "namespace" +] + +SENSITIVE_KEYS = ['Authorization', 'Cookie', 'action.email.auth_password', 'auth', 'auth_password', 'clear_password', 'clientId', + 'crc-salt', 'encr_password', 'oldpassword', 'passAuth', 'password', 'session', 'suppressionKey', + 'token'] + +# If you change these, update the docstring +# on _authority as well. +DEFAULT_HOST = "localhost" +DEFAULT_PORT = "8089" +DEFAULT_SCHEME = "https" + + +def _log_duration(f): + @wraps(f) + def new_f(*args, **kwargs): + start_time = datetime.now() + val = f(*args, **kwargs) + end_time = datetime.now() + logger.debug("Operation took %s", end_time - start_time) + return val + + return new_f + + +def mask_sensitive_data(data): + ''' + Masked sensitive fields data for logging purpose + ''' + if not isinstance(data, dict): + try: + data = json.loads(data) + except Exception as ex: + return data + + # json.loads will return "123"(str) as 123(int), so return the data if it's not 'dict' type + if not isinstance(data, dict): + return data + mdata = {} + for k, v in data.items(): + if k in SENSITIVE_KEYS: + mdata[k] = "******" + else: + mdata[k] = mask_sensitive_data(v) + return mdata + + +def _parse_cookies(cookie_str, dictionary): + """Tries to parse any key-value pairs of cookies in a string, + then updates the the dictionary with any key-value pairs found. + + **Example**:: + + dictionary = {} + _parse_cookies('my=value', dictionary) + # Now the following is True + dictionary['my'] == 'value' + + :param cookie_str: A string containing "key=value" pairs from an HTTP "Set-Cookie" header. + :type cookie_str: ``str`` + :param dictionary: A dictionary to update with any found key-value pairs. + :type dictionary: ``dict`` + """ + parsed_cookie = SimpleCookie(cookie_str) + for cookie in parsed_cookie.values(): + dictionary[cookie.key] = cookie.coded_value + + +def _make_cookie_header(cookies): + """ + Takes a list of 2-tuples of key-value pairs of + cookies, and returns a valid HTTP ``Cookie`` + header. + + **Example**:: + + header = _make_cookie_header([("key", "value"), ("key_2", "value_2")]) + # Now the following is True + header == "key=value; key_2=value_2" + + :param cookies: A list of 2-tuples of cookie key-value pairs. + :type cookies: ``list`` of 2-tuples + :return: ``str` An HTTP header cookie string. + :rtype: ``str`` + """ + return "; ".join(f"{key}={value}" for key, value in cookies) + + +# Singleton values to eschew None +class _NoAuthenticationToken: + """The value stored in a :class:`Context` or :class:`splunklib.client.Service` + class that is not logged in. + + If a ``Context`` or ``Service`` object is created without an authentication + token, and there has not yet been a call to the ``login`` method, the token + field of the ``Context`` or ``Service`` object is set to + ``_NoAuthenticationToken``. 
+
+
+class UrlEncoded(str):
+    """This class marks URL-encoded strings.
+    It should be considered an SDK-private implementation detail.
+
+    Manually tracking whether strings are URL encoded can be difficult. Avoid
+    calling ``urllib.parse.quote`` to replace special characters with escapes. When
+    you receive a URL-encoded string, *do* use ``urllib.parse.unquote`` to replace
+    escapes with single characters. Then, wrap any string you want to use as a
+    URL in ``UrlEncoded``. Note that because the ``UrlEncoded`` class is
+    idempotent, making multiple calls to it is OK.
+
+    ``UrlEncoded`` objects are identical to ``str`` objects (including being
+    equal if their contents are equal) except when passed to ``UrlEncoded``
+    again.
+
+    ``UrlEncoded`` removes the ``str`` type support for interpolating values
+    with ``%`` (doing that raises a ``TypeError``). There is no reliable way to
+    encode values this way, so instead, interpolate into a string, quoting by
+    hand, and call ``UrlEncoded`` with ``skip_encode=True``.
+
+    **Example**::
+
+        from urllib import parse
+        UrlEncoded(f'{scheme}://{parse.quote(host)}', skip_encode=True)
+
+    If you append ``str`` strings and ``UrlEncoded`` strings, the result is also
+    URL encoded.
+
+    **Example**::
+
+        UrlEncoded('ab c') + 'de f' == UrlEncoded('ab cde f')
+        'ab c' + UrlEncoded('de f') == UrlEncoded('ab cde f')
+    """
+
+    def __new__(self, val='', skip_encode=False, encode_slash=False):
+        if isinstance(val, UrlEncoded):
+            # Don't urllib.quote something already URL encoded.
+            return val
+        if skip_encode:
+            return str.__new__(self, val)
+        if encode_slash:
+            return str.__new__(self, parse.quote_plus(val))
+        # When subclassing str, just call str.__new__ method
+        # with your class and the value you want to have in the
+        # new string.
+        return str.__new__(self, parse.quote(val))
+
+    def __add__(self, other):
+        """self + other
+
+        If *other* is not a ``UrlEncoded``, URL encode it before
+        adding it.
+        """
+        if isinstance(other, UrlEncoded):
+            return UrlEncoded(str.__add__(self, other), skip_encode=True)
+
+        return UrlEncoded(str.__add__(self, parse.quote(other)), skip_encode=True)
+
+    def __radd__(self, other):
+        """other + self
+
+        If *other* is not a ``UrlEncoded``, URL encode it before
+        adding it.
+        """
+        if isinstance(other, UrlEncoded):
+            return UrlEncoded(str.__radd__(self, other), skip_encode=True)
+
+        return UrlEncoded(str.__add__(parse.quote(other), self), skip_encode=True)
+
+    def __mod__(self, fields):
+        """Interpolation into ``UrlEncoded`` objects is disabled.
+
+        If you try to write ``UrlEncoded("%s") % "abc"``, you will get a
+        ``TypeError``.
+        """
+        raise TypeError("Cannot interpolate into a UrlEncoded object.")
+
+    def __repr__(self):
+        return f"UrlEncoded({repr(parse.unquote(str(self)))})"
+
+
+@contextmanager
+def _handle_auth_error(msg):
+    """Handle re-raising HTTP authentication errors as something clearer.
+
+    If an ``HTTPError`` is raised with status 401 (access denied) in
+    the body of this context manager, re-raise it as an
+    ``AuthenticationError`` instead, with *msg* as its message.
+
+    This function adds no round trips to the server.
+
+    :param msg: The message to be raised in ``AuthenticationError``.
+    :type msg: ``str``
+
+    **Example**::
+
+        with _handle_auth_error("Your login failed."):
+             ... # make an HTTP request
+    """
+    try:
+        yield
+    except HTTPError as he:
+        if he.status == 401:
+            raise AuthenticationError(msg, he)
+        else:
+            raise
+
+
+def _authentication(request_fun):
+    """Decorator to handle autologin and authentication errors.
+
+    *request_fun* is a function taking no arguments that needs to
+    be run with this ``Context`` logged into Splunk.
+
+    ``_authentication``'s behavior depends on whether the
+    ``autologin`` field of ``Context`` is set to ``True`` or
+    ``False``. If it's ``False``, then ``_authentication``
+    aborts if the ``Context`` is not logged in, and raises an
+    ``AuthenticationError`` if an ``HTTPError`` of status 401 is
+    raised in *request_fun*. If it's ``True``, then
+    ``_authentication`` will try at all sensible places to
+    log in before issuing the request.
+
+    If ``autologin`` is ``False``, ``_authentication`` makes
+    one roundtrip to the server if the ``Context`` is logged in,
+    or zero if it is not. If ``autologin`` is ``True``, it's less
+    deterministic, and may make at most three roundtrips (though
+    that would be a truly pathological case).
+
+    :param request_fun: A function of no arguments encapsulating
+                        the request to make to the server.
+
+    **Example**::
+
+        import splunklib.binding as binding
+        c = binding.connect(..., autologin=True)
+        c.logout()
+        def f():
+            c.get("/services")
+            return 42
+        print(_authentication(f))
+    """
+
+    @wraps(request_fun)
+    def wrapper(self, *args, **kwargs):
+        if self.token is _NoAuthenticationToken and not self.has_cookies():
+            # Not yet logged in.
+            if self.autologin and self.username and self.password:
+                # This will throw an uncaught
+                # AuthenticationError if it fails.
+                self.login()
+            else:
+                # Try the request anyway without authentication.
+                # Most requests will fail. Some will succeed, such as
+                # 'GET server/info'.
+                with _handle_auth_error("Request aborted: not logged in."):
+                    return request_fun(self, *args, **kwargs)
+        try:
+            # Issue the request
+            return request_fun(self, *args, **kwargs)
+        except HTTPError as he:
+            if he.status == 401 and self.autologin:
+                # Authentication failed. Try logging in, and then
+                # rerunning the request. If either step fails, throw
+                # an AuthenticationError and give up.
+                with _handle_auth_error("Autologin failed."):
+                    self.login()
+                with _handle_auth_error("Authentication failed! If a session token was used, it may have expired."):
+                    return request_fun(self, *args, **kwargs)
+            elif he.status == 401 and not self.autologin:
+                raise AuthenticationError(
+                    "Request failed: Session is not logged in.", he)
+            else:
+                raise
+
+    return wrapper
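+
+
+# A sketch of the autologin path handled above (the credentials are
+# placeholders):
+#
+#     c = connect(username="admin", password="changeme", autologin=True)
+#     c.logout()
+#     c.get("apps/local")   # transparently logs in again instead of failing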
+
+
+def _authority(scheme=DEFAULT_SCHEME, host=DEFAULT_HOST, port=DEFAULT_PORT):
+    """Construct a URL authority from the given *scheme*, *host*, and *port*.
+
+    Named in accordance with RFC2396_, which defines URLs as::
+
+        <scheme>://<authority><path>?<query>
+
+    .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt
+
+    So ``https://localhost:8000/a/b/c?boris=hilda`` would be parsed as::
+
+        scheme := https
+        authority := localhost:8000
+        path := /a/b/c
+        query := boris=hilda
+
+    :param scheme: URL scheme (the default is "https")
+    :type scheme: "http" or "https"
+    :param host: The host name (the default is "localhost")
+    :type host: string
+    :param port: The port number (the default is 8089)
+    :type port: integer
+    :return: The URL authority.
+    :rtype: UrlEncoded (subclass of ``str``)
+
+    **Example**::
+
+        _authority() == "https://localhost:8089"
+
+        _authority(host="splunk.utopia.net") == "https://splunk.utopia.net:8089"
+
+        _authority(host="2001:0db8:85a3:0000:0000:8a2e:0370:7334") == \
+            "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089"
+
+        _authority(scheme="http", host="splunk.utopia.net", port="471") == \
+            "http://splunk.utopia.net:471"
+
+    """
+    # check if host is an IPv6 address and not enclosed in [ ]
+    if ':' in host and not (host.startswith('[') and host.endswith(']')):
+        # IPv6 addresses must be enclosed in [ ] in order to be well
+        # formed.
+        host = '[' + host + ']'
+    return UrlEncoded(f"{scheme}://{host}:{port}", skip_encode=True)
+
+
+# kwargs: sharing, owner, app
+def namespace(sharing=None, owner=None, app=None, **kwargs):
+    """This function constructs a Splunk namespace.
+
+    Every Splunk resource belongs to a namespace. The namespace is specified by
+    the pair of values ``owner`` and ``app`` and is governed by a ``sharing`` mode.
+    The possible values for ``sharing`` are: "user", "app", "global" and "system",
+    which map to the following combinations of ``owner`` and ``app`` values:
+
+        "user" => {owner}, {app}
+
+        "app" => nobody, {app}
+
+        "global" => nobody, {app}
+
+        "system" => nobody, system
+
+    "nobody" is a special user name that basically means no user, and "system"
+    is the name reserved for system resources.
+
+    "-" is a wildcard that can be used for both ``owner`` and ``app`` values and
+    refers to all users and all apps, respectively.
+
+    In general, when you specify a namespace you can specify any combination of
+    these three values and the library will reconcile the triple, overriding the
+    provided values as appropriate.
+
+    Finally, if no namespacing is specified the library will make use of the
+    ``/services`` branch of the REST API, which provides a namespaced view of
+    Splunk resources equivalent to using ``owner={currentUser}`` and
+    ``app={defaultApp}``.
+
+    The ``namespace`` function returns a representation of the namespace from
+    reconciling the values you provide. It ignores any keyword arguments other
+    than ``owner``, ``app``, and ``sharing``, so you can provide ``dicts`` of
+    configuration information without first having to extract individual keys.
+
+    :param sharing: The sharing mode (the default is "user").
+    :type sharing: "system", "global", "app", or "user"
+    :param owner: The owner context (the default is "None").
+    :type owner: ``string``
+    :param app: The app context (the default is "None").
+    :type app: ``string``
+    :returns: A :class:`splunklib.data.Record` containing the reconciled
+        namespace.
+
+    **Example**::
+
+        import splunklib.binding as binding
+        n = binding.namespace(sharing="user", owner="boris", app="search")
+        n = binding.namespace(sharing="global", app="search")
+    """
+    if sharing in ["system"]:
+        return record({'sharing': sharing, 'owner': "nobody", 'app': "system"})
+    if sharing in ["global", "app"]:
+        return record({'sharing': sharing, 'owner': "nobody", 'app': app})
+    if sharing in ["user", None]:
+        return record({'sharing': sharing, 'owner': owner, 'app': app})
+    raise ValueError("Invalid value for argument: 'sharing'")
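+
+
+# For example, the "global" sharing mode forces the owner to "nobody" no
+# matter what owner is passed in (a sketch):
+#
+#     n = namespace(sharing="global", owner="boris", app="search")
+#     assert n.owner == "nobody" and n.app == "search"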
+
+
+class Context:
+    """This class represents a context that encapsulates a splunkd connection.
+
+    The ``Context`` class encapsulates the details of HTTP requests,
+    authentication, a default namespace, and URL prefixes to simplify access to
+    the REST API.
+
+    After creating a ``Context`` object, you must call its :meth:`login`
+    method before you can issue requests to splunkd. Or, use the :func:`connect`
+    function to create an already-authenticated ``Context`` object. You can
+    provide a session token explicitly (the same token can be shared by multiple
+    ``Context`` objects) to provide authentication.
+
+    :param host: The host name (the default is "localhost").
+    :type host: ``string``
+    :param port: The port number (the default is 8089).
+    :type port: ``integer``
+    :param scheme: The scheme for accessing the service (the default is "https").
+    :type scheme: "https" or "http"
+    :param verify: Enable (True) or disable (False) SSL verification for https connections.
+    :type verify: ``Boolean``
+    :param sharing: The sharing mode for the namespace (the default is "user").
+    :type sharing: "global", "system", "app", or "user"
+    :param owner: The owner context of the namespace (optional, the default is "None").
+    :type owner: ``string``
+    :param app: The app context of the namespace (optional, the default is "None").
+    :type app: ``string``
+    :param token: A session token. When provided, you don't need to call :meth:`login`.
+    :type token: ``string``
+    :param cookie: A session cookie. When provided, you don't need to call :meth:`login`.
+        This parameter is only supported for Splunk 6.2+.
+    :type cookie: ``string``
+    :param username: The Splunk account username, which is used to
+        authenticate the Splunk instance.
+    :type username: ``string``
+    :param password: The password for the Splunk account.
+    :type password: ``string``
+    :param splunkToken: Splunk authentication token
+    :type splunkToken: ``string``
+    :param headers: List of extra HTTP headers to send (optional).
+    :type headers: ``list`` of 2-tuples.
+    :param retries: Number of retries for each HTTP connection (optional, the default is 0).
+        NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER AND BLOCK THE
+        CURRENT THREAD WHILE RETRYING.
+    :type retries: ``int``
+    :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s).
+    :type retryDelay: ``int`` (in seconds)
+    :param handler: The HTTP request handler (optional).
+    :returns: A ``Context`` instance.
+
+    **Example**::
+
+        import splunklib.binding as binding
+        c = binding.Context(username="boris", password="natasha", ...)
+        c.login()
+        # Or equivalently
+        c = binding.connect(username="boris", password="natasha")
+        # Or if you already have a session token
+        c = binding.Context(token="atg232342aa34324a")
+        # Or if you already have a valid cookie
+        c = binding.Context(cookie="splunkd_8089=...")
+    """
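+
+    # A sketch of the retry knobs documented above: retry each failed
+    # connection up to five times, two seconds apart (the values are
+    # illustrative):
+    #
+    #     c = binding.Context(retries=5, retryDelay=2,
+    #                         username="boris", password="natasha")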
+
+    def __init__(self, handler=None, **kwargs):
+        self.http = HttpLib(handler, kwargs.get("verify", False), key_file=kwargs.get("key_file"),
+                            cert_file=kwargs.get("cert_file"), context=kwargs.get("context"),
+                            # Default to False for backward compat
+                            retries=kwargs.get("retries", 0), retryDelay=kwargs.get("retryDelay", 10))
+        self.token = kwargs.get("token", _NoAuthenticationToken)
+        if self.token is None:  # In case someone explicitly passes token=None
+            self.token = _NoAuthenticationToken
+        self.scheme = kwargs.get("scheme", DEFAULT_SCHEME)
+        self.host = kwargs.get("host", DEFAULT_HOST)
+        self.port = int(kwargs.get("port", DEFAULT_PORT))
+        self.authority = _authority(self.scheme, self.host, self.port)
+        self.namespace = namespace(**kwargs)
+        self.username = kwargs.get("username", "")
+        self.password = kwargs.get("password", "")
+        self.basic = kwargs.get("basic", False)
+        self.bearerToken = kwargs.get("splunkToken", "")
+        self.autologin = kwargs.get("autologin", False)
+        self.additional_headers = kwargs.get("headers", [])
+
+        # Store any cookies in the self.http._cookies dict
+        if "cookie" in kwargs and kwargs['cookie'] not in [None, _NoAuthenticationToken]:
+            _parse_cookies(kwargs["cookie"], self.http._cookies)
+
+    def get_cookies(self):
+        """Gets the dictionary of cookies from the ``HttpLib`` member of this instance.
+
+        :return: Dictionary of cookies stored on the ``self.http``.
+        :rtype: ``dict``
+        """
+        return self.http._cookies
+
+    def has_cookies(self):
+        """Returns true if the ``HttpLib`` member of this instance has an auth token stored.
+
+        :return: ``True`` if an auth token is present, else ``False``
+        :rtype: ``bool``
+        """
+        auth_token_key = "splunkd_"
+        return any(auth_token_key in key for key in self.get_cookies().keys())
+
+    # Shared per-context request headers
+    @property
+    def _auth_headers(self):
+        """Headers required to authenticate a request.
+
+        Assumes your ``Context`` already has an authentication token or
+        cookie, either provided explicitly or obtained by logging
+        into the Splunk instance.
+
+        :returns: A list of 2-tuples containing key and value
+        """
+        header = []
+        if self.has_cookies():
+            return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))]
+        elif self.basic and (self.username and self.password):
+            token = f'Basic {b64encode(("%s:%s" % (self.username, self.password)).encode("utf-8")).decode("ascii")}'
+        elif self.bearerToken:
+            token = f'Bearer {self.bearerToken}'
+        elif self.token is _NoAuthenticationToken:
+            token = []
+        else:
+            # Ensure the token is properly formatted
+            if self.token.startswith('Splunk '):
+                token = self.token
+            else:
+                token = f'Splunk {self.token}'
+        if token:
+            header.append(("Authorization", token))
+        if self.get_cookies():
+            header.append(("Cookie", _make_cookie_header(list(self.get_cookies().items()))))
+
+        return header
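+
+    # Cookie-based sessions can be shared between contexts (a sketch; it
+    # assumes splunkd returned a session cookie on login, i.e. Splunk 6.2+):
+    #
+    #     c1 = connect(username="boris", password="natasha")
+    #     c2 = Context(cookie=_make_cookie_header(list(c1.get_cookies().items())))
+    #     # c2 can now issue requests without calling login()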
+
+    def connect(self):
+        """Returns an open connection (socket) to the Splunk instance.
+
+        This method is used for writing bulk events to an index or similar tasks
+        where the overhead of opening a connection multiple times would be
+        prohibitive.
+
+        :returns: A socket.
+
+        **Example**::
+
+            import splunklib.binding as binding
+            c = binding.connect(...)
+            socket = c.connect()
+            socket.write("POST %s HTTP/1.1\\r\\n" % "some/path/to/post/to")
+            socket.write("Host: %s:%s\\r\\n" % (c.host, c.port))
+            socket.write("Accept-Encoding: identity\\r\\n")
+            socket.write("Authorization: %s\\r\\n" % c.token)
+            socket.write("X-Splunk-Input-Mode: Streaming\\r\\n")
+            socket.write("\\r\\n")
+        """
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        if self.scheme == "https":
+            sock = ssl.wrap_socket(sock)
+        sock.connect((socket.gethostbyname(self.host), self.port))
+        return sock
+
+    @_authentication
+    @_log_duration
+    def delete(self, path_segment, owner=None, app=None, sharing=None, **query):
+        """Performs a DELETE operation at the REST path segment with the given
+        namespace and query.
+
+        This method is named to match the HTTP method. ``delete`` makes at least
+        one round trip to the server, one additional round trip for each 303
+        status returned, and at most two additional round trips if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        If *owner*, *app*, and *sharing* are omitted, this method uses the
+        default :class:`Context` namespace. All other keyword arguments are
+        included in the URL as query parameters.
+
+        :raises AuthenticationError: Raised when the ``Context`` object is not
+             logged in.
+        :raises HTTPError: Raised when an error occurred in a DELETE operation from
+             *path_segment*.
+        :param path_segment: A REST path segment.
+        :type path_segment: ``string``
+        :param owner: The owner context of the namespace (optional).
+        :type owner: ``string``
+        :param app: The app context of the namespace (optional).
+        :type app: ``string``
+        :param sharing: The sharing mode of the namespace (optional).
+        :type sharing: ``string``
+        :param query: All other keyword arguments, which are used as query
+            parameters.
+        :type query: ``string``
+        :return: The response from the server.
+        :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+            and ``status``
+
+        **Example**::
+
+            c = binding.connect(...)
+            c.delete('saved/searches/boris') == \\
+                {'body': ...a response reader object...,
+                 'headers': [('content-length', '1786'),
+                             ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                             ('server', 'Splunkd'),
+                             ('connection', 'close'),
+                             ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                             ('date', 'Fri, 11 May 2012 16:53:06 GMT'),
+                             ('content-type', 'text/xml; charset=utf-8')],
+                 'reason': 'OK',
+                 'status': 200}
+            c.delete('nonexistent/path') # raises HTTPError
+            c.logout()
+            c.delete('apps/local') # raises AuthenticationError
+        """
+        path = self.authority + self._abspath(path_segment, owner=owner,
+                                              app=app, sharing=sharing)
+        logger.debug("DELETE request to %s (body: %s)", path, mask_sensitive_data(query))
+        response = self.http.delete(path, self._auth_headers, **query)
+        return response
+
+    @_authentication
+    @_log_duration
+    def get(self, path_segment, owner=None, app=None, headers=None, sharing=None, **query):
+        """Performs a GET operation from the REST path segment with the given
+        namespace and query.
+
+        This method is named to match the HTTP method. ``get`` makes at least
+        one round trip to the server, one additional round trip for each 303
+        status returned, and at most two additional round trips if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        If *owner*, *app*, and *sharing* are omitted, this method uses the
+        default :class:`Context` namespace. All other keyword arguments are
+        included in the URL as query parameters.
+
+ :raises AuthenticationError: Raised when the ``Context`` object is not
+ logged in.
+ :raises HTTPError: Raised when an error occurred in a GET operation from
+ *path_segment*.
+ :param path_segment: A REST path segment.
+ :type path_segment: ``string``
+ :param owner: The owner context of the namespace (optional).
+ :type owner: ``string``
+ :param app: The app context of the namespace (optional).
+ :type app: ``string``
+ :param headers: List of extra HTTP headers to send (optional).
+ :type headers: ``list`` of 2-tuples.
+ :param sharing: The sharing mode of the namespace (optional).
+ :type sharing: ``string``
+ :param query: All other keyword arguments, which are used as query
+ parameters.
+ :type query: ``string``
+ :return: The response from the server.
+ :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+ and ``status``
+
+ **Example**::
+
+ c = binding.connect(...)
+ c.get('apps/local') == \\
+ {'body': ...a response reader object...,
+ 'headers': [('content-length', '26208'),
+ ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+ ('server', 'Splunkd'),
+ ('connection', 'close'),
+ ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+ ('date', 'Fri, 11 May 2012 16:30:35 GMT'),
+ ('content-type', 'text/xml; charset=utf-8')],
+ 'reason': 'OK',
+ 'status': 200}
+ c.get('nonexistent/path') # raises HTTPError
+ c.logout()
+ c.get('apps/local') # raises AuthenticationError
+ """
+ if headers is None:
+ headers = []
+
+ path = self.authority + self._abspath(path_segment, owner=owner,
+ app=app, sharing=sharing)
+ logger.debug("GET request to %s (body: %s)", path, mask_sensitive_data(query))
+ all_headers = headers + self.additional_headers + self._auth_headers
+ response = self.http.get(path, all_headers, **query)
+ return response
+
+ @_authentication
+ @_log_duration
+ def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, **query):
+ """Performs a POST operation from the REST path segment with the given
+ namespace and query.
+
+ This method is named to match the HTTP method. ``post`` makes at least
+ one round trip to the server, one additional round trip for each 303
+ status returned, and at most two additional round trips if
+ the ``autologin`` field of :func:`connect` is set to ``True``.
+
+ If *owner*, *app*, and *sharing* are omitted, this method uses the
+ default :class:`Context` namespace. All other keyword arguments are
+ included in the URL as query parameters.
+
+ Some of Splunk's endpoints, such as ``receivers/simple`` and
+ ``receivers/stream``, require unstructured data in the POST body
+ and all metadata passed as GET-style arguments. If you provide
+ a ``body`` argument to ``post``, it will be used as the POST
+ body, and all other keyword arguments will be passed as
+ GET-style arguments in the URL.
+
+ :raises AuthenticationError: Raised when the ``Context`` object is not
+ logged in.
+ :raises HTTPError: Raised when an error occurred in a POST operation from
+ *path_segment*.
+ :param path_segment: A REST path segment.
+ :type path_segment: ``string``
+ :param owner: The owner context of the namespace (optional).
+ :type owner: ``string``
+ :param app: The app context of the namespace (optional).
+ :type app: ``string``
+ :param sharing: The sharing mode of the namespace (optional).
+ :type sharing: ``string``
+ :param headers: List of extra HTTP headers to send (optional).
+ :type headers: ``list`` of 2-tuples.
+ :param query: All other keyword arguments, which are used as query
+ parameters.
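+ :type query: ``string``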
+ :param body: Parameters to be used in the post body. If specified,
+ any parameters in the query will be applied to the URL instead of
+ the body. If a dict is supplied, the key-value pairs will be form
+ encoded. If a string is supplied, the body will be passed through
+ in the request unchanged.
+ :type body: ``dict`` or ``str``
+ :return: The response from the server.
+ :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+ and ``status``
+
+ **Example**::
+
+ c = binding.connect(...)
+ c.post('saved/searches', name='boris',
+ search='search * earliest=-1m | head 1') == \\
+ {'body': ...a response reader object...,
+ 'headers': [('content-length', '10455'),
+ ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+ ('server', 'Splunkd'),
+ ('connection', 'close'),
+ ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+ ('date', 'Fri, 11 May 2012 16:46:06 GMT'),
+ ('content-type', 'text/xml; charset=utf-8')],
+ 'reason': 'Created',
+ 'status': 201}
+ c.post('nonexistent/path') # raises HTTPError
+ c.logout()
+ # raises AuthenticationError:
+ c.post('saved/searches', name='boris',
+ search='search * earliest=-1m | head 1')
+ """
+ if headers is None:
+ headers = []
+
+ path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing)
+
+ logger.debug("POST request to %s (body: %s)", path, mask_sensitive_data(query))
+ all_headers = headers + self.additional_headers + self._auth_headers
+ response = self.http.post(path, all_headers, **query)
+ return response
+
+ @_authentication
+ @_log_duration
+ def request(self, path_segment, method="GET", headers=None, body={},
+ owner=None, app=None, sharing=None):
+ """Issues an arbitrary HTTP request to the REST path segment.
+
+ This method is named to match ``http.client``'s ``request`` method.
+ This function makes a single round trip to the server.
+
+ If *owner*, *app*, and *sharing* are omitted, this method uses the
+ default :class:`Context` namespace.
+
+ :raises AuthenticationError: Raised when the ``Context`` object is not
+ logged in.
+ :raises HTTPError: Raised when an error occurred in the request to
+ *path_segment*.
+ :param path_segment: A REST path segment.
+ :type path_segment: ``string``
+ :param method: The HTTP method to use (optional).
+ :type method: ``string``
+ :param headers: List of extra HTTP headers to send (optional).
+ :type headers: ``list`` of 2-tuples.
+ :param body: Content of the HTTP request (optional). If a ``dict``, it
+ is form encoded; for GET requests it is appended to the URL as the
+ query string.
+ :type body: ``dict``
+ :param owner: The owner context of the namespace (optional).
+ :type owner: ``string``
+ :param app: The app context of the namespace (optional).
+ :type app: ``string``
+ :param sharing: The sharing mode of the namespace (optional).
+ :type sharing: ``string``
+ :return: The response from the server.
+ :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+ and ``status``
+
+ **Example**::
+
+ c = binding.connect(...)
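+ # A sketch of a write through ``request``: for non-GET methods
+ # the ``body`` dict is form encoded and sent as the request body.
+ c.request('saved/searches', method='POST',
+ body={'name': 'boris', 'search': 'search * | head 1'})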
+ c.request('saved/searches', method='GET') == \\
+ {'body': ...a response reader object...,
+ 'headers': [('content-length', '46722'),
+ ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+ ('server', 'Splunkd'),
+ ('connection', 'close'),
+ ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+ ('date', 'Fri, 11 May 2012 17:24:19 GMT'),
+ ('content-type', 'text/xml; charset=utf-8')],
+ 'reason': 'OK',
+ 'status': 200}
+ c.request('nonexistent/path', method='GET') # raises HTTPError
+ c.logout()
+ c.request('apps/local', method='GET') # raises AuthenticationError
+ """
+ if headers is None:
+ headers = []
+
+ path = self.authority \
+ + self._abspath(path_segment, owner=owner,
+ app=app, sharing=sharing)
+
+ all_headers = headers + self.additional_headers + self._auth_headers
+ logger.debug("%s request to %s (headers: %s, body: %s)",
+ method, path, str(mask_sensitive_data(dict(all_headers))), mask_sensitive_data(body))
+ if body:
+ body = _encode(**body)
+
+ if method == "GET":
+ path = path + UrlEncoded('?' + body, skip_encode=True)
+ message = {'method': method,
+ 'headers': all_headers}
+ else:
+ message = {'method': method,
+ 'headers': all_headers,
+ 'body': body}
+ else:
+ message = {'method': method,
+ 'headers': all_headers}
+
+ response = self.http.request(path, message)
+
+ return response
+
+ def login(self):
+ """Logs into the Splunk instance referred to by the :class:`Context`
+ object.
+
+ Unless a ``Context`` is created with an explicit authentication token
+ (probably obtained by logging in from a different ``Context`` object),
+ you must call :meth:`login` before you can issue requests.
+ The authentication token obtained from the server is stored in the
+ ``token`` field of the ``Context`` object.
+
+ :raises AuthenticationError: Raised when login fails.
+ :returns: The ``Context`` object, so you can chain calls.
+
+ **Example**::
+
+ import splunklib.binding as binding
+ c = binding.Context(...).login()
+ # Then issue requests...
+ """
+
+ if self.has_cookies() and \
+ (not self.username and not self.password):
+ # If we were passed session cookie(s), but no username or
+ # password, then login is a nop, since we're automatically
+ # logged in.
+ return
+
+ if self.token is not _NoAuthenticationToken and \
+ (not self.username and not self.password):
+ # If we were passed a session token, but no username or
+ # password, then login is a nop, since we're automatically
+ # logged in.
+ return
+
+ if self.basic and (self.username and self.password):
+ # Basic auth mode requested, so this method is a nop as long
+ # as credentials were passed in.
+ return
+
+ if self.bearerToken:
+ # Bearer auth mode requested, so this method is a nop as long
+ # as authentication token was passed in.
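+ # (Taken together, the early returns above give ``login`` a fixed
+ # precedence: cookie, explicit session token, basic auth, then
+ # bearer token; only when none of these applies does the method
+ # fall through to the username/password login below.)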
+ return + # Only try to get a token and updated cookie if username & password are specified + try: + response = self.http.post( + self.authority + self._abspath("/services/auth/login"), + username=self.username, + password=self.password, + headers=self.additional_headers, + cookie="1") # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header + + body = response.body.read() + session = XML(body).findtext("./sessionKey") + self.token = f"Splunk {session}" + return self + except HTTPError as he: + if he.status == 401: + raise AuthenticationError("Login failed.", he) + else: + raise + + def logout(self): + """Forgets the current session token, and cookies.""" + self.token = _NoAuthenticationToken + self.http._cookies = {} + return self + + def _abspath(self, path_segment, + owner=None, app=None, sharing=None): + """Qualifies *path_segment* into an absolute path for a URL. + + If *path_segment* is already absolute, returns it unchanged. + If *path_segment* is relative, then qualifies it with either + the provided namespace arguments or the ``Context``'s default + namespace. Any forbidden characters in *path_segment* are URL + encoded. This function has no network activity. + + Named to be consistent with RFC2396_. + + .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt + + :param path_segment: A relative or absolute URL path segment. + :type path_segment: ``string`` + :param owner, app, sharing: Components of a namespace (defaults + to the ``Context``'s namespace if all + three are omitted) + :type owner, app, sharing: ``string`` + :return: A ``UrlEncoded`` (a subclass of ``str``). + :rtype: ``string`` + + **Example**:: + + import splunklib.binding as binding + c = binding.connect(owner='boris', app='search', sharing='user') + c._abspath('/a/b/c') == '/a/b/c' + c._abspath('/a/b c/d') == '/a/b%20c/d' + c._abspath('apps/local/search') == \ + '/servicesNS/boris/search/apps/local/search' + c._abspath('apps/local/search', sharing='system') == \ + '/servicesNS/nobody/system/apps/local/search' + url = c.authority + c._abspath('apps/local/sharing') + """ + skip_encode = isinstance(path_segment, UrlEncoded) + # If path_segment is absolute, escape all forbidden characters + # in it and return it. + if path_segment.startswith('/'): + return UrlEncoded(path_segment, skip_encode=skip_encode) + + # path_segment is relative, so we need a namespace to build an + # absolute path. + if owner or app or sharing: + ns = namespace(owner=owner, app=app, sharing=sharing) + else: + ns = self.namespace + + # If no app or owner are specified, then use the /services + # endpoint. Otherwise, use /servicesNS with the specified + # namespace. If only one of app and owner is specified, use + # '-' for the other. + if ns.app is None and ns.owner is None: + return UrlEncoded(f"/services/{path_segment}", skip_encode=skip_encode) + + oname = "nobody" if ns.owner is None else ns.owner + aname = "system" if ns.app is None else ns.app + path = UrlEncoded(f"/servicesNS/{oname}/{aname}/{path_segment}", skip_encode=skip_encode) + return path + + +def connect(**kwargs): + """This function returns an authenticated :class:`Context` object. + + This function is a shorthand for calling :meth:`Context.login`. + + This function makes one round trip to the server. + + :param host: The host name (the default is "localhost"). + :type host: ``string`` + :param port: The port number (the default is 8089). + :type port: ``integer`` + :param scheme: The scheme for accessing the service (the default is "https"). 
+ :type scheme: "https" or "http" + :param owner: The owner context of the namespace (the default is "None"). + :type owner: ``string`` + :param app: The app context of the namespace (the default is "None"). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (the default is "user"). + :type sharing: "global", "system", "app", or "user" + :param token: The current session token (optional). Session tokens can be + shared across multiple service instances. + :type token: ``string`` + :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. + This parameter is only supported for Splunk 6.2+. + :type cookie: ``string`` + :param username: The Splunk account username, which is used to + authenticate the Splunk instance. + :type username: ``string`` + :param password: The password for the Splunk account. + :type password: ``string`` + :param headers: List of extra HTTP headers to send (optional). + :type headers: ``list`` of 2-tuples. + :param autologin: When ``True``, automatically tries to log in again if the + session terminates. + :type autologin: ``Boolean`` + :return: An initialized :class:`Context` instance. + + **Example**:: + + import splunklib.binding as binding + c = binding.connect(...) + response = c.get("apps/local") + """ + c = Context(**kwargs) + c.login() + return c + + +# Note: the error response schema supports multiple messages but we only +# return the first, although we do return the body so that an exception +# handler that wants to read multiple messages can do so. +class HTTPError(Exception): + """This exception is raised for HTTP responses that return an error.""" + + def __init__(self, response, _message=None): + status = response.status + reason = response.reason + body = response.body.read() + try: + detail = XML(body).findtext("./messages/msg") + except ParseError: + detail = body + detail_formatted = "" if detail is None else f" -- {detail}" + message = f"HTTP {status} {reason}{detail_formatted}" + Exception.__init__(self, _message or message) + self.status = status + self.reason = reason + self.headers = response.headers + self.body = body + self._response = response + + +class AuthenticationError(HTTPError): + """Raised when a login request to Splunk fails. + + If your username was unknown or you provided an incorrect password + in a call to :meth:`Context.login` or :meth:`splunklib.client.Service.login`, + this exception is raised. + """ + + def __init__(self, message, cause): + # Put the body back in the response so that HTTPError's constructor can + # read it again. + cause._response.body = BytesIO(cause.body) + + HTTPError.__init__(self, cause._response, message) + + +# +# The HTTP interface used by the Splunk binding layer abstracts the underlying +# HTTP library using request & response 'messages' which are implemented as +# dictionaries with the following structure: +# +# # HTTP request message (only method required) +# request { +# method : str, +# headers? : [(str, str)*], +# body? : str, +# } +# +# # HTTP response message (all keys present) +# response { +# status : int, +# reason : str, +# headers : [(str, str)*], +# body : file, +# } +# + +# Encode the given kwargs as a query string. This wrapper will also _encode +# a list value as a sequence of assignments to the corresponding arg name, +# for example an argument such as 'foo=[1,2,3]' will be encoded as +# 'foo=1&foo=2&foo=3'. 
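+# For example (a sketch):
+#
+# _encode(count=5, fields=['a', 'b']) == 'count=5&fields=a&fields=b'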
+def _encode(**kwargs):
+ items = []
+ for key, value in kwargs.items():
+ if isinstance(value, list):
+ items.extend([(key, item) for item in value])
+ else:
+ items.append((key, value))
+ return parse.urlencode(items)
+
+
+# Crack the given url into (scheme, host, port, path)
+def _spliturl(url):
+ parsed_url = parse.urlparse(url)
+ host = parsed_url.hostname
+ port = parsed_url.port
+ path = '?'.join((parsed_url.path, parsed_url.query)) if parsed_url.query else parsed_url.path
+ # Strip brackets if it's an IPv6 address
+ if host.startswith('[') and host.endswith(']'): host = host[1:-1]
+ if port is None: port = DEFAULT_PORT
+ return parsed_url.scheme, host, port, path
+
+
+# Given an HTTP request handler, this wrapper object provides a related
+# family of convenience methods built using that handler.
+class HttpLib:
+ """A set of convenient methods for making HTTP calls.
+
+ ``HttpLib`` provides a general :meth:`request` method, and :meth:`delete`,
+ :meth:`post`, and :meth:`get` methods for the three HTTP methods that Splunk
+ uses.
+
+ By default, ``HttpLib`` uses Python's built-in ``http.client`` module,
+ but you can replace it by passing your own handling function to the
+ constructor for ``HttpLib``.
+
+ The handling function should have the type:
+
+ ``handler(`url`, `request_dict`) -> response_dict``
+
+ where `url` is the URL to make the request to (including any query and
+ fragment sections) and `request_dict` is a dictionary with the following keys:
+
+ - method: The method for the request, typically ``GET``, ``POST``, or ``DELETE``.
+
+ - headers: A list of pairs specifying the HTTP headers (for example: ``[('key', value), ...]``).
+
+ - body: A string containing the body to send with the request (this string
+ should default to '').
+
+ and ``response_dict`` is a dictionary with the following keys:
+
+ - status: An integer containing the HTTP status code (such as 200 or 404).
+
+ - reason: The reason phrase, if any, returned by the server.
+
+ - headers: A list of pairs containing the response headers (for example, ``[('key', value), ...]``).
+
+ - body: A stream-like object supporting ``read(size=None)`` and ``close()``
+ methods to get the body of the response.
+
+ The response dictionary is returned directly by ``HttpLib``'s methods with
+ no further processing. By default, ``HttpLib`` calls the :func:`handler`
+ function to build its handler.
+
+ If using the default handler, SSL verification can be disabled by passing ``verify=False``.
+ """
+
+ def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None, context=None, retries=0,
+ retryDelay=10):
+ if custom_handler is None:
+ self.handler = handler(verify=verify, key_file=key_file, cert_file=cert_file, context=context)
+ else:
+ self.handler = custom_handler
+ self._cookies = {}
+ self.retries = retries
+ self.retryDelay = retryDelay
+
+ def delete(self, url, headers=None, **kwargs):
+ """Sends a DELETE request to a URL.
+
+ :param url: The URL.
+ :type url: ``string``
+ :param headers: A list of pairs specifying the headers for the HTTP
+ request (for example, ``[('Content-Type', 'text/cthulhu'), ('Token', 'boris')]``).
+ :type headers: ``list``
+ :param kwargs: Additional keyword arguments (optional). These arguments
+ are interpreted as the query part of the URL. The order of keyword
+ arguments is not preserved in the request, but the keywords and
+ their arguments will be URL encoded.
+ :type kwargs: ``dict``
+ :returns: A dictionary describing the response (see :class:`HttpLib` for
+ its structure).
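+ (Responses with a status of 400 or higher raise :class:`HTTPError`
+ from within :meth:`request` rather than being returned.)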
+ :rtype: ``dict``
+ """
+ if headers is None: headers = []
+ if kwargs:
+ # url is already a UrlEncoded. We have to manually declare
+ # the query to be encoded or it will get automatically URL
+ # encoded by being appended to url.
+ url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True)
+ message = {
+ 'method': "DELETE",
+ 'headers': headers,
+ }
+ return self.request(url, message)
+
+ def get(self, url, headers=None, **kwargs):
+ """Sends a GET request to a URL.
+
+ :param url: The URL.
+ :type url: ``string``
+ :param headers: A list of pairs specifying the headers for the HTTP
+ request (for example, ``[('Content-Type', 'text/cthulhu'), ('Token', 'boris')]``).
+ :type headers: ``list``
+ :param kwargs: Additional keyword arguments (optional). These arguments
+ are interpreted as the query part of the URL. The order of keyword
+ arguments is not preserved in the request, but the keywords and
+ their arguments will be URL encoded.
+ :type kwargs: ``dict``
+ :returns: A dictionary describing the response (see :class:`HttpLib` for
+ its structure).
+ :rtype: ``dict``
+ """
+ if headers is None: headers = []
+ if kwargs:
+ # url is already a UrlEncoded. We have to manually declare
+ # the query to be encoded or it will get automatically URL
+ # encoded by being appended to url.
+ url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True)
+ return self.request(url, {'method': "GET", 'headers': headers})
+
+ def post(self, url, headers=None, **kwargs):
+ """Sends a POST request to a URL.
+
+ :param url: The URL.
+ :type url: ``string``
+ :param headers: A list of pairs specifying the headers for the HTTP
+ request (for example, ``[('Content-Type', 'text/cthulhu'), ('Token', 'boris')]``).
+ :type headers: ``list``
+ :param kwargs: Additional keyword arguments (optional). If the argument
+ is ``body``, the value is used as the body for the request, and the
+ keywords and their arguments will be URL encoded. If there is no
+ ``body`` keyword argument, all the keyword arguments are encoded
+ into the body of the request in the format ``application/x-www-form-urlencoded``.
+ :type kwargs: ``dict``
+ :returns: A dictionary describing the response (see :class:`HttpLib` for
+ its structure).
+ :rtype: ``dict``
+ """
+ if headers is None: headers = []
+
+ # We handle GET-style arguments and an unstructured body. This is here
+ # to support the receivers/stream endpoint.
+ if 'body' in kwargs:
+ # We only use application/x-www-form-urlencoded if there is no other
+ # Content-Type header present. This can happen in cases where we
+ # send requests as application/json, e.g. for KV Store.
+ if len([x for x in headers if x[0].lower() == "content-type"]) == 0:
+ headers.append(("Content-Type", "application/x-www-form-urlencoded"))
+
+ body = kwargs.pop('body')
+ if isinstance(body, dict):
+ body = _encode(**body).encode('utf-8')
+ if len(kwargs) > 0:
+ url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True)
+ else:
+ body = _encode(**kwargs).encode('utf-8')
+ message = {
+ 'method': "POST",
+ 'headers': headers,
+ 'body': body
+ }
+ return self.request(url, message)
+
+ def request(self, url, message, **kwargs):
+ """Issues an HTTP request to a URL.
+
+ :param url: The URL.
+ :type url: ``string``
+ :param message: A dictionary with the format as described in
+ :class:`HttpLib`.
+ :type message: ``dict``
+ :param kwargs: Additional keyword arguments (optional). These arguments
+ are passed unchanged to the handler.
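+ (The default handler accepts and ignores extra keyword
+ arguments, so these matter only for custom handlers.)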
+ :type kwargs: ``dict``
+ :returns: A dictionary describing the response (see :class:`HttpLib` for
+ its structure).
+ :rtype: ``dict``
+ """
+ while True:
+ try:
+ response = self.handler(url, message, **kwargs)
+ break
+ except Exception:
+ if self.retries <= 0:
+ raise
+ else:
+ time.sleep(self.retryDelay)
+ self.retries -= 1
+ response = record(response)
+ if 400 <= response.status:
+ raise HTTPError(response)
+
+ # Update the cookie jar from any Set-Cookie headers on the response
+ # Initially, assume list of 2-tuples
+ key_value_tuples = response.headers
+ # If response.headers is a dict (as some custom handlers return),
+ # get the key-value pairs as 2-tuples
+ if isinstance(response.headers, dict):
+ key_value_tuples = list(response.headers.items())
+ for key, value in key_value_tuples:
+ if key.lower() == "set-cookie":
+ _parse_cookies(value, self._cookies)
+
+ return response
+
+
+# Converts an http.client response into a file-like object.
+class ResponseReader(io.RawIOBase):
+ """This class provides a file-like interface for :class:`http.client` responses.
+
+ The ``ResponseReader`` class is intended to be a layer to unify the different
+ types of HTTP libraries used with this SDK. This class also provides a
+ preview of the stream and a few useful predicates.
+ """
+
+ # For testing, you can use a BytesIO as the argument to
+ # ``ResponseReader`` instead of an ``http.client.HTTPResponse``. It
+ # will work equally well.
+ def __init__(self, response, connection=None):
+ self._response = response
+ self._connection = connection
+ self._buffer = b''
+
+ def __str__(self):
+ return str(self.read(), 'UTF-8')
+
+ @property
+ def empty(self):
+ """Indicates whether there is any more data in the response."""
+ return self.peek(1) == b""
+
+ def peek(self, size):
+ """Nondestructively retrieves a given number of bytes.
+
+ The next :meth:`read` operation behaves as though this method was never
+ called.
+
+ :param size: The number of bytes to retrieve.
+ :type size: ``integer``
+ """
+ c = self.read(size)
+ self._buffer = self._buffer + c
+ return c
+
+ def close(self):
+ """Closes this response."""
+ if self._connection:
+ self._connection.close()
+ self._response.close()
+
+ def read(self, size=None):
+ """Reads a given number of bytes from the response.
+
+ :param size: The number of bytes to read, or "None" to read the
+ entire response.
+ :type size: ``integer`` or "None"
+
+ """
+ r = self._buffer
+ self._buffer = b''
+ if size is not None:
+ size -= len(r)
+ r = r + self._response.read(size)
+ return r
+
+ def readable(self):
+ """ Indicates that the response reader is readable."""
+ return True
+
+ def readinto(self, byte_array):
+ """ Read data into a byte array, up to the size of the byte array.
+
+ :param byte_array: A byte array/memory view to pour bytes into.
+ :type byte_array: ``bytearray`` or ``memoryview``
+
+ """
+ max_size = len(byte_array)
+ data = self.read(max_size)
+ bytes_read = len(data)
+ byte_array[:bytes_read] = data
+ return bytes_read
+
+
+def handler(key_file=None, cert_file=None, timeout=None, verify=False, context=None):
+ """This function returns the default HTTP request handler, configured with
+ the values you provide.
+
+ :param `key_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing your private key (optional).
+ :type key_file: ``string``
+ :param `cert_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing a certificate chain file (optional).
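+ (Both ``key_file`` and ``cert_file`` apply only to ``https``
+ connections.)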
+ :type cert_file: ``string``
+ :param `timeout`: The request time-out period, in seconds (optional).
+ :type timeout: ``integer`` or "None"
+ :param `verify`: Set to False to disable SSL verification on https connections.
+ :type verify: ``Boolean``
+ :param `context`: The SSLContext that is used with the HTTPSConnection when ``verify=True`` and a context is specified
+ :type context: ``SSLContext``
+ """
+
+ def connect(scheme, host, port):
+ kwargs = {}
+ if timeout is not None: kwargs['timeout'] = timeout
+ if scheme == "http":
+ return client.HTTPConnection(host, port, **kwargs)
+ if scheme == "https":
+ if key_file is not None: kwargs['key_file'] = key_file
+ if cert_file is not None: kwargs['cert_file'] = cert_file
+
+ if not verify:
+ kwargs['context'] = ssl._create_unverified_context()
+ elif context:
+ # verify is True in elif branch and context is not None
+ kwargs['context'] = context
+
+ return client.HTTPSConnection(host, port, **kwargs)
+ raise ValueError(f"unsupported scheme: {scheme}")
+
+ def request(url, message, **kwargs):
+ scheme, host, port, path = _spliturl(url)
+ body = message.get("body", "")
+ head = {
+ "Content-Length": str(len(body)),
+ "Host": host,
+ "User-Agent": f"splunk-sdk-python/{__version__}",
+ "Accept": "*/*",
+ "Connection": "Close",
+ } # defaults
+ for key, value in message["headers"]:
+ head[key] = value
+ method = message.get("method", "GET")
+
+ connection = connect(scheme, host, port)
+ is_keepalive = False
+ try:
+ connection.request(method, path, body, head)
+ if timeout is not None:
+ connection.sock.settimeout(timeout)
+ response = connection.getresponse()
+ is_keepalive = "keep-alive" in response.getheader("connection", default="close").lower()
+ finally:
+ if not is_keepalive:
+ connection.close()
+
+ return {
+ "status": response.status,
+ "reason": response.reason,
+ "headers": response.getheaders(),
+ "body": ResponseReader(response, connection if is_keepalive else None),
+ }
+
+ return request
diff --git a/splunklib/client.py b/splunklib/client.py
index 28024d50..090f9192 100644
--- a/splunklib/client.py
+++ b/splunklib/client.py
@@ -1,3907 +1,3908 @@
-# Copyright © 2011-2024 Splunk, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"): you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# The purpose of this module is to provide a friendlier domain interface to
-# various Splunk endpoints. The approach here is to leverage the binding
-# layer to capture endpoint context and provide objects and methods that
-# offer simplified access their corresponding endpoints. The design avoids
-# caching resource state. From the perspective of this module, the 'policy'
-# for caching resource state belongs in the application or a higher level
-# framework, and its the purpose of this module to provide simplified
-# access to that resource state.
-#
-# A side note, the objects below that provide helper methods for updating eg:
-# Entity state, are written so that they may be used in a fluent style.
-# - -"""The **splunklib.client** module provides a Pythonic interface to the -`Splunk REST API `_, -allowing you programmatically access Splunk's resources. - -**splunklib.client** wraps a Pythonic layer around the wire-level -binding of the **splunklib.binding** module. The core of the library is the -:class:`Service` class, which encapsulates a connection to the server, and -provides access to the various aspects of Splunk's functionality, which are -exposed via the REST API. Typically you connect to a running Splunk instance -with the :func:`connect` function:: - - import splunklib.client as client - service = client.connect(host='localhost', port=8089, - username='admin', password='...') - assert isinstance(service, client.Service) - -:class:`Service` objects have fields for the various Splunk resources (such as apps, -jobs, saved searches, inputs, and indexes). All of these fields are -:class:`Collection` objects:: - - appcollection = service.apps - my_app = appcollection.create('my_app') - my_app = appcollection['my_app'] - appcollection.delete('my_app') - -The individual elements of the collection, in this case *applications*, -are subclasses of :class:`Entity`. An ``Entity`` object has fields for its -attributes, and methods that are specific to each kind of entity. For example:: - - print(my_app['author']) # Or: print(my_app.author) - my_app.package() # Creates a compressed package of this application -""" - -import contextlib -import datetime -import json -import logging -import re -import socket -from datetime import datetime, timedelta -from time import sleep -from urllib import parse - -from splunklib import data -from splunklib.data import record -from splunklib.binding import (AuthenticationError, Context, HTTPError, UrlEncoded, - _encode, _make_cookie_header, _NoAuthenticationToken, - namespace) - -logger = logging.getLogger(__name__) - -__all__ = [ - "connect", - "NotSupportedError", - "OperationError", - "IncomparableException", - "Service", - "namespace", - "AuthenticationError" -] - -PATH_APPS = "apps/local/" -PATH_CAPABILITIES = "authorization/capabilities/" -PATH_CONF = "configs/conf-%s/" -PATH_PROPERTIES = "properties/" -PATH_DEPLOYMENT_CLIENTS = "deployment/client/" -PATH_DEPLOYMENT_TENANTS = "deployment/tenants/" -PATH_DEPLOYMENT_SERVERS = "deployment/server/" -PATH_DEPLOYMENT_SERVERCLASSES = "deployment/serverclass/" -PATH_EVENT_TYPES = "saved/eventtypes/" -PATH_FIRED_ALERTS = "alerts/fired_alerts/" -PATH_INDEXES = "data/indexes/" -PATH_INPUTS = "data/inputs/" -PATH_JOBS = "search/jobs/" -PATH_JOBS_V2 = "search/v2/jobs/" -PATH_LOGGER = "/services/server/logger/" -PATH_MESSAGES = "messages/" -PATH_MODULAR_INPUTS = "data/modular-inputs" -PATH_ROLES = "authorization/roles/" -PATH_SAVED_SEARCHES = "saved/searches/" -PATH_STANZA = "configs/conf-%s/%s" # (file, stanza) -PATH_USERS = "authentication/users/" -PATH_RECEIVERS_STREAM = "/services/receivers/stream" -PATH_RECEIVERS_SIMPLE = "/services/receivers/simple" -PATH_STORAGE_PASSWORDS = "storage/passwords" - -XNAMEF_ATOM = "{http://www.w3.org/2005/Atom}%s" -XNAME_ENTRY = XNAMEF_ATOM % "entry" -XNAME_CONTENT = XNAMEF_ATOM % "content" - -MATCH_ENTRY_CONTENT = f"{XNAME_ENTRY}/{XNAME_CONTENT}/*" - - -class IllegalOperationException(Exception): - """Thrown when an operation is not possible on the Splunk instance that a - :class:`Service` object is connected to.""" - - -class IncomparableException(Exception): - """Thrown when trying to compare objects (using ``==``, ``<``, ``>``, and - so on) of a type that doesn't support 
it.""" - - -class AmbiguousReferenceException(ValueError): - """Thrown when the name used to fetch an entity matches more than one entity.""" - - -class InvalidNameException(Exception): - """Thrown when the specified name contains characters that are not allowed - in Splunk entity names.""" - - -class NoSuchCapability(Exception): - """Thrown when the capability that has been referred to doesn't exist.""" - - -class OperationError(Exception): - """Raised for a failed operation, such as a timeout.""" - - -class NotSupportedError(Exception): - """Raised for operations that are not supported on a given object.""" - - -def _trailing(template, *targets): - """Substring of *template* following all *targets*. - - **Example**:: - - template = "this is a test of the bunnies." - _trailing(template, "is", "est", "the") == " bunnies" - - Each target is matched successively in the string, and the string - remaining after the last target is returned. If one of the targets - fails to match, a ValueError is raised. - - :param template: Template to extract a trailing string from. - :type template: ``string`` - :param targets: Strings to successively match in *template*. - :type targets: list of ``string``s - :return: Trailing string after all targets are matched. - :rtype: ``string`` - :raises ValueError: Raised when one of the targets does not match. - """ - s = template - for t in targets: - n = s.find(t) - if n == -1: - raise ValueError("Target " + t + " not found in template.") - s = s[n + len(t):] - return s - - -# Filter the given state content record according to the given arg list. -def _filter_content(content, *args): - if len(args) > 0: - return record((k, content[k]) for k in args) - return record((k, v) for k, v in list(content.items()) - if k not in ['eai:acl', 'eai:attributes', 'type']) - - -# Construct a resource path from the given base path + resource name -def _path(base, name): - if not base.endswith('/'): base = base + '/' - return base + name - - -# Load an atom record from the body of the given response -# this will ultimately be sent to an xml ElementTree so we -# should use the xmlcharrefreplace option -def _load_atom(response, match=None): - return data.load(response.body.read() - .decode('utf-8', 'xmlcharrefreplace'), match) - - -# Load an array of atom entries from the body of the given response -def _load_atom_entries(response): - r = _load_atom(response) - if 'feed' in r: - # Need this to handle a random case in the REST API - if r.feed.get('totalResults') in [0, '0']: - return [] - entries = r.feed.get('entry', None) - if entries is None: return None - return entries if isinstance(entries, list) else [entries] - # Unlike most other endpoints, the jobs endpoint does not return - # its state wrapped in another element, but at the top level. - # For example, in XML, it returns ... instead of - # .... 
- entries = r.get('entry', None) - if entries is None: return None - return entries if isinstance(entries, list) else [entries] - - -# Load the sid from the body of the given response -def _load_sid(response, output_mode): - if output_mode == "json": - json_obj = json.loads(response.body.read()) - return json_obj.get('sid') - return _load_atom(response).response.sid - - -# Parse the given atom entry record into a generic entity state record -def _parse_atom_entry(entry): - title = entry.get('title', None) - - elink = entry.get('link', []) - elink = elink if isinstance(elink, list) else [elink] - links = record((link.rel, link.href) for link in elink) - - # Retrieve entity content values - content = entry.get('content', {}) - - # Host entry metadata - metadata = _parse_atom_metadata(content) - - # Filter some of the noise out of the content record - content = record((k, v) for k, v in list(content.items()) - if k not in ['eai:acl', 'eai:attributes']) - - if 'type' in content: - if isinstance(content['type'], list): - content['type'] = [t for t in content['type'] if t != 'text/xml'] - # Unset type if it was only 'text/xml' - if len(content['type']) == 0: - content.pop('type', None) - # Flatten 1 element list - if len(content['type']) == 1: - content['type'] = content['type'][0] - else: - content.pop('type', None) - - return record({ - 'title': title, - 'links': links, - 'access': metadata.access, - 'fields': metadata.fields, - 'content': content, - 'updated': entry.get("updated") - }) - - -# Parse the metadata fields out of the given atom entry content record -def _parse_atom_metadata(content): - # Hoist access metadata - access = content.get('eai:acl', None) - - # Hoist content metadata (and cleanup some naming) - attributes = content.get('eai:attributes', {}) - fields = record({ - 'required': attributes.get('requiredFields', []), - 'optional': attributes.get('optionalFields', []), - 'wildcard': attributes.get('wildcardFields', [])}) - - return record({'access': access, 'fields': fields}) - - -# kwargs: scheme, host, port, app, owner, username, password -def connect(**kwargs): - """This function connects and logs in to a Splunk instance. - - This function is a shorthand for :meth:`Service.login`. - The ``connect`` function makes one round trip to the server (for logging in). - - :param host: The host name (the default is "localhost"). - :type host: ``string`` - :param port: The port number (the default is 8089). - :type port: ``integer`` - :param scheme: The scheme for accessing the service (the default is "https"). - :type scheme: "https" or "http" - :param verify: Enable (True) or disable (False) SSL verification for - https connections. (optional, the default is True) - :type verify: ``Boolean`` - :param `owner`: The owner context of the namespace (optional). - :type owner: ``string`` - :param `app`: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode for the namespace (the default is "user"). - :type sharing: "global", "system", "app", or "user" - :param `token`: The current session token (optional). Session tokens can be - shared across multiple service instances. - :type token: ``string`` - :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. - This parameter is only supported for Splunk 6.2+. - :type cookie: ``string`` - :param autologin: When ``True``, automatically tries to log in again if the - session terminates. 
- :type autologin: ``boolean`` - :param `username`: The Splunk account username, which is used to - authenticate the Splunk instance. - :type username: ``string`` - :param `password`: The password for the Splunk account. - :type password: ``string`` - :param retires: Number of retries for each HTTP connection (optional, the default is 0). - NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER. - :type retries: ``int`` - :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s). - :type retryDelay: ``int`` (in seconds) - :param `context`: The SSLContext that can be used when setting verify=True (optional) - :type context: ``SSLContext`` - :return: An initialized :class:`Service` connection. - - **Example**:: - - import splunklib.client as client - s = client.connect(...) - a = s.apps["my_app"] - ... - """ - s = Service(**kwargs) - s.login() - return s - - -# In preparation for adding Storm support, we added an -# intermediary class between Service and Context. Storm's -# API is not going to be the same as enterprise Splunk's -# API, so we will derive both Service (for enterprise Splunk) -# and StormService for (Splunk Storm) from _BaseService, and -# put any shared behavior on it. -class _BaseService(Context): - pass - - -class Service(_BaseService): - """A Pythonic binding to Splunk instances. - - A :class:`Service` represents a binding to a Splunk instance on an - HTTP or HTTPS port. It handles the details of authentication, wire - formats, and wraps the REST API endpoints into something more - Pythonic. All of the low-level operations on the instance from - :class:`splunklib.binding.Context` are also available in case you need - to do something beyond what is provided by this class. - - After creating a ``Service`` object, you must call its :meth:`login` - method before you can issue requests to Splunk. - Alternately, use the :func:`connect` function to create an already - authenticated :class:`Service` object, or provide a session token - when creating the :class:`Service` object explicitly (the same - token may be shared by multiple :class:`Service` objects). - - :param host: The host name (the default is "localhost"). - :type host: ``string`` - :param port: The port number (the default is 8089). - :type port: ``integer`` - :param scheme: The scheme for accessing the service (the default is "https"). - :type scheme: "https" or "http" - :param verify: Enable (True) or disable (False) SSL verification for - https connections. (optional, the default is True) - :type verify: ``Boolean`` - :param `owner`: The owner context of the namespace (optional; use "-" for wildcard). - :type owner: ``string`` - :param `app`: The app context of the namespace (optional; use "-" for wildcard). - :type app: ``string`` - :param `token`: The current session token (optional). Session tokens can be - shared across multiple service instances. - :type token: ``string`` - :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. - This parameter is only supported for Splunk 6.2+. - :type cookie: ``string`` - :param `username`: The Splunk account username, which is used to - authenticate the Splunk instance. - :type username: ``string`` - :param `password`: The password, which is used to authenticate the Splunk - instance. - :type password: ``string`` - :param retires: Number of retries for each HTTP connection (optional, the default is 0). 
- NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER. - :type retries: ``int`` - :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s). - :type retryDelay: ``int`` (in seconds) - :return: A :class:`Service` instance. - - **Example**:: - - import splunklib.client as client - s = client.Service(username="boris", password="natasha", ...) - s.login() - # Or equivalently - s = client.connect(username="boris", password="natasha") - # Or if you already have a session token - s = client.Service(token="atg232342aa34324a") - # Or if you already have a valid cookie - s = client.Service(cookie="splunkd_8089=...") - """ - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self._splunk_version = None - self._kvstore_owner = None - self._instance_type = None - - @property - def apps(self): - """Returns the collection of applications that are installed on this instance of Splunk. - - :return: A :class:`Collection` of :class:`Application` entities. - """ - return Collection(self, PATH_APPS, item=Application) - - @property - def confs(self): - """Returns the collection of configuration files for this Splunk instance. - - :return: A :class:`Configurations` collection of - :class:`ConfigurationFile` entities. - """ - return Configurations(self) - - @property - def capabilities(self): - """Returns the list of system capabilities. - - :return: A ``list`` of capabilities. - """ - response = self.get(PATH_CAPABILITIES) - return _load_atom(response, MATCH_ENTRY_CONTENT).capabilities - - @property - def event_types(self): - """Returns the collection of event types defined in this Splunk instance. - - :return: An :class:`Entity` containing the event types. - """ - return Collection(self, PATH_EVENT_TYPES) - - @property - def fired_alerts(self): - """Returns the collection of alerts that have been fired on the Splunk - instance, grouped by saved search. - - :return: A :class:`Collection` of :class:`AlertGroup` entities. - """ - return Collection(self, PATH_FIRED_ALERTS, item=AlertGroup) - - @property - def indexes(self): - """Returns the collection of indexes for this Splunk instance. - - :return: An :class:`Indexes` collection of :class:`Index` entities. - """ - return Indexes(self, PATH_INDEXES, item=Index) - - @property - def info(self): - """Returns the information about this instance of Splunk. - - :return: The system information, as key-value pairs. - :rtype: ``dict`` - """ - response = self.get("/services/server/info") - return _filter_content(_load_atom(response, MATCH_ENTRY_CONTENT)) - - def input(self, path, kind=None): - """Retrieves an input by path, and optionally kind. - - :return: A :class:`Input` object. - """ - return Input(self, path, kind=kind).refresh() - - @property - def inputs(self): - """Returns the collection of inputs configured on this Splunk instance. - - :return: An :class:`Inputs` collection of :class:`Input` entities. - """ - return Inputs(self) - - def job(self, sid): - """Retrieves a search job by sid. - - :return: A :class:`Job` object. - """ - return Job(self, sid).refresh() - - @property - def jobs(self): - """Returns the collection of current search jobs. - - :return: A :class:`Jobs` collection of :class:`Job` entities. - """ - return Jobs(self) - - @property - def loggers(self): - """Returns the collection of logging level categories and their status. - - :return: A :class:`Loggers` collection of logging levels. 
- """ - return Loggers(self) - - @property - def messages(self): - """Returns the collection of service messages. - - :return: A :class:`Collection` of :class:`Message` entities. - """ - return Collection(self, PATH_MESSAGES, item=Message) - - @property - def modular_input_kinds(self): - """Returns the collection of the modular input kinds on this Splunk instance. - - :return: A :class:`ReadOnlyCollection` of :class:`ModularInputKind` entities. - """ - if self.splunk_version >= (5,): - return ReadOnlyCollection(self, PATH_MODULAR_INPUTS, item=ModularInputKind) - raise IllegalOperationException("Modular inputs are not supported before Splunk version 5.") - - @property - def storage_passwords(self): - """Returns the collection of the storage passwords on this Splunk instance. - - :return: A :class:`ReadOnlyCollection` of :class:`StoragePasswords` entities. - """ - return StoragePasswords(self) - - # kwargs: enable_lookups, reload_macros, parse_only, output_mode - def parse(self, query, **kwargs): - """Parses a search query and returns a semantic map of the search. - - :param query: The search query to parse. - :type query: ``string`` - :param kwargs: Arguments to pass to the ``search/parser`` endpoint - (optional). Valid arguments are: - - * "enable_lookups" (``boolean``): If ``True``, performs reverse lookups - to expand the search expression. - - * "output_mode" (``string``): The output format (XML or JSON). - - * "parse_only" (``boolean``): If ``True``, disables the expansion of - search due to evaluation of subsearches, time term expansion, - lookups, tags, eventtypes, and sourcetype alias. - - * "reload_macros" (``boolean``): If ``True``, reloads macro - definitions from macros.conf. - - :type kwargs: ``dict`` - :return: A semantic map of the parsed search query. - """ - if not self.disable_v2_api: - return self.post("search/v2/parser", q=query, **kwargs) - return self.get("search/parser", q=query, **kwargs) - - def restart(self, timeout=None): - """Restarts this Splunk instance. - - The service is unavailable until it has successfully restarted. - - If a *timeout* value is specified, ``restart`` blocks until the service - resumes or the timeout period has been exceeded. Otherwise, ``restart`` returns - immediately. - - :param timeout: A timeout period, in seconds. - :type timeout: ``integer`` - """ - msg = {"value": "Restart requested by " + self.username + "via the Splunk SDK for Python"} - # This message will be deleted once the server actually restarts. - self.messages.create(name="restart_required", **msg) - result = self.post("/services/server/control/restart") - if timeout is None: - return result - start = datetime.now() - diff = timedelta(seconds=timeout) - while datetime.now() - start < diff: - try: - self.login() - if not self.restart_required: - return result - except Exception as e: - sleep(1) - raise Exception("Operation time out.") - - @property - def restart_required(self): - """Indicates whether splunkd is in a state that requires a restart. - - :return: A ``boolean`` that indicates whether a restart is required. - - """ - response = self.get("messages").body.read() - messages = data.load(response)['feed'] - if 'entry' not in messages: - result = False - else: - if isinstance(messages['entry'], dict): - titles = [messages['entry']['title']] - else: - titles = [x['title'] for x in messages['entry']] - result = 'restart_required' in titles - return result - - @property - def roles(self): - """Returns the collection of user roles. 
- - :return: A :class:`Roles` collection of :class:`Role` entities. - """ - return Roles(self) - - def search(self, query, **kwargs): - """Runs a search using a search query and any optional arguments you - provide, and returns a `Job` object representing the search. - - :param query: A search query. - :type query: ``string`` - :param kwargs: Arguments for the search (optional): - - * "output_mode" (``string``): Specifies the output format of the - results. - - * "earliest_time" (``string``): Specifies the earliest time in the - time range to - search. The time string can be a UTC time (with fractional - seconds), a relative time specifier (to now), or a formatted - time string. - - * "latest_time" (``string``): Specifies the latest time in the time - range to - search. The time string can be a UTC time (with fractional - seconds), a relative time specifier (to now), or a formatted - time string. - - * "rf" (``string``): Specifies one or more fields to add to the - search. - - :type kwargs: ``dict`` - :rtype: class:`Job` - :returns: An object representing the created job. - """ - return self.jobs.create(query, **kwargs) - - @property - def saved_searches(self): - """Returns the collection of saved searches. - - :return: A :class:`SavedSearches` collection of :class:`SavedSearch` - entities. - """ - return SavedSearches(self) - - @property - def settings(self): - """Returns the configuration settings for this instance of Splunk. - - :return: A :class:`Settings` object containing configuration settings. - """ - return Settings(self) - - @property - def splunk_version(self): - """Returns the version of the splunkd instance this object is attached - to. - - The version is returned as a tuple of the version components as - integers (for example, `(4,3,3)` or `(5,)`). - - :return: A ``tuple`` of ``integers``. - """ - if self._splunk_version is None: - self._splunk_version = tuple(int(p) for p in self.info['version'].split('.')) - return self._splunk_version - - @property - def splunk_instance(self): - if self._instance_type is None : - splunk_info = self.info - if hasattr(splunk_info, 'instance_type') : - self._instance_type = splunk_info['instance_type'] - else: - self._instance_type = '' - return self._instance_type - - @property - def disable_v2_api(self): - if self.splunk_instance.lower() == 'cloud': - return self.splunk_version < (9,0,2209) - return self.splunk_version < (9,0,2) - - @property - def kvstore_owner(self): - """Returns the KVStore owner for this instance of Splunk. - - By default is the kvstore owner is not set, it will return "nobody" - :return: A string with the KVStore owner. - """ - if self._kvstore_owner is None: - self._kvstore_owner = "nobody" - return self._kvstore_owner - - @kvstore_owner.setter - def kvstore_owner(self, value): - """ - kvstore is refreshed, when the owner value is changed - """ - self._kvstore_owner = value - self.kvstore - - @property - def kvstore(self): - """Returns the collection of KV Store collections. - - sets the owner for the namespace, before retrieving the KVStore Collection - - :return: A :class:`KVStoreCollections` collection of :class:`KVStoreCollection` entities. - """ - self.namespace['owner'] = self.kvstore_owner - return KVStoreCollections(self) - - @property - def users(self): - """Returns the collection of users. - - :return: A :class:`Users` collection of :class:`User` entities. - """ - return Users(self) - - -class Endpoint: - """This class represents individual Splunk resources in the Splunk REST API. 
- - An ``Endpoint`` object represents a URI, such as ``/services/saved/searches``. - This class provides the common functionality of :class:`Collection` and - :class:`Entity` (essentially HTTP GET and POST methods). - """ - - def __init__(self, service, path): - self.service = service - self.path = path - - def get_api_version(self, path): - """Return the API version of the service used in the provided path. - - Args: - path (str): A fully-qualified endpoint path (for example, "/services/search/jobs"). - - Returns: - int: Version of the API (for example, 1) - """ - # Default to v1 if undefined in the path - # For example, "/services/search/jobs" is using API v1 - api_version = 1 - - versionSearch = re.search('(?:servicesNS\/[^/]+\/[^/]+|services)\/[^/]+\/v(\d+)\/', path) - if versionSearch: - api_version = int(versionSearch.group(1)) - - return api_version - - def get(self, path_segment="", owner=None, app=None, sharing=None, **query): - """Performs a GET operation on the path segment relative to this endpoint. - - This method is named to match the HTTP method. This method makes at least - one roundtrip to the server, one additional round trip for - each 303 status returned, plus at most two additional round - trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - If *owner*, *app*, and *sharing* are omitted, this method takes a - default namespace from the :class:`Service` object for this :class:`Endpoint`. - All other keyword arguments are included in the URL as query parameters. - - :raises AuthenticationError: Raised when the ``Service`` is not logged in. - :raises HTTPError: Raised when an error in the request occurs. - :param path_segment: A path segment relative to this endpoint. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode for the namespace (optional). - :type sharing: "global", "system", "app", or "user" - :param query: All other keyword arguments, which are used as query - parameters. - :type query: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - import splunklib.client - s = client.service(...) - apps = s.apps - apps.get() == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '26208'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 16:30:35 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'OK', - 'status': 200} - apps.get('nonexistant/path') # raises HTTPError - s.logout() - apps.get() # raises AuthenticationError - """ - # self.path to the Endpoint is relative in the SDK, so passing - # owner, app, sharing, etc. along will produce the correct - # namespace in the final request. - if path_segment.startswith('/'): - path = path_segment - else: - if not self.path.endswith('/') and path_segment != "": - self.path = self.path + '/' - path = self.service._abspath(self.path + path_segment, owner=owner, - app=app, sharing=sharing) - # ^-- This was "%s%s" % (self.path, path_segment). - # That doesn't work, because self.path may be UrlEncoded. 
- - # Get the API version from the path - api_version = self.get_api_version(path) - - # Search API v2+ fallback to v1: - # - In v2+, /results_preview, /events and /results do not support search params. - # - Fallback from v2+ to v1 if Splunk Version is < 9. - # if api_version >= 2 and ('search' in query and path.endswith(tuple(["results_preview", "events", "results"])) or self.service.splunk_version < (9,)): - # path = path.replace(PATH_JOBS_V2, PATH_JOBS) - - if api_version == 1: - if isinstance(path, UrlEncoded): - path = UrlEncoded(path.replace(PATH_JOBS_V2, PATH_JOBS), skip_encode=True) - else: - path = path.replace(PATH_JOBS_V2, PATH_JOBS) - - return self.service.get(path, - owner=owner, app=app, sharing=sharing, - **query) - - def post(self, path_segment="", owner=None, app=None, sharing=None, **query): - """Performs a POST operation on the path segment relative to this endpoint. - - This method is named to match the HTTP method. This method makes at least - one roundtrip to the server, one additional round trip for - each 303 status returned, plus at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - If *owner*, *app*, and *sharing* are omitted, this method takes a - default namespace from the :class:`Service` object for this :class:`Endpoint`. - All other keyword arguments are included in the URL as query parameters. - - :raises AuthenticationError: Raised when the ``Service`` is not logged in. - :raises HTTPError: Raised when an error in the request occurs. - :param path_segment: A path segment relative to this endpoint. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :param query: All other keyword arguments, which are used as query - parameters. - :type query: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - import splunklib.client - s = client.service(...) - apps = s.apps - apps.post(name='boris') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '2908'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 18:34:50 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'Created', - 'status': 201} - apps.get('nonexistant/path') # raises HTTPError - s.logout() - apps.get() # raises AuthenticationError - """ - if path_segment.startswith('/'): - path = path_segment - else: - if not self.path.endswith('/') and path_segment != "": - self.path = self.path + '/' - path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) - - # Get the API version from the path - api_version = self.get_api_version(path) - - # Search API v2+ fallback to v1: - # - In v2+, /results_preview, /events and /results do not support search params. - # - Fallback from v2+ to v1 if Splunk Version is < 9. 
- # if api_version >= 2 and ('search' in query and path.endswith(tuple(["results_preview", "events", "results"])) or self.service.splunk_version < (9,)): - # path = path.replace(PATH_JOBS_V2, PATH_JOBS) - - if api_version == 1: - if isinstance(path, UrlEncoded): - path = UrlEncoded(path.replace(PATH_JOBS_V2, PATH_JOBS), skip_encode=True) - else: - path = path.replace(PATH_JOBS_V2, PATH_JOBS) - - return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) - - -# kwargs: path, app, owner, sharing, state -class Entity(Endpoint): - """This class is a base class for Splunk entities in the REST API, such as - saved searches, jobs, indexes, and inputs. - - ``Entity`` provides the majority of functionality required by entities. - Subclasses only implement the special cases for individual entities. - For example for saved searches, the subclass makes fields like ``action.email``, - ``alert_type``, and ``search`` available. - - An ``Entity`` is addressed like a dictionary, with a few extensions, - so the following all work, for example in saved searches:: - - ent['action.email'] - ent['alert_type'] - ent['search'] - - You can also access the fields as though they were the fields of a Python - object, as in:: - - ent.alert_type - ent.search - - However, because some of the field names are not valid Python identifiers, - the dictionary-like syntax is preferable. - - The state of an :class:`Entity` object is cached, so accessing a field - does not contact the server. If you think the values on the - server have changed, call the :meth:`Entity.refresh` method. - """ - # Not every endpoint in the API is an Entity or a Collection. For - # example, a saved search at saved/searches/{name} has an additional - # method saved/searches/{name}/scheduled_times, but this isn't an - # entity in its own right. In these cases, subclasses should - # implement a method that uses the get and post methods inherited - # from Endpoint, calls the _load_atom function (it's elsewhere in - # client.py, but not a method of any object) to read the - # information, and returns the extracted data in a Pythonesque form. - # - # The primary use of subclasses of Entity is to handle specially - # named fields in the Entity. If you only need to provide a default - # value for an optional field, subclass Entity and define a - # dictionary ``defaults``. For instance,:: - # - # class Hypothetical(Entity): - # defaults = {'anOptionalField': 'foo', - # 'anotherField': 'bar'} - # - # If you have to do more than provide a default, such as rename or - # actually process values, then define a new method with the - # ``@property`` decorator. - # - # class Hypothetical(Entity): - # @property - # def foobar(self): - # return self.content['foo'] + "-" + self.content["bar"] - - # Subclasses can override defaults the default values for - # optional fields. See above. - defaults = {} - - def __init__(self, service, path, **kwargs): - Endpoint.__init__(self, service, path) - self._state = None - if not kwargs.get('skip_refresh', False): - self.refresh(kwargs.get('state', None)) # "Prefresh" - - def __contains__(self, item): - try: - self[item] - return True - except (KeyError, AttributeError): - return False - - def __eq__(self, other): - """Raises IncomparableException. - - Since Entity objects are snapshots of times on the server, no - simple definition of equality will suffice beyond instance - equality, and instance equality leads to strange situations - such as:: - - import splunklib.client as client - c = client.connect(...) 
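The commented-out block and the live ``api_version == 1`` branch implement the same defensive rewrite. A sketch, using stand-in values for the module's ``PATH_JOBS``/``PATH_JOBS_V2`` constants (assumed here to be the v1 and v2 jobs path fragments)::

    import re

    PATH_JOBS = "search/jobs/"        # stand-in for the module constant
    PATH_JOBS_V2 = "search/v2/jobs/"  # stand-in for the module constant

    def normalize(path):
        # Defensive rewrite mirroring Endpoint.get/post: when the path is not
        # recognized as v2+, any v2 jobs fragment falls back to the v1 endpoint.
        m = re.search(r'(?:servicesNS/[^/]+/[^/]+|services)/[^/]+/v(\d+)/', path)
        api_version = int(m.group(1)) if m else 1
        if api_version == 1:
            path = path.replace(PATH_JOBS_V2, PATH_JOBS)
        return path

    assert normalize("search/v2/jobs/12345.67/results/") == "search/jobs/12345.67/results/"
    assert normalize("/services/search/v2/jobs/") == "/services/search/v2/jobs/"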
-            saved_searches = c.saved_searches
-            x = saved_searches['asearch']
-
-        but then ``x != saved_searches['asearch']`` whether or not
-        there was a change on the server. Rather than try to do
-        something fancy, we simply declare that equality is
-        undefined for Entities.
-
-        Makes no roundtrips to the server.
-        """
-        raise IncomparableException(f"Equality is undefined for objects of class {self.__class__.__name__}")
-
-    def __getattr__(self, key):
-        # Called when an attribute was not found by the normal method. In this
-        # case we try to find it in self.content and then self.defaults.
-        if key in self.state.content:
-            return self.state.content[key]
-        if key in self.defaults:
-            return self.defaults[key]
-        raise AttributeError(key)
-
-    def __getitem__(self, key):
-        # getattr attempts to find a field on the object in the normal way,
-        # then calls __getattr__ if it cannot.
-        return getattr(self, key)
-
-    # Load the Atom entry record from the given response - this is a method
-    # because the "entry" record varies slightly by entity and this allows
-    # for a subclass to override and handle any special cases.
-    def _load_atom_entry(self, response):
-        elem = _load_atom(response, XNAME_ENTRY)
-        if isinstance(elem, list):
-            apps = [ele.entry.content.get('eai:appName') for ele in elem]
-
-            raise AmbiguousReferenceException(
-                f"Fetch from server returned multiple entries for name '{elem[0].entry.title}' in apps {apps}.")
-        return elem.entry
-
-    # Load the entity state record from the given response
-    def _load_state(self, response):
-        entry = self._load_atom_entry(response)
-        return _parse_atom_entry(entry)
-
-    def _run_action(self, path_segment, **kwargs):
-        """Run a method and return the content Record from the returned XML.
-
-        A method is a relative path from an Entity that is not itself
-        an Entity. _run_action assumes that the returned XML is an
-        Atom feed containing one Entry, and the contents of that Entry
-        are what should be the return value. This is right in enough
-        cases to make this method useful.
-        """
-        response = self.get(path_segment, **kwargs)
-        data = self._load_atom_entry(response)
-        rec = _parse_atom_entry(data)
-        return rec.content
-
-    def _proper_namespace(self, owner=None, app=None, sharing=None):
-        """Produce a namespace sans wildcards for use in entity requests.
-
-        This method tries to fill in the fields of the namespace which are `None`
-        or wildcard (`'-'`) from the entity's namespace. If that fails, it uses
-        the service's namespace.
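Because ``__getattr__`` falls through to ``content`` and then ``defaults``, fields read naturally in either style, while comparing two snapshots raises. A short sketch (placeholder connection arguments)::

    import splunklib.client as client
    from splunklib.client import IncomparableException

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changed!")  # hypothetical

    app = service.apps["search"]
    print(app["label"])   # dictionary style works for every field name
    print(app.label)      # attribute style works for valid identifiers
    try:
        app == service.apps["search"]
    except IncomparableException:
        print("equality is deliberately undefined for entities")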
- - :param owner: - :param app: - :param sharing: - :return: - """ - if owner is None and app is None and sharing is None: # No namespace provided - if self._state is not None and 'access' in self._state: - return (self._state.access.owner, - self._state.access.app, - self._state.access.sharing) - return (self.service.namespace['owner'], - self.service.namespace['app'], - self.service.namespace['sharing']) - return owner, app, sharing - - def delete(self): - owner, app, sharing = self._proper_namespace() - return self.service.delete(self.path, owner=owner, app=app, sharing=sharing) - - def get(self, path_segment="", owner=None, app=None, sharing=None, **query): - owner, app, sharing = self._proper_namespace(owner, app, sharing) - return super().get(path_segment, owner=owner, app=app, sharing=sharing, **query) - - def post(self, path_segment="", owner=None, app=None, sharing=None, **query): - owner, app, sharing = self._proper_namespace(owner, app, sharing) - return super().post(path_segment, owner=owner, app=app, sharing=sharing, **query) - - def refresh(self, state=None): - """Refreshes the state of this entity. - - If *state* is provided, load it as the new state for this - entity. Otherwise, make a roundtrip to the server (by calling - the :meth:`read` method of ``self``) to fetch an updated state, - plus at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param state: Entity-specific arguments (optional). - :type state: ``dict`` - :raises EntityDeletedException: Raised if the entity no longer exists on - the server. - - **Example**:: - - import splunklib.client as client - s = client.connect(...) - search = s.apps['search'] - search.refresh() - """ - if state is not None: - self._state = state - else: - self._state = self.read(self.get()) - return self - - @property - def access(self): - """Returns the access metadata for this entity. - - :return: A :class:`splunklib.data.Record` object with three keys: - ``owner``, ``app``, and ``sharing``. - """ - return self.state.access - - @property - def content(self): - """Returns the contents of the entity. - - :return: A ``dict`` containing values. - """ - return self.state.content - - def disable(self): - """Disables the entity at this endpoint.""" - self.post("disable") - return self - - def enable(self): - """Enables the entity at this endpoint.""" - self.post("enable") - return self - - @property - def fields(self): - """Returns the content metadata for this entity. - - :return: A :class:`splunklib.data.Record` object with three keys: - ``required``, ``optional``, and ``wildcard``. - """ - return self.state.fields - - @property - def links(self): - """Returns a dictionary of related resources. - - :return: A ``dict`` with keys and corresponding URLs. - """ - return self.state.links - - @property - def name(self): - """Returns the entity name. - - :return: The entity name. - :rtype: ``string`` - """ - return self.state.title - - def read(self, response): - """ Reads the current state of the entity from the server. """ - results = self._load_state(response) - # In lower layers of the SDK, we end up trying to URL encode - # text to be dispatched via HTTP. However, these links are already - # URL encoded when they arrive, and we need to mark them as such. 
- unquoted_links = dict((k, UrlEncoded(v, skip_encode=True)) - for k, v in list(results['links'].items())) - results['links'] = unquoted_links - return results - - def reload(self): - """Reloads the entity.""" - self.post("_reload") - return self - - def acl_update(self, **kwargs): - """To update Access Control List (ACL) properties for an endpoint. - - :param kwargs: Additional entity-specific arguments (required). - - - "owner" (``string``): The Splunk username, such as "admin". A value of "nobody" means no specific user (required). - - - "sharing" (``string``): A mode that indicates how the resource is shared. The sharing mode can be "user", "app", "global", or "system" (required). - - :type kwargs: ``dict`` - - **Example**:: - - import splunklib.client as client - service = client.connect(...) - saved_search = service.saved_searches["name"] - saved_search.acl_update(sharing="app", owner="nobody", app="search", **{"perms.read": "admin, nobody"}) - """ - if "body" not in kwargs: - kwargs = {"body": kwargs} - - if "sharing" not in kwargs["body"]: - raise ValueError("Required argument 'sharing' is missing.") - if "owner" not in kwargs["body"]: - raise ValueError("Required argument 'owner' is missing.") - - self.post("acl", **kwargs) - self.refresh() - return self - - @property - def state(self): - """Returns the entity's state record. - - :return: A ``dict`` containing fields and metadata for the entity. - """ - if self._state is None: self.refresh() - return self._state - - def update(self, **kwargs): - """Updates the server with any changes you've made to the current entity - along with any additional arguments you specify. - - **Note**: You cannot update the ``name`` field of an entity. - - Many of the fields in the REST API are not valid Python - identifiers, which means you cannot pass them as keyword - arguments. That is, Python will fail to parse the following:: - - # This fails - x.update(check-new=False, email.to='boris@utopia.net') - - However, you can always explicitly use a dictionary to pass - such keys:: - - # This works - x.update(**{'check-new': False, 'email.to': 'boris@utopia.net'}) - - :param kwargs: Additional entity-specific arguments (optional). - :type kwargs: ``dict`` - - :return: The entity this method is called on. - :rtype: class:`Entity` - """ - # The peculiarity in question: the REST API creates a new - # Entity if we pass name in the dictionary, instead of the - # expected behavior of updating this Entity. Therefore, we - # check for 'name' in kwargs and throw an error if it is - # there. - if 'name' in kwargs: - raise IllegalOperationException('Cannot update the name of an Entity via the REST API.') - self.post(**kwargs) - return self - - -class ReadOnlyCollection(Endpoint): - """This class represents a read-only collection of entities in the Splunk - instance. - """ - - def __init__(self, service, path, item=Entity): - Endpoint.__init__(self, service, path) - self.item = item # Item accessor - self.null_count = -1 - - def __contains__(self, name): - """Is there at least one entry called *name* in this collection? - - Makes a single roundtrip to the server, plus at most two more - if - the ``autologin`` field of :func:`connect` is set to ``True``. - """ - try: - self[name] - return True - except KeyError: - return False - except AmbiguousReferenceException: - return True - - def __getitem__(self, key): - """Fetch an item named *key* from this collection. - - A name is not a unique identifier in a collection. The unique - identifier is a name plus a namespace. 
For example, there can - be a saved search named ``'mysearch'`` with sharing ``'app'`` - in application ``'search'``, and another with sharing - ``'user'`` with owner ``'boris'`` and application - ``'search'``. If the ``Collection`` is attached to a - ``Service`` that has ``'-'`` (wildcard) as user and app in its - namespace, then both of these may be visible under the same - name. - - Where there is no conflict, ``__getitem__`` will fetch the - entity given just the name. If there is a conflict, and you - pass just a name, it will raise a ``ValueError``. In that - case, add the namespace as a second argument. - - This function makes a single roundtrip to the server, plus at - most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param key: The name to fetch, or a tuple (name, namespace). - :return: An :class:`Entity` object. - :raises KeyError: Raised if *key* does not exist. - :raises ValueError: Raised if no namespace is specified and *key* - does not refer to a unique name. - - **Example**:: - - s = client.connect(...) - saved_searches = s.saved_searches - x1 = saved_searches.create( - 'mysearch', 'search * | head 1', - owner='admin', app='search', sharing='app') - x2 = saved_searches.create( - 'mysearch', 'search * | head 1', - owner='admin', app='search', sharing='user') - # Raises ValueError: - saved_searches['mysearch'] - # Fetches x1 - saved_searches[ - 'mysearch', - client.namespace(sharing='app', app='search')] - # Fetches x2 - saved_searches[ - 'mysearch', - client.namespace(sharing='user', owner='boris', app='search')] - """ - try: - if isinstance(key, tuple) and len(key) == 2: - # x[a,b] is translated to x.__getitem__( (a,b) ), so we - # have to extract values out. - key, ns = key - key = UrlEncoded(key, encode_slash=True) - response = self.get(key, owner=ns.owner, app=ns.app) - else: - key = UrlEncoded(key, encode_slash=True) - response = self.get(key) - entries = self._load_list(response) - if len(entries) > 1: - raise AmbiguousReferenceException( - f"Found multiple entities named '{key}'; please specify a namespace.") - if len(entries) == 0: - raise KeyError(key) - return entries[0] - except HTTPError as he: - if he.status == 404: # No entity matching key and namespace. - raise KeyError(key) - else: - raise - - def __iter__(self, **kwargs): - """Iterate over the entities in the collection. - - :param kwargs: Additional arguments. - :type kwargs: ``dict`` - :rtype: iterator over entities. - - Implemented to give Collection a listish interface. This - function always makes a roundtrip to the server, plus at most - two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - **Example**:: - - import splunklib.client as client - c = client.connect(...) - saved_searches = c.saved_searches - for entity in saved_searches: - print(f"Saved search named {entity.name}") - """ - - for item in self.iter(**kwargs): - yield item - - def __len__(self): - """Enable ``len(...)`` for ``Collection`` objects. - - Implemented for consistency with a listish interface. No - further failure modes beyond those possible for any method on - an Endpoint. - - This function always makes a round trip to the server, plus at - most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - **Example**:: - - import splunklib.client as client - c = client.connect(...) 
-            saved_searches = c.saved_searches
-            n = len(saved_searches)
-        """
-        return len(self.list())
-
-    def _entity_path(self, state):
-        """Calculate the path to an entity to be returned.
-
-        *state* should be the dictionary returned by
-        :func:`_parse_atom_entry`. :func:`_entity_path` extracts the
-        link to this entity from *state*, and strips all the namespace
-        prefixes from it to leave only the relative path of the entity
-        itself, sans namespace.
-
-        :rtype: ``string``
-        :return: an absolute path
-        """
-        # This has been factored out so that it can be easily
-        # overloaded by Configurations, which has to switch its
-        # entities' endpoints from its own properties/ to configs/.
-        raw_path = parse.unquote(state.links.alternate)
-        if 'servicesNS/' in raw_path:
-            return _trailing(raw_path, 'servicesNS/', '/', '/')
-        if 'services/' in raw_path:
-            return _trailing(raw_path, 'services/')
-        return raw_path
-
-    def _load_list(self, response):
-        """Converts *response* to a list of entities.
-
-        *response* is assumed to be a :class:`Record` containing an
-        HTTP response, of the form::
-
-            {'status': 200,
-             'headers': [('content-length', '232642'),
-                         ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
-                         ('server', 'Splunkd'),
-                         ('connection', 'close'),
-                         ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
-                         ('date', 'Tue, 29 May 2012 15:27:08 GMT'),
-                         ('content-type', 'text/xml; charset=utf-8')],
-             'reason': 'OK',
-             'body': ...a stream implementing .read()...}
-
-        The ``'body'`` key refers to a stream containing an Atom feed,
-        that is, an XML document with a toplevel element ``<feed>``,
-        and within that element one or more ``<entry>`` elements.
-        """
-        # Some subclasses of Collection have to override this because
-        # splunkd returns something that doesn't match
-        # <feed><entry></entry></feed>.
-        entries = _load_atom_entries(response)
-        if entries is None: return []
-        entities = []
-        for entry in entries:
-            state = _parse_atom_entry(entry)
-            entity = self.item(
-                self.service,
-                self._entity_path(state),
-                state=state)
-            entities.append(entity)
-
-        return entities
-
-    def itemmeta(self):
-        """Returns metadata for members of the collection.
-
-        Makes a single roundtrip to the server, plus two more at most if
-        the ``autologin`` field of :func:`connect` is set to ``True``.
-
-        :return: A :class:`splunklib.data.Record` object containing the metadata.
-
-        **Example**::
-
-            import splunklib.client as client
-            import pprint
-            s = client.connect(...)
-            pprint.pprint(s.apps.itemmeta())
-            {'access': {'app': 'search',
-                        'can_change_perms': '1',
-                        'can_list': '1',
-                        'can_share_app': '1',
-                        'can_share_global': '1',
-                        'can_share_user': '1',
-                        'can_write': '1',
-                        'modifiable': '1',
-                        'owner': 'admin',
-                        'perms': {'read': ['*'], 'write': ['admin']},
-                        'removable': '0',
-                        'sharing': 'user'},
-             'fields': {'optional': ['author',
-                                     'configured',
-                                     'description',
-                                     'label',
-                                     'manageable',
-                                     'template',
-                                     'visible'],
-                        'required': ['name'], 'wildcard': []}}
-        """
-        response = self.get("_new")
-        content = _load_atom(response, MATCH_ENTRY_CONTENT)
-        return _parse_atom_metadata(content)
-
-    def iter(self, offset=0, count=None, pagesize=None, **kwargs):
-        """Iterates over the collection.
-
-        This method is equivalent to the :meth:`list` method, but
-        it returns an iterator and can load a certain number of entities at a
-        time from the server.
-
-        :param offset: The index of the first entity to return (optional).
-        :type offset: ``integer``
-        :param count: The maximum number of entities to return (optional).
- :type count: ``integer`` - :param pagesize: The number of entities to load (optional). - :type pagesize: ``integer`` - :param kwargs: Additional arguments (optional): - - - "search" (``string``): The search query to filter responses. - - - "sort_dir" (``string``): The direction to sort returned items: - "asc" or "desc". - - - "sort_key" (``string``): The field to use for sorting (optional). - - - "sort_mode" (``string``): The collating sequence for sorting - returned items: "auto", "alpha", "alpha_case", or "num". - - :type kwargs: ``dict`` - - **Example**:: - - import splunklib.client as client - s = client.connect(...) - for saved_search in s.saved_searches.iter(pagesize=10): - # Loads 10 saved searches at a time from the - # server. - ... - """ - assert pagesize is None or pagesize > 0 - if count is None: - count = self.null_count - fetched = 0 - while count == self.null_count or fetched < count: - response = self.get(count=pagesize or count, offset=offset, **kwargs) - items = self._load_list(response) - N = len(items) - fetched += N - for item in items: - yield item - if pagesize is None or N < pagesize: - break - offset += N - logger.debug("pagesize=%d, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs) - - # kwargs: count, offset, search, sort_dir, sort_key, sort_mode - def list(self, count=None, **kwargs): - """Retrieves a list of entities in this collection. - - The entire collection is loaded at once and is returned as a list. This - function makes a single roundtrip to the server, plus at most two more if - the ``autologin`` field of :func:`connect` is set to ``True``. - There is no caching--every call makes at least one round trip. - - :param count: The maximum number of entities to return (optional). - :type count: ``integer`` - :param kwargs: Additional arguments (optional): - - - "offset" (``integer``): The offset of the first item to return. - - - "search" (``string``): The search query to filter responses. - - - "sort_dir" (``string``): The direction to sort returned items: - "asc" or "desc". - - - "sort_key" (``string``): The field to use for sorting (optional). - - - "sort_mode" (``string``): The collating sequence for sorting - returned items: "auto", "alpha", "alpha_case", or "num". - - :type kwargs: ``dict`` - :return: A ``list`` of entities. - """ - # response = self.get(count=count, **kwargs) - # return self._load_list(response) - return list(self.iter(count=count, **kwargs)) - - -class Collection(ReadOnlyCollection): - """A collection of entities. - - Splunk provides a number of different collections of distinct - entity types: applications, saved searches, fired alerts, and a - number of others. Each particular type is available separately - from the Splunk instance, and the entities of that type are - returned in a :class:`Collection`. - - The interface for :class:`Collection` does not quite match either - ``list`` or ``dict`` in Python, because there are enough semantic - mismatches with either to make its behavior surprising. A unique - element in a :class:`Collection` is defined by a string giving its - name plus namespace (although the namespace is optional if the name is - unique). - - **Example**:: - - import splunklib.client as client - service = client.connect(...) 
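Since ``list`` simply drains ``iter``, the practical difference is memory and request shape. A sketch contrasting the two (placeholder connection arguments)::

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changed!")  # hypothetical

    # Eager: the whole result set is fetched in one request and held in a list.
    names = [ss.name for ss in service.saved_searches.list(count=50)]

    # Lazy: at most 10 entities are requested at a time; paging is transparent.
    for saved_search in service.saved_searches.iter(pagesize=10):
        print(saved_search.name)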
- mycollection = service.saved_searches - mysearch = mycollection['my_search', client.namespace(owner='boris', app='natasha', sharing='user')] - # Or if there is only one search visible named 'my_search' - mysearch = mycollection['my_search'] - - Similarly, ``name`` in ``mycollection`` works as you might expect (though - you cannot currently pass a namespace to the ``in`` operator), as does - ``len(mycollection)``. - - However, as an aggregate, :class:`Collection` behaves more like a - list. If you iterate over a :class:`Collection`, you get an - iterator over the entities, not the names and namespaces. - - **Example**:: - - for entity in mycollection: - assert isinstance(entity, client.Entity) - - Use the :meth:`create` and :meth:`delete` methods to create and delete - entities in this collection. To view the access control list and other - metadata of the collection, use the :meth:`ReadOnlyCollection.itemmeta` method. - - :class:`Collection` does no caching. Each call makes at least one - round trip to the server to fetch data. - """ - - def create(self, name, **params): - """Creates a new entity in this collection. - - This function makes either one or two roundtrips to the - server, depending on the type of entities in this - collection, plus at most two more if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param name: The name of the entity to create. - :type name: ``string`` - :param namespace: A namespace, as created by the :func:`splunklib.binding.namespace` - function (optional). You can also set ``owner``, ``app``, and - ``sharing`` in ``params``. - :type namespace: A :class:`splunklib.data.Record` object with keys ``owner``, ``app``, - and ``sharing``. - :param params: Additional entity-specific arguments (optional). - :type params: ``dict`` - :return: The new entity. - :rtype: A subclass of :class:`Entity`, chosen by :meth:`Collection.self.item`. - - **Example**:: - - import splunklib.client as client - s = client.connect(...) - applications = s.apps - new_app = applications.create("my_fake_app") - """ - if not isinstance(name, str): - raise InvalidNameException(f"{name} is not a valid name for an entity.") - if 'namespace' in params: - namespace = params.pop('namespace') - params['owner'] = namespace.owner - params['app'] = namespace.app - params['sharing'] = namespace.sharing - response = self.post(name=name, **params) - atom = _load_atom(response, XNAME_ENTRY) - if atom is None: - # This endpoint doesn't return the content of the new - # item. We have to go fetch it ourselves. - return self[name] - entry = atom.entry - state = _parse_atom_entry(entry) - entity = self.item( - self.service, - self._entity_path(state), - state=state) - return entity - - def delete(self, name, **params): - """Deletes a specified entity from the collection. - - :param name: The name of the entity to delete. - :type name: ``string`` - :return: The collection. - :rtype: ``self`` - - This method is implemented for consistency with the REST API's DELETE - method. - - If there is no *name* entity on the server, a ``KeyError`` is - thrown. This function always makes a roundtrip to the server. - - **Example**:: - - import splunklib.client as client - c = client.connect(...) 
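The optional ``namespace`` argument to ``create`` unpacks into ``owner``/``app``/``sharing`` before the POST, so these two spellings are equivalent (sketch; placeholder connection arguments)::

    import splunklib.client as client
    from splunklib.binding import namespace

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changed!")  # hypothetical

    service.saved_searches.create(
        "my_search", search="search * | head 1",
        namespace=namespace(owner="admin", app="search", sharing="user"))

    # ...is the same as spelling the three fields out:
    # service.saved_searches.create("my_search", search="search * | head 1",
    #                               owner="admin", app="search", sharing="user")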
- saved_searches = c.saved_searches - saved_searches.create('my_saved_search', - 'search * | head 1') - assert 'my_saved_search' in saved_searches - saved_searches.delete('my_saved_search') - assert 'my_saved_search' not in saved_searches - """ - name = UrlEncoded(name, encode_slash=True) - if 'namespace' in params: - namespace = params.pop('namespace') - params['owner'] = namespace.owner - params['app'] = namespace.app - params['sharing'] = namespace.sharing - try: - self.service.delete(_path(self.path, name), **params) - except HTTPError as he: - # An HTTPError with status code 404 means that the entity - # has already been deleted, and we reraise it as a - # KeyError. - if he.status == 404: - raise KeyError(f"No such entity {name}") - else: - raise - return self - - def get(self, name="", owner=None, app=None, sharing=None, **query): - """Performs a GET request to the server on the collection. - - If *owner*, *app*, and *sharing* are omitted, this method takes a - default namespace from the :class:`Service` object for this :class:`Endpoint`. - All other keyword arguments are included in the URL as query parameters. - - :raises AuthenticationError: Raised when the ``Service`` is not logged in. - :raises HTTPError: Raised when an error in the request occurs. - :param path_segment: A path segment relative to this endpoint. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode for the namespace (optional). - :type sharing: "global", "system", "app", or "user" - :param query: All other keyword arguments, which are used as query - parameters. - :type query: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - import splunklib.client - s = client.service(...) - saved_searches = s.saved_searches - saved_searches.get("my/saved/search") == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '26208'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 16:30:35 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'OK', - 'status': 200} - saved_searches.get('nonexistant/search') # raises HTTPError - s.logout() - saved_searches.get() # raises AuthenticationError - - """ - name = UrlEncoded(name, encode_slash=True) - return super().get(name, owner, app, sharing, **query) - - -class ConfigurationFile(Collection): - """This class contains all of the stanzas from one configuration file. - """ - - # __init__'s arguments must match those of an Entity, not a - # Collection, since it is being created as the elements of a - # Configurations, which is a Collection subclass. - def __init__(self, service, path, **kwargs): - Collection.__init__(self, service, path, item=Stanza) - self.name = kwargs['state']['title'] - - -class Configurations(Collection): - """This class provides access to the configuration files from this Splunk - instance. Retrieve this collection using :meth:`Service.confs`. - - Splunk's configuration is divided into files, and each file into - stanzas. This collection is unusual in that the values in it are - themselves collections of :class:`ConfigurationFile` objects. 
- """ - - def __init__(self, service): - Collection.__init__(self, service, PATH_PROPERTIES, item=ConfigurationFile) - if self.service.namespace.owner == '-' or self.service.namespace.app == '-': - raise ValueError("Configurations cannot have wildcards in namespace.") - - def __getitem__(self, key): - # The superclass implementation is designed for collections that contain - # entities. This collection (Configurations) contains collections - # (ConfigurationFile). - # - # The configurations endpoint returns multiple entities when we ask for a single file. - # This screws up the default implementation of __getitem__ from Collection, which thinks - # that multiple entities means a name collision, so we have to override it here. - try: - self.get(key) - return ConfigurationFile(self.service, PATH_CONF % key, state={'title': key}) - except HTTPError as he: - if he.status == 404: # No entity matching key - raise KeyError(key) - else: - raise - - def __contains__(self, key): - # configs/conf-{name} never returns a 404. We have to post to properties/{name} - # in order to find out if a configuration exists. - try: - self.get(key) - return True - except HTTPError as he: - if he.status == 404: # No entity matching key - return False - raise - - def create(self, name): - """ Creates a configuration file named *name*. - - If there is already a configuration file with that name, - the existing file is returned. - - :param name: The name of the configuration file. - :type name: ``string`` - - :return: The :class:`ConfigurationFile` object. - """ - # This has to be overridden to handle the plumbing of creating - # a ConfigurationFile (which is a Collection) instead of some - # Entity. - if not isinstance(name, str): - raise ValueError(f"Invalid name: {repr(name)}") - response = self.post(__conf=name) - if response.status == 303: - return self[name] - if response.status == 201: - return ConfigurationFile(self.service, PATH_CONF % name, item=Stanza, state={'title': name}) - raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") - - def delete(self, key): - """Raises `IllegalOperationException`.""" - raise IllegalOperationException("Cannot delete configuration files from the REST API.") - - def _entity_path(self, state): - # Overridden to make all the ConfigurationFile objects - # returned refer to the configs/ path instead of the - # properties/ path used by Configrations. - return PATH_CONF % state['title'] - - -class Stanza(Entity): - """This class contains a single configuration stanza.""" - - def submit(self, stanza): - """Adds keys to the current configuration stanza as a - dictionary of key-value pairs. - - :param stanza: A dictionary of key-value pairs for the stanza. - :type stanza: ``dict`` - :return: The :class:`Stanza` object. - """ - body = _encode(**stanza) - self.service.post(self.path, body=body) - return self - - def __len__(self): - # The stanza endpoint returns all the keys at the same level in the XML as the eai information - # and 'disabled', so to get an accurate length, we have to filter those out and have just - # the stanza keys. - return len([x for x in list(self._state.content.keys()) - if not x.startswith('eai') and x != 'disabled']) - - -class StoragePassword(Entity): - """This class contains a storage password. 
- """ - - def __init__(self, service, path, **kwargs): - state = kwargs.get('state', None) - kwargs['skip_refresh'] = kwargs.get('skip_refresh', state is not None) - super().__init__(service, path, **kwargs) - self._state = state - - @property - def clear_password(self): - return self.content.get('clear_password') - - @property - def encrypted_password(self): - return self.content.get('encr_password') - - @property - def realm(self): - return self.content.get('realm') - - @property - def username(self): - return self.content.get('username') - - -class StoragePasswords(Collection): - """This class provides access to the storage passwords from this Splunk - instance. Retrieve this collection using :meth:`Service.storage_passwords`. - """ - - def __init__(self, service): - if service.namespace.owner == '-' or service.namespace.app == '-': - raise ValueError("StoragePasswords cannot have wildcards in namespace.") - super().__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) - - def create(self, password, username, realm=None): - """ Creates a storage password. - - A `StoragePassword` can be identified by , or by : if the - optional realm parameter is also provided. - - :param password: The password for the credentials - this is the only part of the credentials that will be stored securely. - :type name: ``string`` - :param username: The username for the credentials. - :type name: ``string`` - :param realm: The credential realm. (optional) - :type name: ``string`` - - :return: The :class:`StoragePassword` object created. - """ - if not isinstance(username, str): - raise ValueError(f"Invalid name: {repr(username)}") - - if realm is None: - response = self.post(password=password, name=username) - else: - response = self.post(password=password, realm=realm, name=username) - - if response.status != 201: - raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") - - entries = _load_atom_entries(response) - state = _parse_atom_entry(entries[0]) - storage_password = StoragePassword(self.service, self._entity_path(state), state=state, skip_refresh=True) - - return storage_password - - def delete(self, username, realm=None): - """Delete a storage password by username and/or realm. - - The identifier can be passed in through the username parameter as - or :, but the preferred way is by - passing in the username and realm parameters. - - :param username: The username for the credentials, or : if the realm parameter is omitted. - :type name: ``string`` - :param realm: The credential realm. (optional) - :type name: ``string`` - :return: The `StoragePassword` collection. - :rtype: ``self`` - """ - if realm is None: - # This case makes the username optional, so - # the full name can be passed in as realm. - # Assume it's already encoded. - name = username - else: - # Encode each component separately - name = UrlEncoded(realm, encode_slash=True) + ":" + UrlEncoded(username, encode_slash=True) - - # Append the : expected at the end of the name - if name[-1] != ":": - name = name + ":" - return Collection.delete(self, name) - - -class AlertGroup(Entity): - """This class represents a group of fired alerts for a saved search. Access - it using the :meth:`alerts` property.""" - - def __init__(self, service, path, **kwargs): - Entity.__init__(self, service, path, **kwargs) - - def __len__(self): - return self.count - - @property - def alerts(self): - """Returns a collection of triggered alerts. - - :return: A :class:`Collection` of triggered alerts. 
- """ - return Collection(self.service, self.path) - - @property - def count(self): - """Returns the count of triggered alerts. - - :return: The triggered alert count. - :rtype: ``integer`` - """ - return int(self.content.get('triggered_alert_count', 0)) - - -class Indexes(Collection): - """This class contains the collection of indexes in this Splunk instance. - Retrieve this collection using :meth:`Service.indexes`. - """ - - def get_default(self): - """ Returns the name of the default index. - - :return: The name of the default index. - - """ - index = self['_audit'] - return index['defaultDatabase'] - - def delete(self, name): - """ Deletes a given index. - - **Note**: This method is only supported in Splunk 5.0 and later. - - :param name: The name of the index to delete. - :type name: ``string`` - """ - if self.service.splunk_version >= (5,): - Collection.delete(self, name) - else: - raise IllegalOperationException("Deleting indexes via the REST API is " - "not supported before Splunk version 5.") - - -class Index(Entity): - """This class represents an index and provides different operations, such as - cleaning the index, writing to the index, and so forth.""" - - def __init__(self, service, path, **kwargs): - Entity.__init__(self, service, path, **kwargs) - - def attach(self, host=None, source=None, sourcetype=None): - """Opens a stream (a writable socket) for writing events to the index. - - :param host: The host value for events written to the stream. - :type host: ``string`` - :param source: The source value for events written to the stream. - :type source: ``string`` - :param sourcetype: The sourcetype value for events written to the - stream. - :type sourcetype: ``string`` - - :return: A writable socket. - """ - args = {'index': self.name} - if host is not None: args['host'] = host - if source is not None: args['source'] = source - if sourcetype is not None: args['sourcetype'] = sourcetype - path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + parse.urlencode(args), skip_encode=True) - - cookie_header = self.service.token if self.service.token is _NoAuthenticationToken else self.service.token.replace("Splunk ", "") - cookie_or_auth_header = f"Authorization: Splunk {cookie_header}\r\n" - - # If we have cookie(s), use them instead of "Authorization: ..." - if self.service.has_cookies(): - cookie_header = _make_cookie_header(list(self.service.get_cookies().items())) - cookie_or_auth_header = f"Cookie: {cookie_header}\r\n" - - # Since we need to stream to the index connection, we have to keep - # the connection open and use the Splunk extension headers to note - # the input mode - sock = self.service.connect() - headers = [f"POST {str(self.service._abspath(path))} HTTP/1.1\r\n".encode('utf-8'), - f"Host: {self.service.host}:{int(self.service.port)}\r\n".encode('utf-8'), - b"Accept-Encoding: identity\r\n", - cookie_or_auth_header.encode('utf-8'), - b"X-Splunk-Input-Mode: Streaming\r\n", - b"\r\n"] - - for h in headers: - sock.write(h) - return sock - - @contextlib.contextmanager - def attached_socket(self, *args, **kwargs): - """Opens a raw socket in a ``with`` block to write data to Splunk. - - The arguments are identical to those for :meth:`attach`. The socket is - automatically closed at the end of the ``with`` block, even if an - exception is raised in the block. - - :param host: The host value for events written to the stream. - :type host: ``string`` - :param source: The source value for events written to the stream. 
- :type source: ``string`` - :param sourcetype: The sourcetype value for events written to the - stream. - :type sourcetype: ``string`` - - :returns: Nothing. - - **Example**:: - - import splunklib.client as client - s = client.connect(...) - index = s.indexes['some_index'] - with index.attached_socket(sourcetype='test') as sock: - sock.send('Test event\\r\\n') - - """ - try: - sock = self.attach(*args, **kwargs) - yield sock - finally: - sock.shutdown(socket.SHUT_RDWR) - sock.close() - - def clean(self, timeout=60): - """Deletes the contents of the index. - - This method blocks until the index is empty, because it needs to restore - values at the end of the operation. - - :param timeout: The time-out period for the operation, in seconds (the - default is 60). - :type timeout: ``integer`` - - :return: The :class:`Index`. - """ - self.refresh() - - tds = self['maxTotalDataSizeMB'] - ftp = self['frozenTimePeriodInSecs'] - was_disabled_initially = self.disabled - try: - if not was_disabled_initially and self.service.splunk_version < (5,): - # Need to disable the index first on Splunk 4.x, - # but it doesn't work to disable it on 5.0. - self.disable() - self.update(maxTotalDataSizeMB=1, frozenTimePeriodInSecs=1) - self.roll_hot_buckets() - - # Wait until event count goes to 0. - start = datetime.now() - diff = timedelta(seconds=timeout) - while self.content.totalEventCount != '0' and datetime.now() < start + diff: - sleep(1) - self.refresh() - - if self.content.totalEventCount != '0': - raise OperationError( - f"Cleaning index {self.name} took longer than {timeout} seconds; timing out.") - finally: - # Restore original values - self.update(maxTotalDataSizeMB=tds, frozenTimePeriodInSecs=ftp) - if not was_disabled_initially and self.service.splunk_version < (5,): - # Re-enable the index if it was originally enabled and we messed with it. - self.enable() - - return self - - def roll_hot_buckets(self): - """Performs rolling hot buckets for this index. - - :return: The :class:`Index`. - """ - self.post("roll-hot-buckets") - return self - - def submit(self, event, host=None, source=None, sourcetype=None): - """Submits a single event to the index using ``HTTP POST``. - - :param event: The event to submit. - :type event: ``string`` - :param `host`: The host value of the event. - :type host: ``string`` - :param `source`: The source value of the event. - :type source: ``string`` - :param `sourcetype`: The sourcetype value of the event. - :type sourcetype: ``string`` - - :return: The :class:`Index`. - """ - args = {'index': self.name} - if host is not None: args['host'] = host - if source is not None: args['source'] = source - if sourcetype is not None: args['sourcetype'] = sourcetype - - self.service.post(PATH_RECEIVERS_SIMPLE, body=event, **args) - return self - - # kwargs: host, host_regex, host_segment, rename-source, sourcetype - def upload(self, filename, **kwargs): - """Uploads a file for immediate indexing. - - **Note**: The file must be locally accessible from the server. - - :param filename: The name of the file to upload. The file can be a - plain, compressed, or archived file. - :type filename: ``string`` - :param kwargs: Additional arguments (optional). For more about the - available parameters, see `Index parameters `_ on Splunk Developer Portal. - :type kwargs: ``dict`` - - :return: The :class:`Index`. 
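``submit`` above has no example, so here is a minimal sketch of one-shot event submission followed by a bounded ``clean`` (placeholder connection arguments; the index name is deliberately fake, since cleaning destroys data)::

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changed!")  # hypothetical

    index = service.indexes["sdk_scratch"]  # hypothetical index
    index.submit("test event 1", sourcetype="sdk-test", host="dev-box")

    # clean() temporarily shrinks maxTotalDataSizeMB/frozenTimePeriodInSecs to
    # force events out, then restores them; allow a generous timeout.
    index.clean(timeout=120)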
- """ - kwargs['index'] = self.name - path = 'data/inputs/oneshot' - self.service.post(path, name=filename, **kwargs) - return self - - -class Input(Entity): - """This class represents a Splunk input. This class is the base for all - typed input classes and is also used when the client does not recognize an - input kind. - """ - - def __init__(self, service, path, kind=None, **kwargs): - # kind can be omitted (in which case it is inferred from the path) - # Otherwise, valid values are the paths from data/inputs ("udp", - # "monitor", "tcp/raw"), or two special cases: "tcp" (which is "tcp/raw") - # and "splunktcp" (which is "tcp/cooked"). - Entity.__init__(self, service, path, **kwargs) - if kind is None: - path_segments = path.split('/') - i = path_segments.index('inputs') + 1 - if path_segments[i] == 'tcp': - self.kind = path_segments[i] + '/' + path_segments[i + 1] - else: - self.kind = path_segments[i] - else: - self.kind = kind - - # Handle old input kind names. - if self.kind == 'tcp': - self.kind = 'tcp/raw' - if self.kind == 'splunktcp': - self.kind = 'tcp/cooked' - - def update(self, **kwargs): - """Updates the server with any changes you've made to the current input - along with any additional arguments you specify. - - :param kwargs: Additional arguments (optional). For more about the - available parameters, see `Input parameters `_ on Splunk Developer Portal. - :type kwargs: ``dict`` - - :return: The input this method was called on. - :rtype: class:`Input` - """ - # UDP and TCP inputs require special handling due to their restrictToHost - # field. For all other inputs kinds, we can dispatch to the superclass method. - if self.kind not in ['tcp', 'splunktcp', 'tcp/raw', 'tcp/cooked', 'udp']: - return super().update(**kwargs) - else: - # The behavior of restrictToHost is inconsistent across input kinds and versions of Splunk. - # In Splunk 4.x, the name of the entity is only the port, independent of the value of - # restrictToHost. In Splunk 5.0 this changed so the name will be of the form :. - # In 5.0 and 5.0.1, if you don't supply the restrictToHost value on every update, it will - # remove the host restriction from the input. As of 5.0.2 you simply can't change restrictToHost - # on an existing input. - - # The logic to handle all these cases: - # - Throw an exception if the user tries to set restrictToHost on an existing input - # for *any* version of Splunk. - # - Set the existing restrictToHost value on the update args internally so we don't - # cause it to change in Splunk 5.0 and 5.0.1. - to_update = kwargs.copy() - - if 'restrictToHost' in kwargs: - raise IllegalOperationException("Cannot set restrictToHost on an existing input with the SDK.") - if 'restrictToHost' in self._state.content and self.kind != 'udp': - to_update['restrictToHost'] = self._state.content['restrictToHost'] - - # Do the actual update operation. - return super().update(**to_update) - - -# Inputs is a "kinded" collection, which is a heterogenous collection where -# each item is tagged with a kind, that provides a single merged view of all -# input kinds. -class Inputs(Collection): - """This class represents a collection of inputs. The collection is - heterogeneous and each member of the collection contains a *kind* property - that indicates the specific type of input. 
-    Retrieve this collection using :meth:`Service.inputs`."""
-
-    def __init__(self, service, kindmap=None):
-        Collection.__init__(self, service, PATH_INPUTS, item=Input)
-
-    def __getitem__(self, key):
-        # The key needed to retrieve the input needs its parentheses to be URL encoded
-        # based on the REST API for input
-        # <endpoint>/<input name>
-        if isinstance(key, tuple) and len(key) == 2:
-            # Fetch a single kind
-            key, kind = key
-            key = UrlEncoded(key, encode_slash=True)
-            try:
-                response = self.get(self.kindpath(kind) + "/" + key)
-                entries = self._load_list(response)
-                if len(entries) > 1:
-                    raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.")
-                if len(entries) == 0:
-                    raise KeyError((key, kind))
-                return entries[0]
-            except HTTPError as he:
-                if he.status == 404:  # No entity matching kind and key
-                    raise KeyError((key, kind))
-                else:
-                    raise
-        else:
-            # Iterate over all the kinds looking for matches.
-            kind = None
-            candidate = None
-            key = UrlEncoded(key, encode_slash=True)
-            for kind in self.kinds:
-                try:
-                    response = self.get(kind + "/" + key)
-                    entries = self._load_list(response)
-                    if len(entries) > 1:
-                        raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.")
-                    if len(entries) == 0:
-                        continue  # No match of this kind; carry on.
-                    if candidate is not None:  # Already found at least one candidate
-                        raise AmbiguousReferenceException(
-                            f"Found multiple inputs named {key}, please specify a kind")
-                    candidate = entries[0]
-                except HTTPError as he:
-                    if he.status == 404:
-                        pass  # Just carry on to the next kind.
-                    else:
-                        raise
-            if candidate is None:
-                raise KeyError(key)  # Never found a match.
-            return candidate
-
-    def __contains__(self, key):
-        if isinstance(key, tuple) and len(key) == 2:
-            # If we specify a kind, this will shortcut properly
-            try:
-                self.__getitem__(key)
-                return True
-            except KeyError:
-                return False
-        else:
-            # Without a kind, we want to minimize the number of round trips to the server, so we
-            # reimplement some of the behavior of __getitem__ in order to be able to stop searching
-            # on the first hit.
-            for kind in self.kinds:
-                try:
-                    response = self.get(self.kindpath(kind) + "/" + key)
-                    entries = self._load_list(response)
-                    if len(entries) > 0:
-                        return True
-                except HTTPError as he:
-                    if he.status == 404:
-                        pass  # Just carry on to the next kind.
-                    else:
-                        raise
-            return False
-
-    def create(self, name, kind, **kwargs):
-        """Creates an input of a specific kind in this collection, with any
-        arguments you specify.
-
-        :param `name`: The input name.
-        :type name: ``string``
-        :param `kind`: The kind of input:
-
-            - "ad": Active Directory
-
-            - "monitor": Files and directories
-
-            - "registry": Windows Registry
-
-            - "script": Scripts
-
-            - "splunktcp": TCP, processed
-
-            - "tcp": TCP, unprocessed
-
-            - "udp": UDP
-
-            - "win-event-log-collections": Windows event log
-
-            - "win-perfmon": Performance monitoring
-
-            - "win-wmi-collections": WMI
-
-        :type kind: ``string``
-        :param `kwargs`: Additional arguments (optional). For more about the
-            available parameters, see `Input parameters `_ on Splunk Developer Portal.
-
-        :type kwargs: ``dict``
-
-        :return: The new :class:`Input`.
-        """
-        kindpath = self.kindpath(kind)
-        self.post(kindpath, name=name, **kwargs)
-
-        # If we created an input with restrictToHost set, then
-        # its path will be <restrictToHost>:<name>, not just <name>,
-        # and we have to adjust accordingly.
-
-        # Url encodes the name of the entity.
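Lookups against this merged collection are cheapest when the kind is given, since the tuple form goes straight to one endpoint instead of probing every kind. A sketch (placeholder connection arguments; the monitor input shown ships with a default Splunk install)::

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changed!")  # hypothetical

    inputs = service.inputs

    # One request: the kind is known.
    monitor = inputs["$SPLUNK_HOME/var/log/splunk", "monitor"]

    # Up to one request per kind: name only; raises if the name is ambiguous.
    same = inputs["$SPLUNK_HOME/var/log/splunk"]
    print(("$SPLUNK_HOME/var/log/splunk", "monitor") in inputs)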
- name = UrlEncoded(name, encode_slash=True) - path = _path( - self.path + kindpath, - f"{kwargs['restrictToHost']}:{name}" if 'restrictToHost' in kwargs else name - ) - return Input(self.service, path, kind) - - def delete(self, name, kind=None): - """Removes an input from the collection. - - :param `kind`: The kind of input: - - - "ad": Active Directory - - - "monitor": Files and directories - - - "registry": Windows Registry - - - "script": Scripts - - - "splunktcp": TCP, processed - - - "tcp": TCP, unprocessed - - - "udp": UDP - - - "win-event-log-collections": Windows event log - - - "win-perfmon": Performance monitoring - - - "win-wmi-collections": WMI - - :type kind: ``string`` - :param name: The name of the input to remove. - :type name: ``string`` - - :return: The :class:`Inputs` collection. - """ - if kind is None: - self.service.delete(self[name].path) - else: - self.service.delete(self[name, kind].path) - return self - - def itemmeta(self, kind): - """Returns metadata for the members of a given kind. - - :param `kind`: The kind of input: - - - "ad": Active Directory - - - "monitor": Files and directories - - - "registry": Windows Registry - - - "script": Scripts - - - "splunktcp": TCP, processed - - - "tcp": TCP, unprocessed - - - "udp": UDP - - - "win-event-log-collections": Windows event log - - - "win-perfmon": Performance monitoring - - - "win-wmi-collections": WMI - - :type kind: ``string`` - - :return: The metadata. - :rtype: class:``splunklib.data.Record`` - """ - response = self.get(f"{self._kindmap[kind]}/_new") - content = _load_atom(response, MATCH_ENTRY_CONTENT) - return _parse_atom_metadata(content) - - def _get_kind_list(self, subpath=None): - if subpath is None: - subpath = [] - - kinds = [] - response = self.get('/'.join(subpath)) - content = _load_atom_entries(response) - for entry in content: - this_subpath = subpath + [entry.title] - # The "all" endpoint doesn't work yet. - # The "tcp/ssl" endpoint is not a real input collection. - if entry.title == 'all' or this_subpath == ['tcp', 'ssl']: - continue - if 'create' in [x.rel for x in entry.link]: - path = '/'.join(subpath + [entry.title]) - kinds.append(path) - else: - subkinds = self._get_kind_list(subpath + [entry.title]) - kinds.extend(subkinds) - return kinds - - @property - def kinds(self): - """Returns the input kinds on this Splunk instance. - - :return: The list of input kinds. - :rtype: ``list`` - """ - return self._get_kind_list() - - def kindpath(self, kind): - """Returns a path to the resources for a given input kind. - - :param `kind`: The kind of input: - - - "ad": Active Directory - - - "monitor": Files and directories - - - "registry": Windows Registry - - - "script": Scripts - - - "splunktcp": TCP, processed - - - "tcp": TCP, unprocessed - - - "udp": UDP - - - "win-event-log-collections": Windows event log - - - "win-perfmon": Performance monitoring - - - "win-wmi-collections": WMI - - :type kind: ``string`` - - :return: The relative endpoint path. - :rtype: ``string`` - """ - if kind == 'tcp': - return UrlEncoded('tcp/raw', skip_encode=True) - if kind == 'splunktcp': - return UrlEncoded('tcp/cooked', skip_encode=True) - return UrlEncoded(kind, skip_encode=True) - - def list(self, *kinds, **kwargs): - """Returns a list of inputs that are in the :class:`Inputs` collection. - You can also filter by one or more input kinds. - - This function iterates over all possible inputs, regardless of any arguments you - specify. 
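The kind aliases resolve before any request is made, which also determines the path of the entity that ``create`` returns. A sketch (placeholder connection arguments; the port number is illustrative)::

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changed!")  # hypothetical

    inputs = service.inputs
    print(inputs.kindpath("tcp"))        # tcp/raw
    print(inputs.kindpath("splunktcp"))  # tcp/cooked

    tcp_input = inputs.create("10999", "tcp", sourcetype="sdk-test")
    print(tcp_input.kind, tcp_input.name)  # tcp/raw 10999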
Because the :class:`Inputs` collection is the union of all the inputs of each - kind, this method implements parameters such as "count", "search", and so - on at the Python level once all the data has been fetched. The exception - is when you specify a single input kind, and then this method makes a single request - with the usual semantics for parameters. - - :param kinds: The input kinds to return (optional). - - - "ad": Active Directory - - - "monitor": Files and directories - - - "registry": Windows Registry - - - "script": Scripts - - - "splunktcp": TCP, processed - - - "tcp": TCP, unprocessed - - - "udp": UDP - - - "win-event-log-collections": Windows event log - - - "win-perfmon": Performance monitoring - - - "win-wmi-collections": WMI - - :type kinds: ``string`` - :param kwargs: Additional arguments (optional): - - - "count" (``integer``): The maximum number of items to return. - - - "offset" (``integer``): The offset of the first item to return. - - - "search" (``string``): The search query to filter responses. - - - "sort_dir" (``string``): The direction to sort returned items: - "asc" or "desc". - - - "sort_key" (``string``): The field to use for sorting (optional). - - - "sort_mode" (``string``): The collating sequence for sorting - returned items: "auto", "alpha", "alpha_case", or "num". - - :type kwargs: ``dict`` - - :return: A list of input kinds. - :rtype: ``list`` - """ - if len(kinds) == 0: - kinds = self.kinds - if len(kinds) == 1: - kind = kinds[0] - logger.debug("Inputs.list taking short circuit branch for single kind.") - path = self.kindpath(kind) - logger.debug("Path for inputs: %s", path) - try: - path = UrlEncoded(path, skip_encode=True) - response = self.get(path, **kwargs) - except HTTPError as he: - if he.status == 404: # No inputs of this kind - return [] - entities = [] - entries = _load_atom_entries(response) - if entries is None: - return [] # No inputs in a collection comes back with no feed or entry in the XML - for entry in entries: - state = _parse_atom_entry(entry) - # Unquote the URL, since all URL encoded in the SDK - # should be of type UrlEncoded, and all str should not - # be URL encoded. - path = parse.unquote(state.links.alternate) - entity = Input(self.service, path, kind, state=state) - entities.append(entity) - return entities - - search = kwargs.get('search', '*') - - entities = [] - for kind in kinds: - response = None - try: - kind = UrlEncoded(kind, skip_encode=True) - response = self.get(self.kindpath(kind), search=search) - except HTTPError as e: - if e.status == 404: - continue # No inputs of this kind - else: - raise - - entries = _load_atom_entries(response) - if entries is None: continue # No inputs to process - for entry in entries: - state = _parse_atom_entry(entry) - # Unquote the URL, since all URL encoded in the SDK - # should be of type UrlEncoded, and all str should not - # be URL encoded. 
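# Editor's sketch, not part of the patch: the single-kind short circuit
# described above issues one request with server-side paging, while the
# multi-kind form fetches everything and filters in Python. Assumes a
# connected `service` as in the earlier sketches.
few = service.inputs.list('monitor', count=5)        # one round trip
everything = service.inputs.list(sort_mode='alpha')  # one request per kind
for item in few:
    print(item.kind, item.name)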
- path = parse.unquote(state.links.alternate) - entity = Input(self.service, path, kind, state=state) - entities.append(entity) - if 'offset' in kwargs: - entities = entities[kwargs['offset']:] - if 'count' in kwargs: - entities = entities[:kwargs['count']] - if kwargs.get('sort_mode', None) == 'alpha': - sort_field = kwargs.get('sort_field', 'name') - if sort_field == 'name': - f = lambda x: x.name.lower() - else: - f = lambda x: x[sort_field].lower() - entities = sorted(entities, key=f) - if kwargs.get('sort_mode', None) == 'alpha_case': - sort_field = kwargs.get('sort_field', 'name') - if sort_field == 'name': - f = lambda x: x.name - else: - f = lambda x: x[sort_field] - entities = sorted(entities, key=f) - if kwargs.get('sort_dir', 'asc') == 'desc': - entities = list(reversed(entities)) - return entities - - def __iter__(self, **kwargs): - for item in self.iter(**kwargs): - yield item - - def iter(self, **kwargs): - """ Iterates over the collection of inputs. - - :param kwargs: Additional arguments (optional): - - - "count" (``integer``): The maximum number of items to return. - - - "offset" (``integer``): The offset of the first item to return. - - - "search" (``string``): The search query to filter responses. - - - "sort_dir" (``string``): The direction to sort returned items: - "asc" or "desc". - - - "sort_key" (``string``): The field to use for sorting (optional). - - - "sort_mode" (``string``): The collating sequence for sorting - returned items: "auto", "alpha", "alpha_case", or "num". - - :type kwargs: ``dict`` - """ - for item in self.list(**kwargs): - yield item - - def oneshot(self, path, **kwargs): - """ Creates a oneshot data input, which is an upload of a single file - for one-time indexing. - - :param path: The path and filename. - :type path: ``string`` - :param kwargs: Additional arguments (optional). For more about the - available parameters, see `Input parameters `_ on Splunk Developer Portal. - :type kwargs: ``dict`` - """ - self.post('oneshot', name=path, **kwargs) - - -class Job(Entity): - """This class represents a search job.""" - - def __init__(self, service, sid, **kwargs): - # Default to v2 in Splunk Version 9+ - path = "{path}{sid}" - # Formatting path based on the Splunk Version - if service.disable_v2_api: - path = path.format(path=PATH_JOBS, sid=sid) - else: - path = path.format(path=PATH_JOBS_V2, sid=sid) - - Entity.__init__(self, service, path, skip_refresh=True, **kwargs) - self.sid = sid - - # The Job entry record is returned at the root of the response - def _load_atom_entry(self, response): - return _load_atom(response).entry - - def cancel(self): - """Stops the current search and deletes the results cache. - - :return: The :class:`Job`. - """ - try: - self.post("control", action="cancel") - except HTTPError as he: - if he.status == 404: - # The job has already been cancelled, so - # cancelling it twice is a nop. - pass - else: - raise - return self - - def disable_preview(self): - """Disables preview for this job. - - :return: The :class:`Job`. - """ - self.post("control", action="disablepreview") - return self - - def enable_preview(self): - """Enables preview for this job. - - **Note**: Enabling preview might slow search considerably. - - :return: The :class:`Job`. - """ - self.post("control", action="enablepreview") - return self - - def events(self, **kwargs): - """Returns a streaming handle to this job's events. - - :param kwargs: Additional parameters (optional). 
For a list of valid - parameters, see `GET search/jobs/{search_id}/events - `_ - in the REST API documentation. - :type kwargs: ``dict`` - - :return: The ``InputStream`` IO handle to this job's events. - """ - kwargs['segmentation'] = kwargs.get('segmentation', 'none') - - # Search API v1(GET) and v2(POST) - if self.service.disable_v2_api: - return self.get("events", **kwargs).body - return self.post("events", **kwargs).body - - def finalize(self): - """Stops the job and provides intermediate results for retrieval. - - :return: The :class:`Job`. - """ - self.post("control", action="finalize") - return self - - def is_done(self): - """Indicates whether this job finished running. - - :return: ``True`` if the job is done, ``False`` if not. - :rtype: ``boolean`` - """ - if not self.is_ready(): - return False - done = (self._state.content['isDone'] == '1') - return done - - def is_ready(self): - """Indicates whether this job is ready for querying. - - :return: ``True`` if the job is ready, ``False`` if not. - :rtype: ``boolean`` - - """ - response = self.get() - if response.status == 204: - return False - self._state = self.read(response) - ready = self._state.content['dispatchState'] not in ['QUEUED', 'PARSING'] - return ready - - @property - def name(self): - """Returns the name of the search job, which is the search ID (SID). - - :return: The search ID. - :rtype: ``string`` - """ - return self.sid - - def pause(self): - """Suspends the current search. - - :return: The :class:`Job`. - """ - self.post("control", action="pause") - return self - - def results(self, **query_params): - """Returns a streaming handle to this job's search results. To get a nice, Pythonic iterator, pass the handle - to :class:`splunklib.results.JSONResultsReader` along with the query param "output_mode='json'", as in:: - - import splunklib.client as client - import splunklib.results as results - from time import sleep - service = client.connect(...) - job = service.jobs.create("search * | head 5") - while not job.is_done(): - sleep(.2) - rr = results.JSONResultsReader(job.results(output_mode='json')) - for result in rr: - if isinstance(result, results.Message): - # Diagnostic messages may be returned in the results - print(f'{result.type}: {result.message}') - elif isinstance(result, dict): - # Normal events are returned as dicts - print(result) - assert rr.is_preview == False - - Results are not available until the job has finished. If called on - an unfinished job, the result is an empty event set. - - This method makes a single roundtrip - to the server, plus at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param query_params: Additional parameters (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/results - `_. - :type query_params: ``dict`` - - :return: The ``InputStream`` IO handle to this job's results. - """ - query_params['segmentation'] = query_params.get('segmentation', 'none') - - # Search API v1(GET) and v2(POST) - if self.service.disable_v2_api: - return self.get("results", **query_params).body - return self.post("results", **query_params).body - - def preview(self, **query_params): - """Returns a streaming handle to this job's preview search results. 
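# Editor's sketch, not part of the patch: the is_ready()/is_done()
# distinction above. A job must leave the QUEUED/PARSING states before its
# properties can be read; only later does it report isDone. Assumes a
# connected `service`; the query is illustrative.
from time import sleep

job = service.jobs.create("search index=_internal | head 10")
while not job.is_ready():
    sleep(0.2)   # not yet queryable
while not job.is_done():
    sleep(0.2)   # queryable, still running
print(job.sid)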
- - Unlike :class:`splunklib.results.JSONResultsReader`along with the query param "output_mode='json'", - which requires a job to be finished to return any results, the ``preview`` method returns any results that - have been generated so far, whether the job is running or not. The returned search results are the raw data - from the server. Pass the handle returned to :class:`splunklib.results.JSONResultsReader` to get a nice, - Pythonic iterator over objects, as in:: - - import splunklib.client as client - import splunklib.results as results - service = client.connect(...) - job = service.jobs.create("search * | head 5") - rr = results.JSONResultsReader(job.preview(output_mode='json')) - for result in rr: - if isinstance(result, results.Message): - # Diagnostic messages may be returned in the results - print(f'{result.type}: {result.message}') - elif isinstance(result, dict): - # Normal events are returned as dicts - print(result) - if rr.is_preview: - print("Preview of a running search job.") - else: - print("Job is finished. Results are final.") - - This method makes one roundtrip to the server, plus at most - two more if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param query_params: Additional parameters (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/results_preview - `_ - in the REST API documentation. - :type query_params: ``dict`` - - :return: The ``InputStream`` IO handle to this job's preview results. - """ - query_params['segmentation'] = query_params.get('segmentation', 'none') - - # Search API v1(GET) and v2(POST) - if self.service.disable_v2_api: - return self.get("results_preview", **query_params).body - return self.post("results_preview", **query_params).body - - def searchlog(self, **kwargs): - """Returns a streaming handle to this job's search log. - - :param `kwargs`: Additional parameters (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/search.log - `_ - in the REST API documentation. - :type kwargs: ``dict`` - - :return: The ``InputStream`` IO handle to this job's search log. - """ - return self.get("search.log", **kwargs).body - - def set_priority(self, value): - """Sets this job's search priority in the range of 0-10. - - Higher numbers indicate higher priority. Unless splunkd is - running as *root*, you can only decrease the priority of a running job. - - :param `value`: The search priority. - :type value: ``integer`` - - :return: The :class:`Job`. - """ - self.post('control', action="setpriority", priority=value) - return self - - def summary(self, **kwargs): - """Returns a streaming handle to this job's summary. - - :param `kwargs`: Additional parameters (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/summary - `_ - in the REST API documentation. - :type kwargs: ``dict`` - - :return: The ``InputStream`` IO handle to this job's summary. - """ - return self.get("summary", **kwargs).body - - def timeline(self, **kwargs): - """Returns a streaming handle to this job's timeline results. - - :param `kwargs`: Additional timeline arguments (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/timeline - `_ - in the REST API documentation. - :type kwargs: ``dict`` - - :return: The ``InputStream`` IO handle to this job's timeline. - """ - return self.get("timeline", **kwargs).body - - def touch(self): - """Extends the expiration time of the search to the current time (now) plus - the time-to-live (ttl) value. 
- - :return: The :class:`Job`. - """ - self.post("control", action="touch") - return self - - def set_ttl(self, value): - """Set the job's time-to-live (ttl) value, which is the time before the - search job expires and is still available. - - :param `value`: The ttl value, in seconds. - :type value: ``integer`` - - :return: The :class:`Job`. - """ - self.post("control", action="setttl", ttl=value) - return self - - def unpause(self): - """Resumes the current search, if paused. - - :return: The :class:`Job`. - """ - self.post("control", action="unpause") - return self - - -class Jobs(Collection): - """This class represents a collection of search jobs. Retrieve this - collection using :meth:`Service.jobs`.""" - - def __init__(self, service): - # Splunk 9 introduces the v2 endpoint - if not service.disable_v2_api: - path = PATH_JOBS_V2 - else: - path = PATH_JOBS - Collection.__init__(self, service, path, item=Job) - # The count value to say list all the contents of this - # Collection is 0, not -1 as it is on most. - self.null_count = 0 - - def _load_list(self, response): - # Overridden because Job takes a sid instead of a path. - entries = _load_atom_entries(response) - if entries is None: return [] - entities = [] - for entry in entries: - state = _parse_atom_entry(entry) - entity = self.item( - self.service, - entry['content']['sid'], - state=state) - entities.append(entity) - return entities - - def create(self, query, **kwargs): - """ Creates a search using a search query and any additional parameters - you provide. - - :param query: The search query. - :type query: ``string`` - :param kwargs: Additiona parameters (optional). For a list of available - parameters, see `Search job parameters - `_ - on Splunk Developer Portal. - :type kwargs: ``dict`` - - :return: The :class:`Job`. - """ - if kwargs.get("exec_mode", None) == "oneshot": - raise TypeError("Cannot specify exec_mode=oneshot; use the oneshot method instead.") - response = self.post(search=query, **kwargs) - sid = _load_sid(response, kwargs.get("output_mode", None)) - return Job(self.service, sid) - - def export(self, query, **params): - """Runs a search and immediately starts streaming preview events. This method returns a streaming handle to - this job's events as an XML document from the server. To parse this stream into usable Python objects, - pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param - "output_mode='json'":: - - import splunklib.client as client - import splunklib.results as results - service = client.connect(...) - rr = results.JSONResultsReader(service.jobs.export("search * | head 5",output_mode='json')) - for result in rr: - if isinstance(result, results.Message): - # Diagnostic messages may be returned in the results - print(f'{result.type}: {result.message}') - elif isinstance(result, dict): - # Normal events are returned as dicts - print(result) - assert rr.is_preview == False - - Running an export search is more efficient as it streams the results - directly to you, rather than having to write them out to disk and make - them available later. As soon as results are ready, you will receive - them. - - The ``export`` method makes a single roundtrip to the server (as opposed - to two for :meth:`create` followed by :meth:`preview`), plus at most two - more if the ``autologin`` field of :func:`connect` is set to ``True``. - - :raises `ValueError`: Raised for invalid queries. - :param query: The search query. 
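# Editor's sketch, not part of the patch: Jobs.create() rejects
# exec_mode="oneshot" (see above); one alternative is a blocking search,
# which returns only after the job has finished on the server. Assumes a
# connected `service`.
import splunklib.results as results

job = service.jobs.create("search index=_internal | head 5",
                          exec_mode="blocking")
for result in results.JSONResultsReader(job.results(output_mode='json')):
    print(result)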
- :type query: ``string``
- :param params: Additional arguments (optional). For a list of valid
- parameters, see `GET search/jobs/export
- `_
- in the REST API documentation.
- :type params: ``dict``
-
- :return: The ``InputStream`` IO handle to raw XML returned from the server.
- """
- if "exec_mode" in params:
- raise TypeError("Cannot specify an exec_mode to export.")
- params['segmentation'] = params.get('segmentation', 'none')
- return self.post(path_segment="export",
- search=query,
- **params).body
-
- def itemmeta(self):
- """There is no metadata available for :class:`Jobs`.
-
- Any call to this method raises a :class:`NotSupportedError`.
-
- :raises: :class:`NotSupportedError`
- """
- raise NotSupportedError()
-
- def oneshot(self, query, **params):
- """Runs a oneshot search and returns a streaming handle to the results.
-
- The ``InputStream`` object streams fragments from the server. To parse this stream into usable Python
- objects, pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param
- "output_mode='json'" ::
-
- import splunklib.client as client
- import splunklib.results as results
- service = client.connect(...)
- rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5", output_mode='json'))
- for result in rr:
- if isinstance(result, results.Message):
- # Diagnostic messages may be returned in the results
- print(f'{result.type}: {result.message}')
- elif isinstance(result, dict):
- # Normal events are returned as dicts
- print(result)
- assert rr.is_preview == False
-
- The ``oneshot`` method makes a single roundtrip to the server (as opposed
- to two for :meth:`create` followed by :meth:`results`), plus at most two more
- if the ``autologin`` field of :func:`connect` is set to ``True``.
-
- :raises ValueError: Raised for invalid queries.
-
- :param query: The search query.
- :type query: ``string``
- :param params: Additional arguments (optional):
-
- - "output_mode": Specifies the output format of the results (XML,
- JSON, or CSV).
-
- - "earliest_time": Specifies the earliest time in the time range to
- search. The time string can be a UTC time (with fractional seconds),
- a relative time specifier (to now), or a formatted time string.
-
- - "latest_time": Specifies the latest time in the time range to
- search. The time string can be a UTC time (with fractional seconds),
- a relative time specifier (to now), or a formatted time string.
-
- - "rf": Specifies one or more fields to add to the search.
-
- :type params: ``dict``
-
- :return: The ``InputStream`` IO handle to raw XML returned from the server.
- """
- if "exec_mode" in params:
- raise TypeError("Cannot specify an exec_mode to oneshot.")
- params['segmentation'] = params.get('segmentation', 'none')
- return self.post(search=query,
- exec_mode="oneshot",
- **params).body
-
-
-class Loggers(Collection):
- """This class represents a collection of service logging categories.
- Retrieve this collection using :meth:`Service.loggers`."""
-
- def __init__(self, service):
- Collection.__init__(self, service, PATH_LOGGER)
-
- def itemmeta(self):
- """There is no metadata available for :class:`Loggers`.
-
- Any call to this method raises a :class:`NotSupportedError`.
-
- :raises: :class:`NotSupportedError`
- """
- raise NotSupportedError()
-
-
-class Message(Entity):
- def __init__(self, service, path, **kwargs):
- Entity.__init__(self, service, path, **kwargs)
-
- @property
- def value(self):
- """Returns the message value.
-
- :return: The message value.
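# Editor's sketch, not part of the patch: each Message entity holds a
# single key named after the message itself, which is why the value
# property simply returns self[self.name]. Assumes a connected `service`.
for msg in service.messages:
    print(msg.name, '->', msg.value)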
- :rtype: ``string`` - """ - return self[self.name] - - -class ModularInputKind(Entity): - """This class contains the different types of modular inputs. Retrieve this - collection using :meth:`Service.modular_input_kinds`. - """ - - def __contains__(self, name): - args = self.state.content['endpoints']['args'] - if name in args: - return True - return Entity.__contains__(self, name) - - def __getitem__(self, name): - args = self.state.content['endpoint']['args'] - if name in args: - return args['item'] - return Entity.__getitem__(self, name) - - @property - def arguments(self): - """A dictionary of all the arguments supported by this modular input kind. - - The keys in the dictionary are the names of the arguments. The values are - another dictionary giving the metadata about that argument. The possible - keys in that dictionary are ``"title"``, ``"description"``, ``"required_on_create``", - ``"required_on_edit"``, ``"data_type"``. Each value is a string. It should be one - of ``"true"`` or ``"false"`` for ``"required_on_create"`` and ``"required_on_edit"``, - and one of ``"boolean"``, ``"string"``, or ``"number``" for ``"data_type"``. - - :return: A dictionary describing the arguments this modular input kind takes. - :rtype: ``dict`` - """ - return self.state.content['endpoint']['args'] - - def update(self, **kwargs): - """Raises an error. Modular input kinds are read only.""" - raise IllegalOperationException("Modular input kinds cannot be updated via the REST API.") - - -class SavedSearch(Entity): - """This class represents a saved search.""" - - def __init__(self, service, path, **kwargs): - Entity.__init__(self, service, path, **kwargs) - - def acknowledge(self): - """Acknowledges the suppression of alerts from this saved search and - resumes alerting. - - :return: The :class:`SavedSearch`. - """ - self.post("acknowledge") - return self - - @property - def alert_count(self): - """Returns the number of alerts fired by this saved search. - - :return: The number of alerts fired by this saved search. - :rtype: ``integer`` - """ - return int(self._state.content.get('triggered_alert_count', 0)) - - def dispatch(self, **kwargs): - """Runs the saved search and returns the resulting search job. - - :param `kwargs`: Additional dispatch arguments (optional). For details, - see the `POST saved/searches/{name}/dispatch - `_ - endpoint in the REST API documentation. - :type kwargs: ``dict`` - :return: The :class:`Job`. - """ - response = self.post("dispatch", **kwargs) - sid = _load_sid(response, kwargs.get("output_mode", None)) - return Job(self.service, sid) - - @property - def fired_alerts(self): - """Returns the collection of fired alerts (a fired alert group) - corresponding to this saved search's alerts. - - :raises IllegalOperationException: Raised when the search is not scheduled. - - :return: A collection of fired alerts. - :rtype: :class:`AlertGroup` - """ - if self['is_scheduled'] == '0': - raise IllegalOperationException('Unscheduled saved searches have no alerts.') - c = Collection( - self.service, - self.service._abspath(PATH_FIRED_ALERTS + self.name, - owner=self._state.access.owner, - app=self._state.access.app, - sharing=self._state.access.sharing), - item=AlertGroup) - return c - - def history(self, **kwargs): - """Returns a list of search jobs corresponding to this saved search. - - :param `kwargs`: Additional arguments (optional). - :type kwargs: ``dict`` - - :return: A list of :class:`Job` objects. 
- """ - response = self.get("history", **kwargs) - entries = _load_atom_entries(response) - if entries is None: return [] - jobs = [] - for entry in entries: - job = Job(self.service, entry.title) - jobs.append(job) - return jobs - - def update(self, search=None, **kwargs): - """Updates the server with any changes you've made to the current saved - search along with any additional arguments you specify. - - :param `search`: The search query (optional). - :type search: ``string`` - :param `kwargs`: Additional arguments (optional). For a list of available - parameters, see `Saved search parameters - `_ - on Splunk Developer Portal. - :type kwargs: ``dict`` - - :return: The :class:`SavedSearch`. - """ - # Updates to a saved search *require* that the search string be - # passed, so we pass the current search string if a value wasn't - # provided by the caller. - if search is None: search = self.content.search - Entity.update(self, search=search, **kwargs) - return self - - def scheduled_times(self, earliest_time='now', latest_time='+1h'): - """Returns the times when this search is scheduled to run. - - By default this method returns the times in the next hour. For different - time ranges, set *earliest_time* and *latest_time*. For example, - for all times in the last day use "earliest_time=-1d" and - "latest_time=now". - - :param earliest_time: The earliest time. - :type earliest_time: ``string`` - :param latest_time: The latest time. - :type latest_time: ``string`` - - :return: The list of search times. - """ - response = self.get("scheduled_times", - earliest_time=earliest_time, - latest_time=latest_time) - data = self._load_atom_entry(response) - rec = _parse_atom_entry(data) - times = [datetime.fromtimestamp(int(t)) - for t in rec.content.scheduled_times] - return times - - def suppress(self, expiration): - """Skips any scheduled runs of this search in the next *expiration* - number of seconds. - - :param expiration: The expiration period, in seconds. - :type expiration: ``integer`` - - :return: The :class:`SavedSearch`. - """ - self.post("suppress", expiration=expiration) - return self - - @property - def suppressed(self): - """Returns the number of seconds that this search is blocked from running - (possibly 0). - - :return: The number of seconds. - :rtype: ``integer`` - """ - r = self._run_action("suppress") - if r.suppressed == "1": - return int(r.expiration) - return 0 - - def unsuppress(self): - """Cancels suppression and makes this search run as scheduled. - - :return: The :class:`SavedSearch`. - """ - self.post("suppress", expiration="0") - return self - - -class SavedSearches(Collection): - """This class represents a collection of saved searches. Retrieve this - collection using :meth:`Service.saved_searches`.""" - - def __init__(self, service): - Collection.__init__( - self, service, PATH_SAVED_SEARCHES, item=SavedSearch) - - def create(self, name, search, **kwargs): - """ Creates a saved search. - - :param name: The name for the saved search. - :type name: ``string`` - :param search: The search query. - :type search: ``string`` - :param kwargs: Additional arguments (optional). For a list of available - parameters, see `Saved search parameters - `_ - on Splunk Developer Portal. - :type kwargs: ``dict`` - :return: The :class:`SavedSearches` collection. - """ - return Collection.create(self, name, search=search, **kwargs) - - -class Settings(Entity): - """This class represents configuration settings for a Splunk service. 
- Retrieve this collection using :meth:`Service.settings`.""" - - def __init__(self, service, **kwargs): - Entity.__init__(self, service, "/services/server/settings", **kwargs) - - # Updates on the settings endpoint are POSTed to server/settings/settings. - def update(self, **kwargs): - """Updates the settings on the server using the arguments you provide. - - :param kwargs: Additional arguments. For a list of valid arguments, see - `POST server/settings/{name} - `_ - in the REST API documentation. - :type kwargs: ``dict`` - :return: The :class:`Settings` collection. - """ - self.service.post("/services/server/settings/settings", **kwargs) - return self - - -class User(Entity): - """This class represents a Splunk user. - """ - - @property - def role_entities(self): - """Returns a list of roles assigned to this user. - - :return: The list of roles. - :rtype: ``list`` - """ - all_role_names = [r.name for r in self.service.roles.list()] - return [self.service.roles[name] for name in self.content.roles if name in all_role_names] - - -# Splunk automatically lowercases new user names so we need to match that -# behavior here to ensure that the subsequent member lookup works correctly. -class Users(Collection): - """This class represents the collection of Splunk users for this instance of - Splunk. Retrieve this collection using :meth:`Service.users`. - """ - - def __init__(self, service): - Collection.__init__(self, service, PATH_USERS, item=User) - - def __getitem__(self, key): - return Collection.__getitem__(self, key.lower()) - - def __contains__(self, name): - return Collection.__contains__(self, name.lower()) - - def create(self, username, password, roles, **params): - """Creates a new user. - - This function makes two roundtrips to the server, plus at most - two more if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param username: The username. - :type username: ``string`` - :param password: The password. - :type password: ``string`` - :param roles: A single role or list of roles for the user. - :type roles: ``string`` or ``list`` - :param params: Additional arguments (optional). For a list of available - parameters, see `User authentication parameters - `_ - on Splunk Developer Portal. - :type params: ``dict`` - - :return: The new user. - :rtype: :class:`User` - - **Example**:: - - import splunklib.client as client - c = client.connect(...) - users = c.users - boris = users.create("boris", "securepassword", roles="user") - hilda = users.create("hilda", "anotherpassword", roles=["user","power"]) - """ - if not isinstance(username, str): - raise ValueError(f"Invalid username: {str(username)}") - username = username.lower() - self.post(name=username, password=password, roles=roles, **params) - # splunkd doesn't return the user in the POST response body, - # so we have to make a second round trip to fetch it. - response = self.get(username) - entry = _load_atom(response, XNAME_ENTRY).entry - state = _parse_atom_entry(entry) - entity = self.item( - self.service, - parse.unquote(state.links.alternate), - state=state) - return entity - - def delete(self, name): - """ Deletes the user and returns the resulting collection of users. - - :param name: The name of the user to delete. - :type name: ``string`` - - :return: - :rtype: :class:`Users` - """ - return Collection.delete(self, name.lower()) - - -class Role(Entity): - """This class represents a user role. - """ - - def grant(self, *capabilities_to_grant): - """Grants additional capabilities to this role. 
-
- :param capabilities_to_grant: Zero or more capabilities to grant this
- role. For a list of capabilities, see
- `Capabilities `_
- on Splunk Developer Portal.
- :type capabilities_to_grant: ``string`` or ``list``
- :return: The :class:`Role`.
-
- **Example**::
-
- service = client.connect(...)
- role = service.roles['somerole']
- role.grant('change_own_password', 'search')
- """
- possible_capabilities = self.service.capabilities
- for capability in capabilities_to_grant:
- if capability not in possible_capabilities:
- raise NoSuchCapability(capability)
- new_capabilities = self['capabilities'] + list(capabilities_to_grant)
- self.post(capabilities=new_capabilities)
- return self
-
- def revoke(self, *capabilities_to_revoke):
- """Revokes zero or more capabilities from this role.
-
- :param capabilities_to_revoke: Zero or more capabilities to revoke from
- this role. For a list of capabilities, see
- `Capabilities `_
- on Splunk Developer Portal.
- :type capabilities_to_revoke: ``string`` or ``list``
-
- :return: The :class:`Role`.
-
- **Example**::
-
- service = client.connect(...)
- role = service.roles['somerole']
- role.revoke('change_own_password', 'search')
- """
- possible_capabilities = self.service.capabilities
- for capability in capabilities_to_revoke:
- if capability not in possible_capabilities:
- raise NoSuchCapability(capability)
- old_capabilities = self['capabilities']
- new_capabilities = []
- for c in old_capabilities:
- if c not in capabilities_to_revoke:
- new_capabilities.append(c)
- if not new_capabilities:
- new_capabilities = '' # Empty lists don't get passed in the body, so we have to force an empty argument.
- self.post(capabilities=new_capabilities)
- return self
-
-
-class Roles(Collection):
- """This class represents the collection of roles in the Splunk instance.
- Retrieve this collection using :meth:`Service.roles`."""
-
- def __init__(self, service):
- Collection.__init__(self, service, PATH_ROLES, item=Role)
-
- def __getitem__(self, key):
- return Collection.__getitem__(self, key.lower())
-
- def __contains__(self, name):
- return Collection.__contains__(self, name.lower())
-
- def create(self, name, **params):
- """Creates a new role.
-
- This function makes two roundtrips to the server, plus at most
- two more if
- the ``autologin`` field of :func:`connect` is set to ``True``.
-
- :param name: Name for the role.
- :type name: ``string``
- :param params: Additional arguments (optional). For a list of available
- parameters, see `Roles parameters
- `_
- on Splunk Developer Portal.
- :type params: ``dict``
-
- :return: The new role.
- :rtype: :class:`Role`
-
- **Example**::
-
- import splunklib.client as client
- c = client.connect(...)
- roles = c.roles
- paltry = roles.create("paltry", imported_roles="user", defaultApp="search")
- """
- if not isinstance(name, str):
- raise ValueError(f"Invalid role name: {str(name)}")
- name = name.lower()
- self.post(name=name, **params)
- # splunkd doesn't return the role in the POST response body,
- # so we have to make a second round trip to fetch it.
- response = self.get(name)
- entry = _load_atom(response, XNAME_ENTRY).entry
- state = _parse_atom_entry(entry)
- entity = self.item(
- self.service,
- parse.unquote(state.links.alternate),
- state=state)
- return entity
-
- def delete(self, name):
- """ Deletes the role and returns the resulting collection of roles.
-
- :param name: The name of the role to delete.
- :type name: ``string`` - - :rtype: The :class:`Roles` - """ - return Collection.delete(self, name.lower()) - - -class Application(Entity): - """Represents a locally-installed Splunk app.""" - - @property - def setupInfo(self): - """Returns the setup information for the app. - - :return: The setup information. - """ - return self.content.get('eai:setup', None) - - def package(self): - """ Creates a compressed package of the app for archiving.""" - return self._run_action("package") - - def updateInfo(self): - """Returns any update information that is available for the app.""" - return self._run_action("update") - - -class KVStoreCollections(Collection): - def __init__(self, service): - Collection.__init__(self, service, 'storage/collections/config', item=KVStoreCollection) - - def __getitem__(self, item): - res = Collection.__getitem__(self, item) - for k, v in res.content.items(): - if "accelerated_fields" in k: - res.content[k] = json.loads(v) - return res - - def create(self, name, accelerated_fields={}, fields={}, **kwargs): - """Creates a KV Store Collection. - - :param name: name of collection to create - :type name: ``string`` - :param accelerated_fields: dictionary of accelerated_fields definitions - :type accelerated_fields: ``dict`` - :param fields: dictionary of field definitions - :type fields: ``dict`` - :param kwargs: a dictionary of additional parameters specifying indexes and field definitions - :type kwargs: ``dict`` - - :return: Result of POST request - """ - for k, v in list(accelerated_fields.items()): - if isinstance(v, dict): - v = json.dumps(v) - kwargs['accelerated_fields.' + k] = v - for k, v in list(fields.items()): - kwargs['field.' + k] = v - return self.post(name=name, **kwargs) - - -class KVStoreCollection(Entity): - @property - def data(self): - """Returns data object for this Collection. - - :rtype: :class:`KVStoreCollectionData` - """ - return KVStoreCollectionData(self) - - def update_accelerated_field(self, name, value): - """Changes the definition of a KV Store accelerated_field. - - :param name: name of accelerated_fields to change - :type name: ``string`` - :param value: new accelerated_fields definition - :type value: ``dict`` - - :return: Result of POST request - """ - kwargs = {} - kwargs['accelerated_fields.' + name] = json.dumps(value) if isinstance(value, dict) else value - return self.post(**kwargs) - - def update_field(self, name, value): - """Changes the definition of a KV Store field. - - :param name: name of field to change - :type name: ``string`` - :param value: new field definition - :type value: ``string`` - - :return: Result of POST request - """ - kwargs = {} - kwargs['field.' + name] = value - return self.post(**kwargs) - - -class KVStoreCollectionData: - """This class represents the data endpoint for a KVStoreCollection. 
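# Editor's sketch, not part of the patch: creating a KV Store collection
# via KVStoreCollections.create() above, assuming the Service.kvstore
# accessor defined elsewhere in this module. Names are placeholders.
service.kvstore.create('widgets',
                       accelerated_fields={'by_name': {'name': 1}},
                       fields={'name': 'string', 'count': 'number'})
service.kvstore['widgets'].update_field('count', 'number')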
- - Retrieve using :meth:`KVStoreCollection.data` - """ - JSON_HEADER = [('Content-Type', 'application/json')] - - def __init__(self, collection): - self.service = collection.service - self.collection = collection - self.owner, self.app, self.sharing = collection._proper_namespace() - self.path = 'storage/collections/data/' + UrlEncoded(self.collection.name, encode_slash=True) + '/' - - def _get(self, url, **kwargs): - return self.service.get(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) - - def _post(self, url, **kwargs): - return self.service.post(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) - - def _delete(self, url, **kwargs): - return self.service.delete(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) - - def query(self, **query): - """ - Gets the results of query, with optional parameters sort, limit, skip, and fields. - - :param query: Optional parameters. Valid options are sort, limit, skip, and fields - :type query: ``dict`` - - :return: Array of documents retrieved by query. - :rtype: ``array`` - """ - - for key, value in list(query.items()): - if isinstance(query[key], dict): - query[key] = json.dumps(value) - - return json.loads(self._get('', **query).body.read().decode('utf-8')) - - def query_by_id(self, id): - """ - Returns object with _id = id. - - :param id: Value for ID. If not a string will be coerced to string. - :type id: ``string`` - - :return: Document with id - :rtype: ``dict`` - """ - return json.loads(self._get(UrlEncoded(str(id), encode_slash=True)).body.read().decode('utf-8')) - - def insert(self, data): - """ - Inserts item into this collection. An _id field will be generated if not assigned in the data. - - :param data: Document to insert - :type data: ``string`` - - :return: _id of inserted object - :rtype: ``dict`` - """ - if isinstance(data, dict): - data = json.dumps(data) - return json.loads( - self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) - - def delete(self, query=None): - """ - Deletes all data in collection if query is absent. Otherwise, deletes all data matched by query. - - :param query: Query to select documents to delete - :type query: ``string`` - - :return: Result of DELETE request - """ - return self._delete('', **({'query': query}) if query else {}) - - def delete_by_id(self, id): - """ - Deletes document that has _id = id. - - :param id: id of document to delete - :type id: ``string`` - - :return: Result of DELETE request - """ - return self._delete(UrlEncoded(str(id), encode_slash=True)) - - def update(self, id, data): - """ - Replaces document with _id = id with data. - - :param id: _id of document to update - :type id: ``string`` - :param data: the new document to insert - :type data: ``string`` - - :return: id of replaced document - :rtype: ``dict`` - """ - if isinstance(data, dict): - data = json.dumps(data) - return json.loads(self._post(UrlEncoded(str(id), encode_slash=True), headers=KVStoreCollectionData.JSON_HEADER, - body=data).body.read().decode('utf-8')) - - def batch_find(self, *dbqueries): - """ - Returns array of results from queries dbqueries. 
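# Editor's sketch, not part of the patch: round-tripping one document
# through the data endpoint above. The '_key' field in the POST response
# is standard KV Store behavior; the collection name is a placeholder.
data = service.kvstore['widgets'].data
key = data.insert({'name': 'sprocket', 'count': 1})['_key']
print(data.query_by_id(key))
data.update(key, {'name': 'sprocket', 'count': 2})
data.delete_by_id(key)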
-
- :param dbqueries: Array of individual queries as dictionaries
- :type dbqueries: ``array`` of ``dict``
-
- :return: Results of each query
- :rtype: ``array`` of ``array``
- """
- if len(dbqueries) < 1:
- raise Exception('Must have at least one query.')
-
- data = json.dumps(dbqueries)
-
- return json.loads(
- self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8'))
-
- def batch_save(self, *documents):
- """
- Inserts or updates every document specified in documents.
-
- :param documents: Array of documents to save as dictionaries
- :type documents: ``array`` of ``dict``
-
- :return: Results of update operation as overall stats
- :rtype: ``dict``
- """
- if len(documents) < 1:
- raise Exception('Must have at least one document.')
-
- data = json.dumps(documents)
-
- return json.loads(
- self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8'))
+# Copyright © 2011-2024 Splunk, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"): you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# The purpose of this module is to provide a friendlier domain interface to
+# various Splunk endpoints. The approach here is to leverage the binding
+# layer to capture endpoint context and provide objects and methods that
+# offer simplified access to their corresponding endpoints. The design avoids
+# caching resource state. From the perspective of this module, the 'policy'
+# for caching resource state belongs in the application or a higher level
+# framework, and it's the purpose of this module to provide simplified
+# access to that resource state.
+#
+# As a side note, the objects below that provide helper methods for updating,
+# e.g., Entity state, are written so that they may be used in a fluent style.
+#
+
+"""The **splunklib.client** module provides a Pythonic interface to the
+`Splunk REST API `_,
+allowing you to programmatically access Splunk's resources.
+
+**splunklib.client** wraps a Pythonic layer around the wire-level
+binding of the **splunklib.binding** module. The core of the library is the
+:class:`Service` class, which encapsulates a connection to the server, and
+provides access to the various aspects of Splunk's functionality, which are
+exposed via the REST API. Typically you connect to a running Splunk instance
+with the :func:`connect` function::
+
+ import splunklib.client as client
+ service = client.connect(host='localhost', port=8089,
+ username='admin', password='...')
+ assert isinstance(service, client.Service)
+
+:class:`Service` objects have fields for the various Splunk resources (such as apps,
+jobs, saved searches, inputs, and indexes). All of these fields are
+:class:`Collection` objects::
+
+ appcollection = service.apps
+ my_app = appcollection.create('my_app')
+ my_app = appcollection['my_app']
+ appcollection.delete('my_app')
+
+The individual elements of the collection, in this case *applications*,
+are subclasses of :class:`Entity`.
An ``Entity`` object has fields for its +attributes, and methods that are specific to each kind of entity. For example:: + + print(my_app['author']) # Or: print(my_app.author) + my_app.package() # Creates a compressed package of this application +""" + +import contextlib +import datetime +import json +import logging +import re +import socket +from datetime import datetime, timedelta +from time import sleep +from urllib import parse + +from splunklib import data +from splunklib.data import record +from splunklib.binding import (AuthenticationError, Context, HTTPError, UrlEncoded, + _encode, _make_cookie_header, _NoAuthenticationToken, + namespace) + +logger = logging.getLogger(__name__) + +__all__ = [ + "connect", + "NotSupportedError", + "OperationError", + "IncomparableException", + "Service", + "namespace", + "AuthenticationError" +] + +PATH_APPS = "apps/local/" +PATH_CAPABILITIES = "authorization/capabilities/" +PATH_CONF = "configs/conf-%s/" +PATH_PROPERTIES = "properties/" +PATH_DEPLOYMENT_CLIENTS = "deployment/client/" +PATH_DEPLOYMENT_TENANTS = "deployment/tenants/" +PATH_DEPLOYMENT_SERVERS = "deployment/server/" +PATH_DEPLOYMENT_SERVERCLASSES = "deployment/serverclass/" +PATH_EVENT_TYPES = "saved/eventtypes/" +PATH_FIRED_ALERTS = "alerts/fired_alerts/" +PATH_INDEXES = "data/indexes/" +PATH_INPUTS = "data/inputs/" +PATH_JOBS = "search/jobs/" +PATH_JOBS_V2 = "search/v2/jobs/" +PATH_LOGGER = "/services/server/logger/" +PATH_MESSAGES = "messages/" +PATH_MODULAR_INPUTS = "data/modular-inputs" +PATH_ROLES = "authorization/roles/" +PATH_SAVED_SEARCHES = "saved/searches/" +PATH_STANZA = "configs/conf-%s/%s" # (file, stanza) +PATH_USERS = "authentication/users/" +PATH_RECEIVERS_STREAM = "/services/receivers/stream" +PATH_RECEIVERS_SIMPLE = "/services/receivers/simple" +PATH_STORAGE_PASSWORDS = "storage/passwords" + +XNAMEF_ATOM = "{http://www.w3.org/2005/Atom}%s" +XNAME_ENTRY = XNAMEF_ATOM % "entry" +XNAME_CONTENT = XNAMEF_ATOM % "content" + +MATCH_ENTRY_CONTENT = f"{XNAME_ENTRY}/{XNAME_CONTENT}/*" + + +class IllegalOperationException(Exception): + """Thrown when an operation is not possible on the Splunk instance that a + :class:`Service` object is connected to.""" + + +class IncomparableException(Exception): + """Thrown when trying to compare objects (using ``==``, ``<``, ``>``, and + so on) of a type that doesn't support it.""" + + +class AmbiguousReferenceException(ValueError): + """Thrown when the name used to fetch an entity matches more than one entity.""" + + +class InvalidNameException(Exception): + """Thrown when the specified name contains characters that are not allowed + in Splunk entity names.""" + + +class NoSuchCapability(Exception): + """Thrown when the capability that has been referred to doesn't exist.""" + + +class OperationError(Exception): + """Raised for a failed operation, such as a timeout.""" + + +class NotSupportedError(Exception): + """Raised for operations that are not supported on a given object.""" + + +def _trailing(template, *targets): + """Substring of *template* following all *targets*. + + **Example**:: + + template = "this is a test of the bunnies." + _trailing(template, "is", "est", "the") == " bunnies" + + Each target is matched successively in the string, and the string + remaining after the last target is returned. If one of the targets + fails to match, a ValueError is raised. + + :param template: Template to extract a trailing string from. + :type template: ``string`` + :param targets: Strings to successively match in *template*. 
+ :type targets: list of ``string``s
+ :return: Trailing string after all targets are matched.
+ :rtype: ``string``
+ :raises ValueError: Raised when one of the targets does not match.
+ """
+ s = template
+ for t in targets:
+ n = s.find(t)
+ if n == -1:
+ raise ValueError("Target " + t + " not found in template.")
+ s = s[n + len(t):]
+ return s
+
+
+# Filter the given state content record according to the given arg list.
+def _filter_content(content, *args):
+ if len(args) > 0:
+ return record((k, content[k]) for k in args)
+ return record((k, v) for k, v in content.items()
+ if k not in ['eai:acl', 'eai:attributes', 'type'])
+
+
+# Construct a resource path from the given base path + resource name
+def _path(base, name):
+ if not base.endswith('/'): base = base + '/'
+ return base + name
+
+
+# Load an atom record from the body of the given response
+# this will ultimately be sent to an xml ElementTree so we
+# should use the xmlcharrefreplace option
+def _load_atom(response, match=None):
+ return data.load(response.body.read()
+ .decode('utf-8', 'xmlcharrefreplace'), match)
+
+
+# Load an array of atom entries from the body of the given response
+def _load_atom_entries(response):
+ r = _load_atom(response)
+ if 'feed' in r:
+ # Need this to handle a random case in the REST API
+ if r.feed.get('totalResults') in [0, '0']:
+ return []
+ entries = r.feed.get('entry', None)
+ if entries is None: return None
+ return entries if isinstance(entries, list) else [entries]
+ # Unlike most other endpoints, the jobs endpoint does not return
+ # its state wrapped in another element, but at the top level.
+ # For example, in XML, it returns <entry>...</entry> instead of
+ # <feed><entry>...</entry></feed>.
+ entries = r.get('entry', None)
+ if entries is None: return None
+ return entries if isinstance(entries, list) else [entries]
+
+
+# Load the sid from the body of the given response
+def _load_sid(response, output_mode):
+ if output_mode == "json":
+ json_obj = json.loads(response.body.read())
+ return json_obj.get('sid')
+ return _load_atom(response).response.sid
+
+
+# Parse the given atom entry record into a generic entity state record
+def _parse_atom_entry(entry):
+ title = entry.get('title', None)
+
+ elink = entry.get('link', [])
+ elink = elink if isinstance(elink, list) else [elink]
+ links = record((link.rel, link.href) for link in elink)
+
+ # Retrieve entity content values
+ content = entry.get('content', {})
+
+ # Host entry metadata
+ metadata = _parse_atom_metadata(content)
+
+ # Filter some of the noise out of the content record
+ content = record((k, v) for k, v in content.items()
+ if k not in ['eai:acl', 'eai:attributes'])
+
+ if 'type' in content:
+ if isinstance(content['type'], list):
+ content['type'] = [t for t in content['type'] if t != 'text/xml']
+ # Unset type if it was only 'text/xml'
+ if len(content['type']) == 0:
+ content.pop('type', None)
+ # Flatten 1 element list
+ if len(content['type']) == 1:
+ content['type'] = content['type'][0]
+ else:
+ content.pop('type', None)
+
+ return record({
+ 'title': title,
+ 'links': links,
+ 'access': metadata.access,
+ 'fields': metadata.fields,
+ 'content': content,
+ 'updated': entry.get("updated")
+ })
+
+
+# Parse the metadata fields out of the given atom entry content record
+def _parse_atom_metadata(content):
+ # Hoist access metadata
+ access = content.get('eai:acl', None)
+
+ # Hoist content metadata (and cleanup some naming)
+ attributes = content.get('eai:attributes', {})
+ fields = record({
+ 'required': attributes.get('requiredFields', []),
+ 'optional':
attributes.get('optionalFields', []), + 'wildcard': attributes.get('wildcardFields', [])}) + + return record({'access': access, 'fields': fields}) + + +# kwargs: scheme, host, port, app, owner, username, password +def connect(**kwargs): + """This function connects and logs in to a Splunk instance. + + This function is a shorthand for :meth:`Service.login`. + The ``connect`` function makes one round trip to the server (for logging in). + + :param host: The host name (the default is "localhost"). + :type host: ``string`` + :param port: The port number (the default is 8089). + :type port: ``integer`` + :param scheme: The scheme for accessing the service (the default is "https"). + :type scheme: "https" or "http" + :param verify: Enable (True) or disable (False) SSL verification for + https connections. (optional, the default is True) + :type verify: ``Boolean`` + :param `owner`: The owner context of the namespace (optional). + :type owner: ``string`` + :param `app`: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (the default is "user"). + :type sharing: "global", "system", "app", or "user" + :param `token`: The current session token (optional). Session tokens can be + shared across multiple service instances. + :type token: ``string`` + :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. + This parameter is only supported for Splunk 6.2+. + :type cookie: ``string`` + :param autologin: When ``True``, automatically tries to log in again if the + session terminates. + :type autologin: ``boolean`` + :param `username`: The Splunk account username, which is used to + authenticate the Splunk instance. + :type username: ``string`` + :param `password`: The password for the Splunk account. + :type password: ``string`` + :param retires: Number of retries for each HTTP connection (optional, the default is 0). + NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER. + :type retries: ``int`` + :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s). + :type retryDelay: ``int`` (in seconds) + :param `context`: The SSLContext that can be used when setting verify=True (optional) + :type context: ``SSLContext`` + :return: An initialized :class:`Service` connection. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + a = s.apps["my_app"] + ... + """ + s = Service(**kwargs) + s.login() + return s + + +# In preparation for adding Storm support, we added an +# intermediary class between Service and Context. Storm's +# API is not going to be the same as enterprise Splunk's +# API, so we will derive both Service (for enterprise Splunk) +# and StormService for (Splunk Storm) from _BaseService, and +# put any shared behavior on it. +class _BaseService(Context): + pass + + +class Service(_BaseService): + """A Pythonic binding to Splunk instances. + + A :class:`Service` represents a binding to a Splunk instance on an + HTTP or HTTPS port. It handles the details of authentication, wire + formats, and wraps the REST API endpoints into something more + Pythonic. All of the low-level operations on the instance from + :class:`splunklib.binding.Context` are also available in case you need + to do something beyond what is provided by this class. + + After creating a ``Service`` object, you must call its :meth:`login` + method before you can issue requests to Splunk. 
+ Alternately, use the :func:`connect` function to create an already + authenticated :class:`Service` object, or provide a session token + when creating the :class:`Service` object explicitly (the same + token may be shared by multiple :class:`Service` objects). + + :param host: The host name (the default is "localhost"). + :type host: ``string`` + :param port: The port number (the default is 8089). + :type port: ``integer`` + :param scheme: The scheme for accessing the service (the default is "https"). + :type scheme: "https" or "http" + :param verify: Enable (True) or disable (False) SSL verification for + https connections. (optional, the default is True) + :type verify: ``Boolean`` + :param `owner`: The owner context of the namespace (optional; use "-" for wildcard). + :type owner: ``string`` + :param `app`: The app context of the namespace (optional; use "-" for wildcard). + :type app: ``string`` + :param `token`: The current session token (optional). Session tokens can be + shared across multiple service instances. + :type token: ``string`` + :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. + This parameter is only supported for Splunk 6.2+. + :type cookie: ``string`` + :param `username`: The Splunk account username, which is used to + authenticate the Splunk instance. + :type username: ``string`` + :param `password`: The password, which is used to authenticate the Splunk + instance. + :type password: ``string`` + :param retires: Number of retries for each HTTP connection (optional, the default is 0). + NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER. + :type retries: ``int`` + :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s). + :type retryDelay: ``int`` (in seconds) + :return: A :class:`Service` instance. + + **Example**:: + + import splunklib.client as client + s = client.Service(username="boris", password="natasha", ...) + s.login() + # Or equivalently + s = client.connect(username="boris", password="natasha") + # Or if you already have a session token + s = client.Service(token="atg232342aa34324a") + # Or if you already have a valid cookie + s = client.Service(cookie="splunkd_8089=...") + """ + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._splunk_version = None + self._kvstore_owner = None + self._instance_type = None + + @property + def apps(self): + """Returns the collection of applications that are installed on this instance of Splunk. + + :return: A :class:`Collection` of :class:`Application` entities. + """ + return Collection(self, PATH_APPS, item=Application) + + @property + def confs(self): + """Returns the collection of configuration files for this Splunk instance. + + :return: A :class:`Configurations` collection of + :class:`ConfigurationFile` entities. + """ + return Configurations(self) + + @property + def capabilities(self): + """Returns the list of system capabilities. + + :return: A ``list`` of capabilities. + """ + response = self.get(PATH_CAPABILITIES) + return _load_atom(response, MATCH_ENTRY_CONTENT).capabilities + + @property + def event_types(self): + """Returns the collection of event types defined in this Splunk instance. + + :return: An :class:`Entity` containing the event types. + """ + return Collection(self, PATH_EVENT_TYPES) + + @property + def fired_alerts(self): + """Returns the collection of alerts that have been fired on the Splunk + instance, grouped by saved search. 
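# Editor's sketch, not part of the patch: the Collection-typed properties
# above behave like iterable dictionaries of entities; `confs` assumes the
# Configurations class defined elsewhere in this module. Assumes a
# connected `service`.
for app in service.apps:
    print(app.name)
print('edit_user' in service.capabilities)
props = service.confs['props']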
+
+        :return: A :class:`Collection` of :class:`AlertGroup` entities.
+        """
+        return Collection(self, PATH_FIRED_ALERTS, item=AlertGroup)
+
+    @property
+    def indexes(self):
+        """Returns the collection of indexes for this Splunk instance.
+
+        :return: An :class:`Indexes` collection of :class:`Index` entities.
+        """
+        return Indexes(self, PATH_INDEXES, item=Index)
+
+    @property
+    def info(self):
+        """Returns the information about this instance of Splunk.
+
+        :return: The system information, as key-value pairs.
+        :rtype: ``dict``
+        """
+        response = self.get("/services/server/info")
+        return _filter_content(_load_atom(response, MATCH_ENTRY_CONTENT))
+
+    def input(self, path, kind=None):
+        """Retrieves an input by path, and optionally kind.
+
+        :return: A :class:`Input` object.
+        """
+        return Input(self, path, kind=kind).refresh()
+
+    @property
+    def inputs(self):
+        """Returns the collection of inputs configured on this Splunk instance.
+
+        :return: An :class:`Inputs` collection of :class:`Input` entities.
+        """
+        return Inputs(self)
+
+    def job(self, sid):
+        """Retrieves a search job by sid.
+
+        :return: A :class:`Job` object.
+        """
+        return Job(self, sid).refresh()
+
+    @property
+    def jobs(self):
+        """Returns the collection of current search jobs.
+
+        :return: A :class:`Jobs` collection of :class:`Job` entities.
+        """
+        return Jobs(self)
+
+    @property
+    def loggers(self):
+        """Returns the collection of logging level categories and their status.
+
+        :return: A :class:`Loggers` collection of logging levels.
+        """
+        return Loggers(self)
+
+    @property
+    def messages(self):
+        """Returns the collection of service messages.
+
+        :return: A :class:`Collection` of :class:`Message` entities.
+        """
+        return Collection(self, PATH_MESSAGES, item=Message)
+
+    @property
+    def modular_input_kinds(self):
+        """Returns the collection of modular input kinds on this Splunk instance.
+
+        :return: A :class:`ReadOnlyCollection` of :class:`ModularInputKind` entities.
+        """
+        if self.splunk_version >= (5,):
+            return ReadOnlyCollection(self, PATH_MODULAR_INPUTS, item=ModularInputKind)
+        raise IllegalOperationException("Modular inputs are not supported before Splunk version 5.")
+
+    @property
+    def storage_passwords(self):
+        """Returns the collection of storage passwords on this Splunk instance.
+
+        :return: A :class:`StoragePasswords` collection of :class:`StoragePassword` entities.
+        """
+        return StoragePasswords(self)
+
+    # kwargs: enable_lookups, reload_macros, parse_only, output_mode
+    def parse(self, query, **kwargs):
+        """Parses a search query and returns a semantic map of the search.
+
+        :param query: The search query to parse.
+        :type query: ``string``
+        :param kwargs: Arguments to pass to the ``search/parser`` endpoint
+            (optional). Valid arguments are:
+
+            * "enable_lookups" (``boolean``): If ``True``, performs reverse lookups
+              to expand the search expression.
+
+            * "output_mode" (``string``): The output format (XML or JSON).
+
+            * "parse_only" (``boolean``): If ``True``, disables the expansion of
+              search due to evaluation of subsearches, time term expansion,
+              lookups, tags, eventtypes, and sourcetype alias.
+
+            * "reload_macros" (``boolean``): If ``True``, reloads macro
+              definitions from macros.conf.
+
+        :type kwargs: ``dict``
+        :return: A semantic map of the parsed search query.
+        """
+        if not self.disable_v2_api:
+            return self.post("search/v2/parser", q=query, **kwargs)
+        return self.get("search/parser", q=query, **kwargs)
+
+    def restart(self, timeout=None):
+        """Restarts this Splunk instance.
+
+        The service is unavailable until it has successfully restarted.
+
+        If a *timeout* value is specified, ``restart`` blocks until the service
+        resumes or the timeout period has been exceeded. Otherwise, ``restart`` returns
+        immediately.
+
+        :param timeout: A timeout period, in seconds.
+        :type timeout: ``integer``
+        """
+        msg = {"value": f"Restart requested by {self.username} via the Splunk SDK for Python"}
+        # This message will be deleted once the server actually restarts.
+        self.messages.create(name="restart_required", **msg)
+        result = self.post("/services/server/control/restart")
+        if timeout is None:
+            return result
+        start = datetime.now()
+        diff = timedelta(seconds=timeout)
+        while datetime.now() - start < diff:
+            try:
+                self.login()
+                if not self.restart_required:
+                    return result
+            except Exception:
+                sleep(1)
+        raise Exception("Operation timed out.")
+
+    @property
+    def restart_required(self):
+        """Indicates whether splunkd is in a state that requires a restart.
+
+        :return: A ``boolean`` that indicates whether a restart is required.
+
+        """
+        response = self.get("messages").body.read()
+        messages = data.load(response)['feed']
+        if 'entry' not in messages:
+            result = False
+        else:
+            if isinstance(messages['entry'], dict):
+                titles = [messages['entry']['title']]
+            else:
+                titles = [x['title'] for x in messages['entry']]
+            result = 'restart_required' in titles
+        return result
+
+    @property
+    def roles(self):
+        """Returns the collection of user roles.
+
+        :return: A :class:`Roles` collection of :class:`Role` entities.
+        """
+        return Roles(self)
+
+    def search(self, query, **kwargs):
+        """Runs a search using a search query and any optional arguments you
+        provide, and returns a `Job` object representing the search.
+
+        :param query: A search query.
+        :type query: ``string``
+        :param kwargs: Arguments for the search (optional):
+
+            * "output_mode" (``string``): Specifies the output format of the
+              results.
+
+            * "earliest_time" (``string``): Specifies the earliest time in the
+              time range to search. The time string can be a UTC time (with
+              fractional seconds), a relative time specifier (to now), or a
+              formatted time string.
+
+            * "latest_time" (``string``): Specifies the latest time in the time
+              range to search. The time string can be a UTC time (with
+              fractional seconds), a relative time specifier (to now), or a
+              formatted time string.
+
+            * "rf" (``string``): Specifies one or more fields to add to the
+              search.
+
+        :type kwargs: ``dict``
+        :rtype: class:`Job`
+        :returns: An object representing the created job.
+        """
+        return self.jobs.create(query, **kwargs)
+
+    @property
+    def saved_searches(self):
+        """Returns the collection of saved searches.
+
+        :return: A :class:`SavedSearches` collection of :class:`SavedSearch`
+            entities.
+        """
+        return SavedSearches(self)
+
+    @property
+    def settings(self):
+        """Returns the configuration settings for this instance of Splunk.
+
+        :return: A :class:`Settings` object containing configuration settings.
+        """
+        return Settings(self)
+
+    @property
+    def splunk_version(self):
+        """Returns the version of the splunkd instance this object is attached
+        to.
+
+        The version is returned as a tuple of the version components as
+        integers (for example, `(4,3,3)` or `(5,)`).
+
+        :return: A ``tuple`` of ``integers``.
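+
+        Because the version is an ordinary tuple of integers, it can be
+        compared directly against another tuple. A minimal sketch (the
+        credentials are illustrative and assume a reachable splunkd)::
+
+            import splunklib.client as client
+            service = client.connect(username="admin", password="changeme")
+            if service.splunk_version >= (9,):
+                print("splunkd is at least version 9")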
+ """ + if self._splunk_version is None: + self._splunk_version = tuple(int(p) for p in self.info['version'].split('.')) + return self._splunk_version + + @property + def splunk_instance(self): + if self._instance_type is None : + splunk_info = self.info + if hasattr(splunk_info, 'instance_type') : + self._instance_type = splunk_info['instance_type'] + else: + self._instance_type = '' + return self._instance_type + + @property + def disable_v2_api(self): + if self.splunk_instance.lower() == 'cloud': + return self.splunk_version < (9,0,2209) + return self.splunk_version < (9,0,2) + + @property + def kvstore_owner(self): + """Returns the KVStore owner for this instance of Splunk. + + By default is the kvstore owner is not set, it will return "nobody" + :return: A string with the KVStore owner. + """ + if self._kvstore_owner is None: + self._kvstore_owner = "nobody" + return self._kvstore_owner + + @kvstore_owner.setter + def kvstore_owner(self, value): + """ + kvstore is refreshed, when the owner value is changed + """ + self._kvstore_owner = value + self.kvstore + + @property + def kvstore(self): + """Returns the collection of KV Store collections. + + sets the owner for the namespace, before retrieving the KVStore Collection + + :return: A :class:`KVStoreCollections` collection of :class:`KVStoreCollection` entities. + """ + self.namespace['owner'] = self.kvstore_owner + return KVStoreCollections(self) + + @property + def users(self): + """Returns the collection of users. + + :return: A :class:`Users` collection of :class:`User` entities. + """ + return Users(self) + + +class Endpoint: + """This class represents individual Splunk resources in the Splunk REST API. + + An ``Endpoint`` object represents a URI, such as ``/services/saved/searches``. + This class provides the common functionality of :class:`Collection` and + :class:`Entity` (essentially HTTP GET and POST methods). + """ + + def __init__(self, service, path): + self.service = service + self.path = path + + def get_api_version(self, path): + """Return the API version of the service used in the provided path. + + Args: + path (str): A fully-qualified endpoint path (for example, "/services/search/jobs"). + + Returns: + int: Version of the API (for example, 1) + """ + # Default to v1 if undefined in the path + # For example, "/services/search/jobs" is using API v1 + api_version = 1 + + versionSearch = re.search('(?:servicesNS\/[^/]+\/[^/]+|services)\/[^/]+\/v(\d+)\/', path) + if versionSearch: + api_version = int(versionSearch.group(1)) + + return api_version + + def get(self, path_segment="", owner=None, app=None, sharing=None, **query): + """Performs a GET operation on the path segment relative to this endpoint. + + This method is named to match the HTTP method. This method makes at least + one roundtrip to the server, one additional round trip for + each 303 status returned, plus at most two additional round + trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + If *owner*, *app*, and *sharing* are omitted, this method takes a + default namespace from the :class:`Service` object for this :class:`Endpoint`. + All other keyword arguments are included in the URL as query parameters. + + :raises AuthenticationError: Raised when the ``Service`` is not logged in. + :raises HTTPError: Raised when an error in the request occurs. + :param path_segment: A path segment relative to this endpoint. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). 
+        :type owner: ``string``
+        :param app: The app context of the namespace (optional).
+        :type app: ``string``
+        :param sharing: The sharing mode for the namespace (optional).
+        :type sharing: "global", "system", "app", or "user"
+        :param query: All other keyword arguments, which are used as query
+            parameters.
+        :type query: ``string``
+        :return: The response from the server.
+        :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+            and ``status``
+
+        **Example**::
+
+            import splunklib.client
+            s = client.service(...)
+            apps = s.apps
+            apps.get() == \\
+                {'body': ...a response reader object...,
+                 'headers': [('content-length', '26208'),
+                             ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                             ('server', 'Splunkd'),
+                             ('connection', 'close'),
+                             ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                             ('date', 'Fri, 11 May 2012 16:30:35 GMT'),
+                             ('content-type', 'text/xml; charset=utf-8')],
+                 'reason': 'OK',
+                 'status': 200}
+            apps.get('nonexistent/path') # raises HTTPError
+            s.logout()
+            apps.get() # raises AuthenticationError
+        """
+        # self.path to the Endpoint is relative in the SDK, so passing
+        # owner, app, sharing, etc. along will produce the correct
+        # namespace in the final request.
+        if path_segment.startswith('/'):
+            path = path_segment
+        else:
+            if not self.path.endswith('/') and path_segment != "":
+                self.path = self.path + '/'
+            path = self.service._abspath(self.path + path_segment, owner=owner,
+                                         app=app, sharing=sharing)
+        # ^-- This was "%s%s" % (self.path, path_segment).
+        #     That doesn't work, because self.path may be UrlEncoded.
+
+        # Get the API version from the path
+        api_version = self.get_api_version(path)
+
+        # Search API v2+ fallback to v1:
+        # - In v2+, /results_preview, /events and /results do not support search params.
+        # - Fallback from v2+ to v1 if Splunk Version is < 9.
+        # if api_version >= 2 and ('search' in query and path.endswith(tuple(["results_preview", "events", "results"])) or self.service.splunk_version < (9,)):
+        #     path = path.replace(PATH_JOBS_V2, PATH_JOBS)
+
+        if api_version == 1:
+            if isinstance(path, UrlEncoded):
+                path = UrlEncoded(path.replace(PATH_JOBS_V2, PATH_JOBS), skip_encode=True)
+            else:
+                path = path.replace(PATH_JOBS_V2, PATH_JOBS)
+
+        return self.service.get(path,
+                                owner=owner, app=app, sharing=sharing,
+                                **query)
+
+    def post(self, path_segment="", owner=None, app=None, sharing=None, **query):
+        """Performs a POST operation on the path segment relative to this endpoint.
+
+        This method is named to match the HTTP method. This method makes at least
+        one roundtrip to the server, one additional round trip for
+        each 303 status returned, plus at most two additional round trips if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        If *owner*, *app*, and *sharing* are omitted, this method takes a
+        default namespace from the :class:`Service` object for this :class:`Endpoint`.
+        All other keyword arguments are included in the URL as query parameters.
+
+        :raises AuthenticationError: Raised when the ``Service`` is not logged in.
+        :raises HTTPError: Raised when an error in the request occurs.
+        :param path_segment: A path segment relative to this endpoint.
+        :type path_segment: ``string``
+        :param owner: The owner context of the namespace (optional).
+        :type owner: ``string``
+        :param app: The app context of the namespace (optional).
+        :type app: ``string``
+        :param sharing: The sharing mode of the namespace (optional).
+        :type sharing: ``string``
+        :param query: All other keyword arguments, which are used as query
+            parameters.
+        :type query: ``string``
+        :return: The response from the server.
+        :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+            and ``status``
+
+        **Example**::
+
+            import splunklib.client
+            s = client.service(...)
+            apps = s.apps
+            apps.post(name='boris') == \\
+                {'body': ...a response reader object...,
+                 'headers': [('content-length', '2908'),
+                             ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                             ('server', 'Splunkd'),
+                             ('connection', 'close'),
+                             ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                             ('date', 'Fri, 11 May 2012 18:34:50 GMT'),
+                             ('content-type', 'text/xml; charset=utf-8')],
+                 'reason': 'Created',
+                 'status': 201}
+            apps.get('nonexistent/path') # raises HTTPError
+            s.logout()
+            apps.get() # raises AuthenticationError
+        """
+        if path_segment.startswith('/'):
+            path = path_segment
+        else:
+            if not self.path.endswith('/') and path_segment != "":
+                self.path = self.path + '/'
+            path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing)
+
+        # Get the API version from the path
+        api_version = self.get_api_version(path)
+
+        # Search API v2+ fallback to v1:
+        # - In v2+, /results_preview, /events and /results do not support search params.
+        # - Fallback from v2+ to v1 if Splunk Version is < 9.
+        # if api_version >= 2 and ('search' in query and path.endswith(tuple(["results_preview", "events", "results"])) or self.service.splunk_version < (9,)):
+        #     path = path.replace(PATH_JOBS_V2, PATH_JOBS)
+
+        if api_version == 1:
+            if isinstance(path, UrlEncoded):
+                path = UrlEncoded(path.replace(PATH_JOBS_V2, PATH_JOBS), skip_encode=True)
+            else:
+                path = path.replace(PATH_JOBS_V2, PATH_JOBS)
+
+        return self.service.post(path, owner=owner, app=app, sharing=sharing, **query)
+
+
+# kwargs: path, app, owner, sharing, state
+class Entity(Endpoint):
+    """This class is a base class for Splunk entities in the REST API, such as
+    saved searches, jobs, indexes, and inputs.
+
+    ``Entity`` provides the majority of functionality required by entities.
+    Subclasses only implement the special cases for individual entities.
+    For example, for saved searches the subclass makes fields like ``action.email``,
+    ``alert_type``, and ``search`` available.
+
+    An ``Entity`` is addressed like a dictionary, with a few extensions,
+    so the following all work, for example in saved searches::
+
+        ent['action.email']
+        ent['alert_type']
+        ent['search']
+
+    You can also access the fields as though they were the fields of a Python
+    object, as in::
+
+        ent.alert_type
+        ent.search
+
+    However, because some of the field names are not valid Python identifiers,
+    the dictionary-like syntax is preferable.
+
+    The state of an :class:`Entity` object is cached, so accessing a field
+    does not contact the server. If you think the values on the
+    server have changed, call the :meth:`Entity.refresh` method.
+    """
+    # Not every endpoint in the API is an Entity or a Collection. For
+    # example, a saved search at saved/searches/{name} has an additional
+    # method saved/searches/{name}/scheduled_times, but this isn't an
+    # entity in its own right. In these cases, subclasses should
+    # implement a method that uses the get and post methods inherited
+    # from Endpoint, calls the _load_atom function (it's elsewhere in
+    # client.py, but not a method of any object) to read the
+    # information, and returns the extracted data in a Pythonesque form.
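+    #
+    # A minimal sketch of such a method (the endpoint segment here is
+    # hypothetical; the _load_atom/MATCH_ENTRY_CONTENT pattern mirrors
+    # Service.capabilities above)::
+    #
+    #     class Hypothetical(Entity):
+    #         def hypothetical_times(self):
+    #             response = self.get("hypothetical_times")
+    #             return _load_atom(response, MATCH_ENTRY_CONTENT)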
+    #
+    # The primary use of subclasses of Entity is to handle specially
+    # named fields in the Entity. If you only need to provide a default
+    # value for an optional field, subclass Entity and define a
+    # dictionary ``defaults``. For instance,::
+    #
+    #     class Hypothetical(Entity):
+    #         defaults = {'anOptionalField': 'foo',
+    #                     'anotherField': 'bar'}
+    #
+    # If you have to do more than provide a default, such as rename or
+    # actually process values, then define a new method with the
+    # ``@property`` decorator.
+    #
+    #     class Hypothetical(Entity):
+    #         @property
+    #         def foobar(self):
+    #             return self.content['foo'] + "-" + self.content["bar"]
+
+    # Subclasses can override ``defaults``, the default values for
+    # optional fields. See above.
+    defaults = {}
+
+    def __init__(self, service, path, **kwargs):
+        Endpoint.__init__(self, service, path)
+        self._state = None
+        if not kwargs.get('skip_refresh', False):
+            self.refresh(kwargs.get('state', None))  # "Prefresh"
+
+    def __contains__(self, item):
+        try:
+            self[item]
+            return True
+        except (KeyError, AttributeError):
+            return False
+
+    def __eq__(self, other):
+        """Raises IncomparableException.
+
+        Since Entity objects are snapshots of times on the server, no
+        simple definition of equality will suffice beyond instance
+        equality, and instance equality leads to strange situations
+        such as::
+
+            import splunklib.client as client
+            c = client.connect(...)
+            saved_searches = c.saved_searches
+            x = saved_searches['asearch']
+
+        but then ``x != saved_searches['asearch']``, whether or not
+        there was a change on the server. Rather than try to do
+        something fancy, we simply declare that equality is undefined
+        for Entities.
+
+        Makes no roundtrips to the server.
+        """
+        raise IncomparableException(f"Equality is undefined for objects of class {self.__class__.__name__}")
+
+    def __getattr__(self, key):
+        # Called when an attribute was not found by the normal method. In this
+        # case we try to find it in self.content and then self.defaults.
+        if key in self.state.content:
+            return self.state.content[key]
+        if key in self.defaults:
+            return self.defaults[key]
+        raise AttributeError(key)
+
+    def __getitem__(self, key):
+        # getattr attempts to find a field on the object in the normal way,
+        # then calls __getattr__ if it cannot.
+        return getattr(self, key)
+
+    # Load the Atom entry record from the given response - this is a method
+    # because the "entry" record varies slightly by entity and this allows
+    # for a subclass to override and handle any special cases.
+    def _load_atom_entry(self, response):
+        elem = _load_atom(response, XNAME_ENTRY)
+        if isinstance(elem, list):
+            apps = [ele.entry.content.get('eai:appName') for ele in elem]
+
+            raise AmbiguousReferenceException(
+                f"Fetch from server returned multiple entries for name '{elem[0].entry.title}' in apps {apps}.")
+        return elem.entry
+
+    # Load the entity state record from the given response
+    def _load_state(self, response):
+        entry = self._load_atom_entry(response)
+        return _parse_atom_entry(entry)
+
+    def _run_action(self, path_segment, **kwargs):
+        """Run a method and return the content Record from the returned XML.
+
+        A method is a relative path from an Entity that is not itself
+        an Entity. _run_action assumes that the returned XML is an
+        Atom field containing one Entry, and the contents of Entry is
+        what should be the return value. This is right in enough cases
+        to make this method useful.
+ """ + response = self.get(path_segment, **kwargs) + data = self._load_atom_entry(response) + rec = _parse_atom_entry(data) + return rec.content + + def _proper_namespace(self, owner=None, app=None, sharing=None): + """Produce a namespace sans wildcards for use in entity requests. + + This method tries to fill in the fields of the namespace which are `None` + or wildcard (`'-'`) from the entity's namespace. If that fails, it uses + the service's namespace. + + :param owner: + :param app: + :param sharing: + :return: + """ + if owner is None and app is None and sharing is None: # No namespace provided + if self._state is not None and 'access' in self._state: + return (self._state.access.owner, + self._state.access.app, + self._state.access.sharing) + return (self.service.namespace['owner'], + self.service.namespace['app'], + self.service.namespace['sharing']) + return owner, app, sharing + + def delete(self): + owner, app, sharing = self._proper_namespace() + return self.service.delete(self.path, owner=owner, app=app, sharing=sharing) + + def get(self, path_segment="", owner=None, app=None, sharing=None, **query): + owner, app, sharing = self._proper_namespace(owner, app, sharing) + return super().get(path_segment, owner=owner, app=app, sharing=sharing, **query) + + def post(self, path_segment="", owner=None, app=None, sharing=None, **query): + owner, app, sharing = self._proper_namespace(owner, app, sharing) + return super().post(path_segment, owner=owner, app=app, sharing=sharing, **query) + + def refresh(self, state=None): + """Refreshes the state of this entity. + + If *state* is provided, load it as the new state for this + entity. Otherwise, make a roundtrip to the server (by calling + the :meth:`read` method of ``self``) to fetch an updated state, + plus at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param state: Entity-specific arguments (optional). + :type state: ``dict`` + :raises EntityDeletedException: Raised if the entity no longer exists on + the server. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + search = s.apps['search'] + search.refresh() + """ + if state is not None: + self._state = state + else: + self._state = self.read(self.get()) + return self + + @property + def access(self): + """Returns the access metadata for this entity. + + :return: A :class:`splunklib.data.Record` object with three keys: + ``owner``, ``app``, and ``sharing``. + """ + return self.state.access + + @property + def content(self): + """Returns the contents of the entity. + + :return: A ``dict`` containing values. + """ + return self.state.content + + def disable(self): + """Disables the entity at this endpoint.""" + self.post("disable") + return self + + def enable(self): + """Enables the entity at this endpoint.""" + self.post("enable") + return self + + @property + def fields(self): + """Returns the content metadata for this entity. + + :return: A :class:`splunklib.data.Record` object with three keys: + ``required``, ``optional``, and ``wildcard``. + """ + return self.state.fields + + @property + def links(self): + """Returns a dictionary of related resources. + + :return: A ``dict`` with keys and corresponding URLs. + """ + return self.state.links + + @property + def name(self): + """Returns the entity name. + + :return: The entity name. + :rtype: ``string`` + """ + return self.state.title + + def read(self, response): + """ Reads the current state of the entity from the server. 
""" + results = self._load_state(response) + # In lower layers of the SDK, we end up trying to URL encode + # text to be dispatched via HTTP. However, these links are already + # URL encoded when they arrive, and we need to mark them as such. + unquoted_links = dict((k, UrlEncoded(v, skip_encode=True)) + for k, v in results['links'].items()) + results['links'] = unquoted_links + return results + + def reload(self): + """Reloads the entity.""" + self.post("_reload") + return self + + def acl_update(self, **kwargs): + """To update Access Control List (ACL) properties for an endpoint. + + :param kwargs: Additional entity-specific arguments (required). + + - "owner" (``string``): The Splunk username, such as "admin". A value of "nobody" means no specific user (required). + + - "sharing" (``string``): A mode that indicates how the resource is shared. The sharing mode can be "user", "app", "global", or "system" (required). + + :type kwargs: ``dict`` + + **Example**:: + + import splunklib.client as client + service = client.connect(...) + saved_search = service.saved_searches["name"] + saved_search.acl_update(sharing="app", owner="nobody", app="search", **{"perms.read": "admin, nobody"}) + """ + if "body" not in kwargs: + kwargs = {"body": kwargs} + + if "sharing" not in kwargs["body"]: + raise ValueError("Required argument 'sharing' is missing.") + if "owner" not in kwargs["body"]: + raise ValueError("Required argument 'owner' is missing.") + + self.post("acl", **kwargs) + self.refresh() + return self + + @property + def state(self): + """Returns the entity's state record. + + :return: A ``dict`` containing fields and metadata for the entity. + """ + if self._state is None: self.refresh() + return self._state + + def update(self, **kwargs): + """Updates the server with any changes you've made to the current entity + along with any additional arguments you specify. + + **Note**: You cannot update the ``name`` field of an entity. + + Many of the fields in the REST API are not valid Python + identifiers, which means you cannot pass them as keyword + arguments. That is, Python will fail to parse the following:: + + # This fails + x.update(check-new=False, email.to='boris@utopia.net') + + However, you can always explicitly use a dictionary to pass + such keys:: + + # This works + x.update(**{'check-new': False, 'email.to': 'boris@utopia.net'}) + + :param kwargs: Additional entity-specific arguments (optional). + :type kwargs: ``dict`` + + :return: The entity this method is called on. + :rtype: class:`Entity` + """ + # The peculiarity in question: the REST API creates a new + # Entity if we pass name in the dictionary, instead of the + # expected behavior of updating this Entity. Therefore, we + # check for 'name' in kwargs and throw an error if it is + # there. + if 'name' in kwargs: + raise IllegalOperationException('Cannot update the name of an Entity via the REST API.') + self.post(**kwargs) + return self + + +class ReadOnlyCollection(Endpoint): + """This class represents a read-only collection of entities in the Splunk + instance. + """ + + def __init__(self, service, path, item=Entity): + Endpoint.__init__(self, service, path) + self.item = item # Item accessor + self.null_count = -1 + + def __contains__(self, name): + """Is there at least one entry called *name* in this collection? + + Makes a single roundtrip to the server, plus at most two more + if + the ``autologin`` field of :func:`connect` is set to ``True``. 
+ """ + try: + self[name] + return True + except KeyError: + return False + except AmbiguousReferenceException: + return True + + def __getitem__(self, key): + """Fetch an item named *key* from this collection. + + A name is not a unique identifier in a collection. The unique + identifier is a name plus a namespace. For example, there can + be a saved search named ``'mysearch'`` with sharing ``'app'`` + in application ``'search'``, and another with sharing + ``'user'`` with owner ``'boris'`` and application + ``'search'``. If the ``Collection`` is attached to a + ``Service`` that has ``'-'`` (wildcard) as user and app in its + namespace, then both of these may be visible under the same + name. + + Where there is no conflict, ``__getitem__`` will fetch the + entity given just the name. If there is a conflict, and you + pass just a name, it will raise a ``ValueError``. In that + case, add the namespace as a second argument. + + This function makes a single roundtrip to the server, plus at + most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param key: The name to fetch, or a tuple (name, namespace). + :return: An :class:`Entity` object. + :raises KeyError: Raised if *key* does not exist. + :raises ValueError: Raised if no namespace is specified and *key* + does not refer to a unique name. + + **Example**:: + + s = client.connect(...) + saved_searches = s.saved_searches + x1 = saved_searches.create( + 'mysearch', 'search * | head 1', + owner='admin', app='search', sharing='app') + x2 = saved_searches.create( + 'mysearch', 'search * | head 1', + owner='admin', app='search', sharing='user') + # Raises ValueError: + saved_searches['mysearch'] + # Fetches x1 + saved_searches[ + 'mysearch', + client.namespace(sharing='app', app='search')] + # Fetches x2 + saved_searches[ + 'mysearch', + client.namespace(sharing='user', owner='boris', app='search')] + """ + try: + if isinstance(key, tuple) and len(key) == 2: + # x[a,b] is translated to x.__getitem__( (a,b) ), so we + # have to extract values out. + key, ns = key + key = UrlEncoded(key, encode_slash=True) + response = self.get(key, owner=ns.owner, app=ns.app) + else: + key = UrlEncoded(key, encode_slash=True) + response = self.get(key) + entries = self._load_list(response) + if len(entries) > 1: + raise AmbiguousReferenceException( + f"Found multiple entities named '{key}'; please specify a namespace.") + if len(entries) == 0: + raise KeyError(key) + return entries[0] + except HTTPError as he: + if he.status == 404: # No entity matching key and namespace. + raise KeyError(key) + else: + raise + + def __iter__(self, **kwargs): + """Iterate over the entities in the collection. + + :param kwargs: Additional arguments. + :type kwargs: ``dict`` + :rtype: iterator over entities. + + Implemented to give Collection a listish interface. This + function always makes a roundtrip to the server, plus at most + two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + **Example**:: + + import splunklib.client as client + c = client.connect(...) + saved_searches = c.saved_searches + for entity in saved_searches: + print(f"Saved search named {entity.name}") + """ + + for item in self.iter(**kwargs): + yield item + + def __len__(self): + """Enable ``len(...)`` for ``Collection`` objects. + + Implemented for consistency with a listish interface. No + further failure modes beyond those possible for any method on + an Endpoint. 
+
+        This function always makes a round trip to the server, plus at
+        most two additional round trips if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        **Example**::
+
+            import splunklib.client as client
+            c = client.connect(...)
+            saved_searches = c.saved_searches
+            n = len(saved_searches)
+        """
+        return len(self.list())
+
+    def _entity_path(self, state):
+        """Calculate the path to an entity to be returned.
+
+        *state* should be the dictionary returned by
+        :func:`_parse_atom_entry`. :func:`_entity_path` extracts the
+        link to this entity from *state*, and strips all the namespace
+        prefixes from it to leave only the relative path of the entity
+        itself, sans namespace.
+
+        :rtype: ``string``
+        :return: an absolute path
+        """
+        # This has been factored out so that it can be easily
+        # overloaded by Configurations, which has to switch its
+        # entities' endpoints from its own properties/ to configs/.
+        raw_path = parse.unquote(state.links.alternate)
+        if 'servicesNS/' in raw_path:
+            return _trailing(raw_path, 'servicesNS/', '/', '/')
+        if 'services/' in raw_path:
+            return _trailing(raw_path, 'services/')
+        return raw_path
+
+    def _load_list(self, response):
+        """Converts *response* to a list of entities.
+
+        *response* is assumed to be a :class:`Record` containing an
+        HTTP response, of the form::
+
+            {'status': 200,
+             'headers': [('content-length', '232642'),
+                         ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                         ('server', 'Splunkd'),
+                         ('connection', 'close'),
+                         ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                         ('date', 'Tue, 29 May 2012 15:27:08 GMT'),
+                         ('content-type', 'text/xml; charset=utf-8')],
+             'reason': 'OK',
+             'body': ...a stream implementing .read()...}
+
+        The ``'body'`` key refers to a stream containing an Atom feed,
+        that is, an XML document with a toplevel element ``<feed>``,
+        and within that element one or more ``<entry>`` elements.
+        """
+        # Some subclasses of Collection have to override this because
+        # splunkd returns something that doesn't match
+        # <feed><entry></entry></feed>.
+        entries = _load_atom_entries(response)
+        if entries is None: return []
+        entities = []
+        for entry in entries:
+            state = _parse_atom_entry(entry)
+            entity = self.item(
+                self.service,
+                self._entity_path(state),
+                state=state)
+            entities.append(entity)
+
+        return entities
+
+    def itemmeta(self):
+        """Returns metadata for members of the collection.
+
+        Makes a single roundtrip to the server, plus two more at most if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :return: A :class:`splunklib.data.Record` object containing the metadata.
+
+        **Example**::
+
+            import splunklib.client as client
+            import pprint
+            s = client.connect(...)
+            pprint.pprint(s.apps.itemmeta())
+            {'access': {'app': 'search',
+                        'can_change_perms': '1',
+                        'can_list': '1',
+                        'can_share_app': '1',
+                        'can_share_global': '1',
+                        'can_share_user': '1',
+                        'can_write': '1',
+                        'modifiable': '1',
+                        'owner': 'admin',
+                        'perms': {'read': ['*'], 'write': ['admin']},
+                        'removable': '0',
+                        'sharing': 'user'},
+             'fields': {'optional': ['author',
+                                     'configured',
+                                     'description',
+                                     'label',
+                                     'manageable',
+                                     'template',
+                                     'visible'],
+                        'required': ['name'], 'wildcard': []}}
+        """
+        response = self.get("_new")
+        content = _load_atom(response, MATCH_ENTRY_CONTENT)
+        return _parse_atom_metadata(content)
+
+    def iter(self, offset=0, count=None, pagesize=None, **kwargs):
+        """Iterates over the collection.
+ + This method is equivalent to the :meth:`list` method, but + it returns an iterator and can load a certain number of entities at a + time from the server. + + :param offset: The index of the first entity to return (optional). + :type offset: ``integer`` + :param count: The maximum number of entities to return (optional). + :type count: ``integer`` + :param pagesize: The number of entities to load (optional). + :type pagesize: ``integer`` + :param kwargs: Additional arguments (optional): + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + for saved_search in s.saved_searches.iter(pagesize=10): + # Loads 10 saved searches at a time from the + # server. + ... + """ + assert pagesize is None or pagesize > 0 + if count is None: + count = self.null_count + fetched = 0 + while count == self.null_count or fetched < count: + response = self.get(count=pagesize or count, offset=offset, **kwargs) + items = self._load_list(response) + N = len(items) + fetched += N + for item in items: + yield item + if pagesize is None or N < pagesize: + break + offset += N + logger.debug("pagesize=%d, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs) + + # kwargs: count, offset, search, sort_dir, sort_key, sort_mode + def list(self, count=None, **kwargs): + """Retrieves a list of entities in this collection. + + The entire collection is loaded at once and is returned as a list. This + function makes a single roundtrip to the server, plus at most two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + There is no caching--every call makes at least one round trip. + + :param count: The maximum number of entities to return (optional). + :type count: ``integer`` + :param kwargs: Additional arguments (optional): + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + :return: A ``list`` of entities. + """ + # response = self.get(count=count, **kwargs) + # return self._load_list(response) + return list(self.iter(count=count, **kwargs)) + + +class Collection(ReadOnlyCollection): + """A collection of entities. + + Splunk provides a number of different collections of distinct + entity types: applications, saved searches, fired alerts, and a + number of others. Each particular type is available separately + from the Splunk instance, and the entities of that type are + returned in a :class:`Collection`. + + The interface for :class:`Collection` does not quite match either + ``list`` or ``dict`` in Python, because there are enough semantic + mismatches with either to make its behavior surprising. A unique + element in a :class:`Collection` is defined by a string giving its + name plus namespace (although the namespace is optional if the name is + unique). 
+
+    **Example**::
+
+        import splunklib.client as client
+        service = client.connect(...)
+        mycollection = service.saved_searches
+        mysearch = mycollection['my_search', client.namespace(owner='boris', app='natasha', sharing='user')]
+        # Or if there is only one search visible named 'my_search'
+        mysearch = mycollection['my_search']
+
+    Similarly, ``name`` in ``mycollection`` works as you might expect (though
+    you cannot currently pass a namespace to the ``in`` operator), as does
+    ``len(mycollection)``.
+
+    However, as an aggregate, :class:`Collection` behaves more like a
+    list. If you iterate over a :class:`Collection`, you get an
+    iterator over the entities, not the names and namespaces.
+
+    **Example**::
+
+        for entity in mycollection:
+            assert isinstance(entity, client.Entity)
+
+    Use the :meth:`create` and :meth:`delete` methods to create and delete
+    entities in this collection. To view the access control list and other
+    metadata of the collection, use the :meth:`ReadOnlyCollection.itemmeta` method.
+
+    :class:`Collection` does no caching. Each call makes at least one
+    round trip to the server to fetch data.
+    """
+
+    def create(self, name, **params):
+        """Creates a new entity in this collection.
+
+        This function makes either one or two roundtrips to the
+        server, depending on the type of entities in this
+        collection, plus at most two more if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :param name: The name of the entity to create.
+        :type name: ``string``
+        :param namespace: A namespace, as created by the :func:`splunklib.binding.namespace`
+            function (optional). You can also set ``owner``, ``app``, and
+            ``sharing`` in ``params``.
+        :type namespace: A :class:`splunklib.data.Record` object with keys ``owner``, ``app``,
+            and ``sharing``.
+        :param params: Additional entity-specific arguments (optional).
+        :type params: ``dict``
+        :return: The new entity.
+        :rtype: A subclass of :class:`Entity`, chosen by the collection's
+            ``item`` field.
+
+        **Example**::
+
+            import splunklib.client as client
+            s = client.connect(...)
+            applications = s.apps
+            new_app = applications.create("my_fake_app")
+        """
+        if not isinstance(name, str):
+            raise InvalidNameException(f"{name} is not a valid name for an entity.")
+        if 'namespace' in params:
+            namespace = params.pop('namespace')
+            params['owner'] = namespace.owner
+            params['app'] = namespace.app
+            params['sharing'] = namespace.sharing
+        response = self.post(name=name, **params)
+        atom = _load_atom(response, XNAME_ENTRY)
+        if atom is None:
+            # This endpoint doesn't return the content of the new
+            # item. We have to go fetch it ourselves.
+            return self[name]
+        entry = atom.entry
+        state = _parse_atom_entry(entry)
+        entity = self.item(
+            self.service,
+            self._entity_path(state),
+            state=state)
+        return entity
+
+    def delete(self, name, **params):
+        """Deletes a specified entity from the collection.
+
+        :param name: The name of the entity to delete.
+        :type name: ``string``
+        :return: The collection.
+        :rtype: ``self``
+
+        This method is implemented for consistency with the REST API's DELETE
+        method.
+
+        If there is no *name* entity on the server, a ``KeyError`` is
+        thrown. This function always makes a roundtrip to the server.
+
+        **Example**::
+
+            import splunklib.client as client
+            c = client.connect(...)
+            saved_searches = c.saved_searches
+            saved_searches.create('my_saved_search',
+                                  'search * | head 1')
+            assert 'my_saved_search' in saved_searches
+            saved_searches.delete('my_saved_search')
+            assert 'my_saved_search' not in saved_searches
+        """
+        name = UrlEncoded(name, encode_slash=True)
+        if 'namespace' in params:
+            namespace = params.pop('namespace')
+            params['owner'] = namespace.owner
+            params['app'] = namespace.app
+            params['sharing'] = namespace.sharing
+        try:
+            self.service.delete(_path(self.path, name), **params)
+        except HTTPError as he:
+            # An HTTPError with status code 404 means that the entity
+            # has already been deleted, and we reraise it as a
+            # KeyError.
+            if he.status == 404:
+                raise KeyError(f"No such entity {name}")
+            else:
+                raise
+        return self
+
+    def get(self, name="", owner=None, app=None, sharing=None, **query):
+        """Performs a GET request to the server on the collection.
+
+        If *owner*, *app*, and *sharing* are omitted, this method takes a
+        default namespace from the :class:`Service` object for this :class:`Endpoint`.
+        All other keyword arguments are included in the URL as query parameters.
+
+        :raises AuthenticationError: Raised when the ``Service`` is not logged in.
+        :raises HTTPError: Raised when an error in the request occurs.
+        :param path_segment: A path segment relative to this endpoint.
+        :type path_segment: ``string``
+        :param owner: The owner context of the namespace (optional).
+        :type owner: ``string``
+        :param app: The app context of the namespace (optional).
+        :type app: ``string``
+        :param sharing: The sharing mode for the namespace (optional).
+        :type sharing: "global", "system", "app", or "user"
+        :param query: All other keyword arguments, which are used as query
+            parameters.
+        :type query: ``string``
+        :return: The response from the server.
+        :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+            and ``status``
+
+        **Example**::
+
+            import splunklib.client
+            s = client.service(...)
+            saved_searches = s.saved_searches
+            saved_searches.get("my/saved/search") == \\
+                {'body': ...a response reader object...,
+                 'headers': [('content-length', '26208'),
+                             ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                             ('server', 'Splunkd'),
+                             ('connection', 'close'),
+                             ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                             ('date', 'Fri, 11 May 2012 16:30:35 GMT'),
+                             ('content-type', 'text/xml; charset=utf-8')],
+                 'reason': 'OK',
+                 'status': 200}
+            saved_searches.get('nonexistent/search') # raises HTTPError
+            s.logout()
+            saved_searches.get() # raises AuthenticationError
+
+        """
+        name = UrlEncoded(name, encode_slash=True)
+        return super().get(name, owner, app, sharing, **query)
+
+
+class ConfigurationFile(Collection):
+    """This class contains all of the stanzas from one configuration file.
+    """
+
+    # __init__'s arguments must match those of an Entity, not a
+    # Collection, since it is being created as the elements of a
+    # Configurations, which is a Collection subclass.
+    def __init__(self, service, path, **kwargs):
+        Collection.__init__(self, service, path, item=Stanza)
+        self.name = kwargs['state']['title']
+
+
+class Configurations(Collection):
+    """This class provides access to the configuration files from this Splunk
+    instance. Retrieve this collection using :meth:`Service.confs`.
+
+    Splunk's configuration is divided into files, and each file into
+    stanzas. This collection is unusual in that the values in it are
+    themselves collections of :class:`ConfigurationFile` objects.
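+
+    **Example** (a sketch; the file and stanza names are illustrative)::
+
+        import splunklib.client as client
+        service = client.connect(...)
+        props = service.confs['props']   # a ConfigurationFile
+        for stanza in props:             # each stanza is a Stanza entity
+            print(stanza.name)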
+ """ + + def __init__(self, service): + Collection.__init__(self, service, PATH_PROPERTIES, item=ConfigurationFile) + if self.service.namespace.owner == '-' or self.service.namespace.app == '-': + raise ValueError("Configurations cannot have wildcards in namespace.") + + def __getitem__(self, key): + # The superclass implementation is designed for collections that contain + # entities. This collection (Configurations) contains collections + # (ConfigurationFile). + # + # The configurations endpoint returns multiple entities when we ask for a single file. + # This screws up the default implementation of __getitem__ from Collection, which thinks + # that multiple entities means a name collision, so we have to override it here. + try: + self.get(key) + return ConfigurationFile(self.service, PATH_CONF % key, state={'title': key}) + except HTTPError as he: + if he.status == 404: # No entity matching key + raise KeyError(key) + else: + raise + + def __contains__(self, key): + # configs/conf-{name} never returns a 404. We have to post to properties/{name} + # in order to find out if a configuration exists. + try: + self.get(key) + return True + except HTTPError as he: + if he.status == 404: # No entity matching key + return False + raise + + def create(self, name): + """ Creates a configuration file named *name*. + + If there is already a configuration file with that name, + the existing file is returned. + + :param name: The name of the configuration file. + :type name: ``string`` + + :return: The :class:`ConfigurationFile` object. + """ + # This has to be overridden to handle the plumbing of creating + # a ConfigurationFile (which is a Collection) instead of some + # Entity. + if not isinstance(name, str): + raise ValueError(f"Invalid name: {repr(name)}") + response = self.post(__conf=name) + if response.status == 303: + return self[name] + if response.status == 201: + return ConfigurationFile(self.service, PATH_CONF % name, item=Stanza, state={'title': name}) + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") + + def delete(self, key): + """Raises `IllegalOperationException`.""" + raise IllegalOperationException("Cannot delete configuration files from the REST API.") + + def _entity_path(self, state): + # Overridden to make all the ConfigurationFile objects + # returned refer to the configs/ path instead of the + # properties/ path used by Configrations. + return PATH_CONF % state['title'] + + +class Stanza(Entity): + """This class contains a single configuration stanza.""" + + def submit(self, stanza): + """Adds keys to the current configuration stanza as a + dictionary of key-value pairs. + + :param stanza: A dictionary of key-value pairs for the stanza. + :type stanza: ``dict`` + :return: The :class:`Stanza` object. + """ + body = _encode(**stanza) + self.service.post(self.path, body=body) + return self + + def __len__(self): + # The stanza endpoint returns all the keys at the same level in the XML as the eai information + # and 'disabled', so to get an accurate length, we have to filter those out and have just + # the stanza keys. + return len([x for x in self._state.content.keys() + if not x.startswith('eai') and x != 'disabled']) + + +class StoragePassword(Entity): + """This class contains a storage password. 
+ """ + + def __init__(self, service, path, **kwargs): + state = kwargs.get('state', None) + kwargs['skip_refresh'] = kwargs.get('skip_refresh', state is not None) + super().__init__(service, path, **kwargs) + self._state = state + + @property + def clear_password(self): + return self.content.get('clear_password') + + @property + def encrypted_password(self): + return self.content.get('encr_password') + + @property + def realm(self): + return self.content.get('realm') + + @property + def username(self): + return self.content.get('username') + + +class StoragePasswords(Collection): + """This class provides access to the storage passwords from this Splunk + instance. Retrieve this collection using :meth:`Service.storage_passwords`. + """ + + def __init__(self, service): + if service.namespace.owner == '-' or service.namespace.app == '-': + raise ValueError("StoragePasswords cannot have wildcards in namespace.") + super().__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) + + def create(self, password, username, realm=None): + """ Creates a storage password. + + A `StoragePassword` can be identified by , or by : if the + optional realm parameter is also provided. + + :param password: The password for the credentials - this is the only part of the credentials that will be stored securely. + :type name: ``string`` + :param username: The username for the credentials. + :type name: ``string`` + :param realm: The credential realm. (optional) + :type name: ``string`` + + :return: The :class:`StoragePassword` object created. + """ + if not isinstance(username, str): + raise ValueError(f"Invalid name: {repr(username)}") + + if realm is None: + response = self.post(password=password, name=username) + else: + response = self.post(password=password, realm=realm, name=username) + + if response.status != 201: + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") + + entries = _load_atom_entries(response) + state = _parse_atom_entry(entries[0]) + storage_password = StoragePassword(self.service, self._entity_path(state), state=state, skip_refresh=True) + + return storage_password + + def delete(self, username, realm=None): + """Delete a storage password by username and/or realm. + + The identifier can be passed in through the username parameter as + or :, but the preferred way is by + passing in the username and realm parameters. + + :param username: The username for the credentials, or : if the realm parameter is omitted. + :type name: ``string`` + :param realm: The credential realm. (optional) + :type name: ``string`` + :return: The `StoragePassword` collection. + :rtype: ``self`` + """ + if realm is None: + # This case makes the username optional, so + # the full name can be passed in as realm. + # Assume it's already encoded. + name = username + else: + # Encode each component separately + name = UrlEncoded(realm, encode_slash=True) + ":" + UrlEncoded(username, encode_slash=True) + + # Append the : expected at the end of the name + if name[-1] != ":": + name = name + ":" + return Collection.delete(self, name) + + +class AlertGroup(Entity): + """This class represents a group of fired alerts for a saved search. Access + it using the :meth:`alerts` property.""" + + def __init__(self, service, path, **kwargs): + Entity.__init__(self, service, path, **kwargs) + + def __len__(self): + return self.count + + @property + def alerts(self): + """Returns a collection of triggered alerts. + + :return: A :class:`Collection` of triggered alerts. 
+ """ + return Collection(self.service, self.path) + + @property + def count(self): + """Returns the count of triggered alerts. + + :return: The triggered alert count. + :rtype: ``integer`` + """ + return int(self.content.get('triggered_alert_count', 0)) + + +class Indexes(Collection): + """This class contains the collection of indexes in this Splunk instance. + Retrieve this collection using :meth:`Service.indexes`. + """ + + def get_default(self): + """ Returns the name of the default index. + + :return: The name of the default index. + + """ + index = self['_audit'] + return index['defaultDatabase'] + + def delete(self, name): + """ Deletes a given index. + + **Note**: This method is only supported in Splunk 5.0 and later. + + :param name: The name of the index to delete. + :type name: ``string`` + """ + if self.service.splunk_version >= (5,): + Collection.delete(self, name) + else: + raise IllegalOperationException("Deleting indexes via the REST API is " + "not supported before Splunk version 5.") + + +class Index(Entity): + """This class represents an index and provides different operations, such as + cleaning the index, writing to the index, and so forth.""" + + def __init__(self, service, path, **kwargs): + Entity.__init__(self, service, path, **kwargs) + + def attach(self, host=None, source=None, sourcetype=None): + """Opens a stream (a writable socket) for writing events to the index. + + :param host: The host value for events written to the stream. + :type host: ``string`` + :param source: The source value for events written to the stream. + :type source: ``string`` + :param sourcetype: The sourcetype value for events written to the + stream. + :type sourcetype: ``string`` + + :return: A writable socket. + """ + args = {'index': self.name} + if host is not None: args['host'] = host + if source is not None: args['source'] = source + if sourcetype is not None: args['sourcetype'] = sourcetype + path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + parse.urlencode(args), skip_encode=True) + + cookie_header = self.service.token if self.service.token is _NoAuthenticationToken else self.service.token.replace("Splunk ", "") + cookie_or_auth_header = f"Authorization: Splunk {cookie_header}\r\n" + + # If we have cookie(s), use them instead of "Authorization: ..." + if self.service.has_cookies(): + cookie_header = _make_cookie_header(self.service.get_cookies().items()) + cookie_or_auth_header = f"Cookie: {cookie_header}\r\n" + + # Since we need to stream to the index connection, we have to keep + # the connection open and use the Splunk extension headers to note + # the input mode + sock = self.service.connect() + headers = [f"POST {str(self.service._abspath(path))} HTTP/1.1\r\n".encode('utf-8'), + f"Host: {self.service.host}:{int(self.service.port)}\r\n".encode('utf-8'), + b"Accept-Encoding: identity\r\n", + cookie_or_auth_header.encode('utf-8'), + b"X-Splunk-Input-Mode: Streaming\r\n", + b"\r\n"] + + for h in headers: + sock.write(h) + return sock + + @contextlib.contextmanager + def attached_socket(self, *args, **kwargs): + """Opens a raw socket in a ``with`` block to write data to Splunk. + + The arguments are identical to those for :meth:`attach`. The socket is + automatically closed at the end of the ``with`` block, even if an + exception is raised in the block. + + :param host: The host value for events written to the stream. + :type host: ``string`` + :param source: The source value for events written to the stream. 
+ :type source: ``string`` + :param sourcetype: The sourcetype value for events written to the + stream. + :type sourcetype: ``string`` + + :returns: Nothing. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + index = s.indexes['some_index'] + with index.attached_socket(sourcetype='test') as sock: + sock.send('Test event\\r\\n') + + """ + try: + sock = self.attach(*args, **kwargs) + yield sock + finally: + sock.shutdown(socket.SHUT_RDWR) + sock.close() + + def clean(self, timeout=60): + """Deletes the contents of the index. + + This method blocks until the index is empty, because it needs to restore + values at the end of the operation. + + :param timeout: The time-out period for the operation, in seconds (the + default is 60). + :type timeout: ``integer`` + + :return: The :class:`Index`. + """ + self.refresh() + + tds = self['maxTotalDataSizeMB'] + ftp = self['frozenTimePeriodInSecs'] + was_disabled_initially = self.disabled + try: + if not was_disabled_initially and self.service.splunk_version < (5,): + # Need to disable the index first on Splunk 4.x, + # but it doesn't work to disable it on 5.0. + self.disable() + self.update(maxTotalDataSizeMB=1, frozenTimePeriodInSecs=1) + self.roll_hot_buckets() + + # Wait until event count goes to 0. + start = datetime.now() + diff = timedelta(seconds=timeout) + while self.content.totalEventCount != '0' and datetime.now() < start + diff: + sleep(1) + self.refresh() + + if self.content.totalEventCount != '0': + raise OperationError( + f"Cleaning index {self.name} took longer than {timeout} seconds; timing out.") + finally: + # Restore original values + self.update(maxTotalDataSizeMB=tds, frozenTimePeriodInSecs=ftp) + if not was_disabled_initially and self.service.splunk_version < (5,): + # Re-enable the index if it was originally enabled and we messed with it. + self.enable() + + return self + + def roll_hot_buckets(self): + """Performs rolling hot buckets for this index. + + :return: The :class:`Index`. + """ + self.post("roll-hot-buckets") + return self + + def submit(self, event, host=None, source=None, sourcetype=None): + """Submits a single event to the index using ``HTTP POST``. + + :param event: The event to submit. + :type event: ``string`` + :param `host`: The host value of the event. + :type host: ``string`` + :param `source`: The source value of the event. + :type source: ``string`` + :param `sourcetype`: The sourcetype value of the event. + :type sourcetype: ``string`` + + :return: The :class:`Index`. + """ + args = {'index': self.name} + if host is not None: args['host'] = host + if source is not None: args['source'] = source + if sourcetype is not None: args['sourcetype'] = sourcetype + + self.service.post(PATH_RECEIVERS_SIMPLE, body=event, **args) + return self + + # kwargs: host, host_regex, host_segment, rename-source, sourcetype + def upload(self, filename, **kwargs): + """Uploads a file for immediate indexing. + + **Note**: The file must be locally accessible from the server. + + :param filename: The name of the file to upload. The file can be a + plain, compressed, or archived file. + :type filename: ``string`` + :param kwargs: Additional arguments (optional). For more about the + available parameters, see `Index parameters `_ on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The :class:`Index`. 
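+
+        **Example** (a sketch; the file path and sourcetype are
+        illustrative, and the file must exist on the server)::
+
+            index = service.indexes['main']
+            index.upload('/var/log/sample.log', sourcetype='access_combined')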
+        """
+        kwargs['index'] = self.name
+        path = 'data/inputs/oneshot'
+        self.service.post(path, name=filename, **kwargs)
+        return self
+
+
+class Input(Entity):
+    """This class represents a Splunk input. This class is the base for all
+    typed input classes and is also used when the client does not recognize an
+    input kind.
+    """
+
+    def __init__(self, service, path, kind=None, **kwargs):
+        # kind can be omitted (in which case it is inferred from the path)
+        # Otherwise, valid values are the paths from data/inputs ("udp",
+        # "monitor", "tcp/raw"), or two special cases: "tcp" (which is "tcp/raw")
+        # and "splunktcp" (which is "tcp/cooked").
+        Entity.__init__(self, service, path, **kwargs)
+        if kind is None:
+            path_segments = path.split('/')
+            i = path_segments.index('inputs') + 1
+            if path_segments[i] == 'tcp':
+                self.kind = path_segments[i] + '/' + path_segments[i + 1]
+            else:
+                self.kind = path_segments[i]
+        else:
+            self.kind = kind
+
+        # Handle old input kind names.
+        if self.kind == 'tcp':
+            self.kind = 'tcp/raw'
+        if self.kind == 'splunktcp':
+            self.kind = 'tcp/cooked'
+
+    def update(self, **kwargs):
+        """Updates the server with any changes you've made to the current input
+        along with any additional arguments you specify.
+
+        :param kwargs: Additional arguments (optional). For more about the
+            available parameters, see `Input parameters `_ on Splunk Developer Portal.
+        :type kwargs: ``dict``
+
+        :return: The input this method was called on.
+        :rtype: :class:`Input`
+        """
+        # UDP and TCP inputs require special handling due to their restrictToHost
+        # field. For all other input kinds, we can dispatch to the superclass method.
+        if self.kind not in ['tcp', 'splunktcp', 'tcp/raw', 'tcp/cooked', 'udp']:
+            return super().update(**kwargs)
+        else:
+            # The behavior of restrictToHost is inconsistent across input kinds and versions of Splunk.
+            # In Splunk 4.x, the name of the entity is only the port, independent of the value of
+            # restrictToHost. In Splunk 5.0 this changed so the name will be of the form <restrictToHost>:<port>.
+            # In 5.0 and 5.0.1, if you don't supply the restrictToHost value on every update, it will
+            # remove the host restriction from the input. As of 5.0.2 you simply can't change restrictToHost
+            # on an existing input.
+
+            # The logic to handle all these cases:
+            # - Throw an exception if the user tries to set restrictToHost on an existing input
+            #   for *any* version of Splunk.
+            # - Set the existing restrictToHost value on the update args internally so we don't
+            #   cause it to change in Splunk 5.0 and 5.0.1.
+            to_update = kwargs.copy()
+
+            if 'restrictToHost' in kwargs:
+                raise IllegalOperationException("Cannot set restrictToHost on an existing input with the SDK.")
+            if 'restrictToHost' in self._state.content and self.kind != 'udp':
+                to_update['restrictToHost'] = self._state.content['restrictToHost']
+
+            # Do the actual update operation.
+            return super().update(**to_update)
+
+
+# Inputs is a "kinded" collection, which is a heterogeneous collection where
+# each item is tagged with a kind; this provides a single merged view of all
+# input kinds.
+class Inputs(Collection):
+    """This class represents a collection of inputs. The collection is
+    heterogeneous and each member of the collection contains a *kind* property
+    that indicates the specific type of input.
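+
+    **Example** (a minimal sketch; the monitor path below is a placeholder,
+    and ``connect(...)`` stands in for real connection arguments)::
+
+        import splunklib.client as client
+        service = client.connect(...)
+        for item in service.inputs:
+            print(item.name, item.kind)
+        monitor = service.inputs['/var/log/syslog', 'monitor']
+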
+    Retrieve this collection using :meth:`Service.inputs`."""
+
+    def __init__(self, service, kindmap=None):
+        Collection.__init__(self, service, PATH_INPUTS, item=Input)
+
+    def __getitem__(self, key):
+        # The key used to retrieve the input needs its parentheses to be URL
+        # encoded, per the REST API for inputs.
+        if isinstance(key, tuple) and len(key) == 2:
+            # Fetch a single kind
+            key, kind = key
+            key = UrlEncoded(key, encode_slash=True)
+            try:
+                response = self.get(self.kindpath(kind) + "/" + key)
+                entries = self._load_list(response)
+                if len(entries) > 1:
+                    raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.")
+                if len(entries) == 0:
+                    raise KeyError((key, kind))
+                return entries[0]
+            except HTTPError as he:
+                if he.status == 404:  # No entity matching kind and key
+                    raise KeyError((key, kind))
+                else:
+                    raise
+        else:
+            # Iterate over all the kinds looking for matches.
+            kind = None
+            candidate = None
+            key = UrlEncoded(key, encode_slash=True)
+            for kind in self.kinds:
+                try:
+                    response = self.get(kind + "/" + key)
+                    entries = self._load_list(response)
+                    if len(entries) > 1:
+                        raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.")
+                    if len(entries) == 0:
+                        pass
+                    else:
+                        if candidate is not None:  # Already found at least one candidate
+                            raise AmbiguousReferenceException(
+                                f"Found multiple inputs named {key}, please specify a kind")
+                        candidate = entries[0]
+                except HTTPError as he:
+                    if he.status == 404:
+                        pass  # Just carry on to the next kind.
+                    else:
+                        raise
+            if candidate is None:
+                raise KeyError(key)  # Never found a match.
+            return candidate
+
+    def __contains__(self, key):
+        if isinstance(key, tuple) and len(key) == 2:
+            # If we specify a kind, this will shortcut properly
+            try:
+                self.__getitem__(key)
+                return True
+            except KeyError:
+                return False
+        else:
+            # Without a kind, we want to minimize the number of round trips to the server, so we
+            # reimplement some of the behavior of __getitem__ in order to be able to stop searching
+            # on the first hit.
+            for kind in self.kinds:
+                try:
+                    response = self.get(self.kindpath(kind) + "/" + key)
+                    entries = self._load_list(response)
+                    if len(entries) > 0:
+                        return True
+                except HTTPError as he:
+                    if he.status == 404:
+                        pass  # Just carry on to the next kind.
+                    else:
+                        raise
+            return False
+
+    def create(self, name, kind, **kwargs):
+        """Creates an input of a specific kind in this collection, with any
+        arguments you specify.
+
+        :param `name`: The input name.
+        :type name: ``string``
+        :param `kind`: The kind of input:
+
+            - "ad": Active Directory
+
+            - "monitor": Files and directories
+
+            - "registry": Windows Registry
+
+            - "script": Scripts
+
+            - "splunktcp": TCP, processed
+
+            - "tcp": TCP, unprocessed
+
+            - "udp": UDP
+
+            - "win-event-log-collections": Windows event log
+
+            - "win-perfmon": Performance monitoring
+
+            - "win-wmi-collections": WMI
+
+        :type kind: ``string``
+        :param `kwargs`: Additional arguments (optional). For more about the
+            available parameters, see `Input parameters `_ on Splunk Developer Portal.
+
+        :type kwargs: ``dict``
+
+        :return: The new :class:`Input`.
+        """
+        kindpath = self.kindpath(kind)
+        self.post(kindpath, name=name, **kwargs)
+
+        # If we created an input with restrictToHost set, then
+        # its path will be <restrictToHost>:<port>, not just <port>,
+        # and we have to adjust accordingly.
+
+        # URL-encode the name of the entity.
+ name = UrlEncoded(name, encode_slash=True) + path = _path( + self.path + kindpath, + f"{kwargs['restrictToHost']}:{name}" if 'restrictToHost' in kwargs else name + ) + return Input(self.service, path, kind) + + def delete(self, name, kind=None): + """Removes an input from the collection. + + :param `kind`: The kind of input: + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kind: ``string`` + :param name: The name of the input to remove. + :type name: ``string`` + + :return: The :class:`Inputs` collection. + """ + if kind is None: + self.service.delete(self[name].path) + else: + self.service.delete(self[name, kind].path) + return self + + def itemmeta(self, kind): + """Returns metadata for the members of a given kind. + + :param `kind`: The kind of input: + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kind: ``string`` + + :return: The metadata. + :rtype: class:``splunklib.data.Record`` + """ + response = self.get(f"{self._kindmap[kind]}/_new") + content = _load_atom(response, MATCH_ENTRY_CONTENT) + return _parse_atom_metadata(content) + + def _get_kind_list(self, subpath=None): + if subpath is None: + subpath = [] + + kinds = [] + response = self.get('/'.join(subpath)) + content = _load_atom_entries(response) + for entry in content: + this_subpath = subpath + [entry.title] + # The "all" endpoint doesn't work yet. + # The "tcp/ssl" endpoint is not a real input collection. + if entry.title == 'all' or this_subpath == ['tcp', 'ssl']: + continue + if 'create' in [x.rel for x in entry.link]: + path = '/'.join(subpath + [entry.title]) + kinds.append(path) + else: + subkinds = self._get_kind_list(subpath + [entry.title]) + kinds.extend(subkinds) + return kinds + + @property + def kinds(self): + """Returns the input kinds on this Splunk instance. + + :return: The list of input kinds. + :rtype: ``list`` + """ + return self._get_kind_list() + + def kindpath(self, kind): + """Returns a path to the resources for a given input kind. + + :param `kind`: The kind of input: + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kind: ``string`` + + :return: The relative endpoint path. + :rtype: ``string`` + """ + if kind == 'tcp': + return UrlEncoded('tcp/raw', skip_encode=True) + if kind == 'splunktcp': + return UrlEncoded('tcp/cooked', skip_encode=True) + return UrlEncoded(kind, skip_encode=True) + + def list(self, *kinds, **kwargs): + """Returns a list of inputs that are in the :class:`Inputs` collection. + You can also filter by one or more input kinds. + + This function iterates over all possible inputs, regardless of any arguments you + specify. 
Because the :class:`Inputs` collection is the union of all the inputs of each + kind, this method implements parameters such as "count", "search", and so + on at the Python level once all the data has been fetched. The exception + is when you specify a single input kind, and then this method makes a single request + with the usual semantics for parameters. + + :param kinds: The input kinds to return (optional). + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kinds: ``string`` + :param kwargs: Additional arguments (optional): + + - "count" (``integer``): The maximum number of items to return. + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + + :return: A list of input kinds. + :rtype: ``list`` + """ + if len(kinds) == 0: + kinds = self.kinds + if len(kinds) == 1: + kind = kinds[0] + logger.debug("Inputs.list taking short circuit branch for single kind.") + path = self.kindpath(kind) + logger.debug("Path for inputs: %s", path) + try: + path = UrlEncoded(path, skip_encode=True) + response = self.get(path, **kwargs) + except HTTPError as he: + if he.status == 404: # No inputs of this kind + return [] + entities = [] + entries = _load_atom_entries(response) + if entries is None: + return [] # No inputs in a collection comes back with no feed or entry in the XML + for entry in entries: + state = _parse_atom_entry(entry) + # Unquote the URL, since all URL encoded in the SDK + # should be of type UrlEncoded, and all str should not + # be URL encoded. + path = parse.unquote(state.links.alternate) + entity = Input(self.service, path, kind, state=state) + entities.append(entity) + return entities + + search = kwargs.get('search', '*') + + entities = [] + for kind in kinds: + response = None + try: + kind = UrlEncoded(kind, skip_encode=True) + response = self.get(self.kindpath(kind), search=search) + except HTTPError as e: + if e.status == 404: + continue # No inputs of this kind + else: + raise + + entries = _load_atom_entries(response) + if entries is None: continue # No inputs to process + for entry in entries: + state = _parse_atom_entry(entry) + # Unquote the URL, since all URL encoded in the SDK + # should be of type UrlEncoded, and all str should not + # be URL encoded. 
+ path = parse.unquote(state.links.alternate) + entity = Input(self.service, path, kind, state=state) + entities.append(entity) + if 'offset' in kwargs: + entities = entities[kwargs['offset']:] + if 'count' in kwargs: + entities = entities[:kwargs['count']] + if kwargs.get('sort_mode', None) == 'alpha': + sort_field = kwargs.get('sort_field', 'name') + if sort_field == 'name': + f = lambda x: x.name.lower() + else: + f = lambda x: x[sort_field].lower() + entities = sorted(entities, key=f) + if kwargs.get('sort_mode', None) == 'alpha_case': + sort_field = kwargs.get('sort_field', 'name') + if sort_field == 'name': + f = lambda x: x.name + else: + f = lambda x: x[sort_field] + entities = sorted(entities, key=f) + if kwargs.get('sort_dir', 'asc') == 'desc': + entities = list(reversed(entities)) + return entities + + def __iter__(self, **kwargs): + for item in self.iter(**kwargs): + yield item + + def iter(self, **kwargs): + """ Iterates over the collection of inputs. + + :param kwargs: Additional arguments (optional): + + - "count" (``integer``): The maximum number of items to return. + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + """ + for item in self.list(**kwargs): + yield item + + def oneshot(self, path, **kwargs): + """ Creates a oneshot data input, which is an upload of a single file + for one-time indexing. + + :param path: The path and filename. + :type path: ``string`` + :param kwargs: Additional arguments (optional). For more about the + available parameters, see `Input parameters `_ on Splunk Developer Portal. + :type kwargs: ``dict`` + """ + self.post('oneshot', name=path, **kwargs) + + +class Job(Entity): + """This class represents a search job.""" + + def __init__(self, service, sid, **kwargs): + # Default to v2 in Splunk Version 9+ + path = "{path}{sid}" + # Formatting path based on the Splunk Version + if service.disable_v2_api: + path = path.format(path=PATH_JOBS, sid=sid) + else: + path = path.format(path=PATH_JOBS_V2, sid=sid) + + Entity.__init__(self, service, path, skip_refresh=True, **kwargs) + self.sid = sid + + # The Job entry record is returned at the root of the response + def _load_atom_entry(self, response): + return _load_atom(response).entry + + def cancel(self): + """Stops the current search and deletes the results cache. + + :return: The :class:`Job`. + """ + try: + self.post("control", action="cancel") + except HTTPError as he: + if he.status == 404: + # The job has already been cancelled, so + # cancelling it twice is a nop. + pass + else: + raise + return self + + def disable_preview(self): + """Disables preview for this job. + + :return: The :class:`Job`. + """ + self.post("control", action="disablepreview") + return self + + def enable_preview(self): + """Enables preview for this job. + + **Note**: Enabling preview might slow search considerably. + + :return: The :class:`Job`. + """ + self.post("control", action="enablepreview") + return self + + def events(self, **kwargs): + """Returns a streaming handle to this job's events. + + :param kwargs: Additional parameters (optional). 
For a list of valid + parameters, see `GET search/jobs/{search_id}/events + `_ + in the REST API documentation. + :type kwargs: ``dict`` + + :return: The ``InputStream`` IO handle to this job's events. + """ + kwargs['segmentation'] = kwargs.get('segmentation', 'none') + + # Search API v1(GET) and v2(POST) + if self.service.disable_v2_api: + return self.get("events", **kwargs).body + return self.post("events", **kwargs).body + + def finalize(self): + """Stops the job and provides intermediate results for retrieval. + + :return: The :class:`Job`. + """ + self.post("control", action="finalize") + return self + + def is_done(self): + """Indicates whether this job finished running. + + :return: ``True`` if the job is done, ``False`` if not. + :rtype: ``boolean`` + """ + if not self.is_ready(): + return False + done = (self._state.content['isDone'] == '1') + return done + + def is_ready(self): + """Indicates whether this job is ready for querying. + + :return: ``True`` if the job is ready, ``False`` if not. + :rtype: ``boolean`` + + """ + response = self.get() + if response.status == 204: + return False + self._state = self.read(response) + ready = self._state.content['dispatchState'] not in ['QUEUED', 'PARSING'] + return ready + + @property + def name(self): + """Returns the name of the search job, which is the search ID (SID). + + :return: The search ID. + :rtype: ``string`` + """ + return self.sid + + def pause(self): + """Suspends the current search. + + :return: The :class:`Job`. + """ + self.post("control", action="pause") + return self + + def results(self, **query_params): + """Returns a streaming handle to this job's search results. To get a nice, Pythonic iterator, pass the handle + to :class:`splunklib.results.JSONResultsReader` along with the query param "output_mode='json'", as in:: + + import splunklib.client as client + import splunklib.results as results + from time import sleep + service = client.connect(...) + job = service.jobs.create("search * | head 5") + while not job.is_done(): + sleep(.2) + rr = results.JSONResultsReader(job.results(output_mode='json')) + for result in rr: + if isinstance(result, results.Message): + # Diagnostic messages may be returned in the results + print(f'{result.type}: {result.message}') + elif isinstance(result, dict): + # Normal events are returned as dicts + print(result) + assert rr.is_preview == False + + Results are not available until the job has finished. If called on + an unfinished job, the result is an empty event set. + + This method makes a single roundtrip + to the server, plus at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param query_params: Additional parameters (optional). For a list of valid + parameters, see `GET search/jobs/{search_id}/results + `_. + :type query_params: ``dict`` + + :return: The ``InputStream`` IO handle to this job's results. + """ + query_params['segmentation'] = query_params.get('segmentation', 'none') + + # Search API v1(GET) and v2(POST) + if self.service.disable_v2_api: + return self.get("results", **query_params).body + return self.post("results", **query_params).body + + def preview(self, **query_params): + """Returns a streaming handle to this job's preview search results. 
+
+
+        Unlike fetching results with :class:`splunklib.results.JSONResultsReader` and the query param
+        "output_mode='json'", which requires a job to be finished before any results are returned, the ``preview``
+        method returns any results that have been generated so far, whether or not the job is still running.
+        The returned search results are the raw data
+        from the server. Pass the handle returned to :class:`splunklib.results.JSONResultsReader` to get a nice,
+        Pythonic iterator over objects, as in::
+
+            import splunklib.client as client
+            import splunklib.results as results
+            service = client.connect(...)
+            job = service.jobs.create("search * | head 5")
+            rr = results.JSONResultsReader(job.preview(output_mode='json'))
+            for result in rr:
+                if isinstance(result, results.Message):
+                    # Diagnostic messages may be returned in the results
+                    print(f'{result.type}: {result.message}')
+                elif isinstance(result, dict):
+                    # Normal events are returned as dicts
+                    print(result)
+            if rr.is_preview:
+                print("Preview of a running search job.")
+            else:
+                print("Job is finished. Results are final.")
+
+        This method makes one roundtrip to the server, plus at most
+        two more if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :param query_params: Additional parameters (optional). For a list of valid
+            parameters, see `GET search/jobs/{search_id}/results_preview
+            `_
+            in the REST API documentation.
+        :type query_params: ``dict``
+
+        :return: The ``InputStream`` IO handle to this job's preview results.
+        """
+        query_params['segmentation'] = query_params.get('segmentation', 'none')
+
+        # Search API v1(GET) and v2(POST)
+        if self.service.disable_v2_api:
+            return self.get("results_preview", **query_params).body
+        return self.post("results_preview", **query_params).body
+
+    def searchlog(self, **kwargs):
+        """Returns a streaming handle to this job's search log.
+
+        :param `kwargs`: Additional parameters (optional). For a list of valid
+            parameters, see `GET search/jobs/{search_id}/search.log
+            `_
+            in the REST API documentation.
+        :type kwargs: ``dict``
+
+        :return: The ``InputStream`` IO handle to this job's search log.
+        """
+        return self.get("search.log", **kwargs).body
+
+    def set_priority(self, value):
+        """Sets this job's search priority in the range of 0-10.
+
+        Higher numbers indicate higher priority. Unless splunkd is
+        running as *root*, you can only decrease the priority of a running job.
+
+        :param `value`: The search priority.
+        :type value: ``integer``
+
+        :return: The :class:`Job`.
+        """
+        self.post('control', action="setpriority", priority=value)
+        return self
+
+    def summary(self, **kwargs):
+        """Returns a streaming handle to this job's summary.
+
+        :param `kwargs`: Additional parameters (optional). For a list of valid
+            parameters, see `GET search/jobs/{search_id}/summary
+            `_
+            in the REST API documentation.
+        :type kwargs: ``dict``
+
+        :return: The ``InputStream`` IO handle to this job's summary.
+        """
+        return self.get("summary", **kwargs).body
+
+    def timeline(self, **kwargs):
+        """Returns a streaming handle to this job's timeline results.
+
+        :param `kwargs`: Additional timeline arguments (optional). For a list of valid
+            parameters, see `GET search/jobs/{search_id}/timeline
+            `_
+            in the REST API documentation.
+        :type kwargs: ``dict``
+
+        :return: The ``InputStream`` IO handle to this job's timeline.
+        """
+        return self.get("timeline", **kwargs).body
+
+    def touch(self):
+        """Extends the expiration time of the search to the current time (now) plus
+        the time-to-live (ttl) value.
+
+        :return: The :class:`Job`.
+        """
+        self.post("control", action="touch")
+        return self
+
+    def set_ttl(self, value):
+        """Sets the job's time-to-live (ttl) value, which is how long the
+        search job and its results remain available before expiring.
+
+        :param `value`: The ttl value, in seconds.
+        :type value: ``integer``
+
+        :return: The :class:`Job`.
+        """
+        self.post("control", action="setttl", ttl=value)
+        return self
+
+    def unpause(self):
+        """Resumes the current search, if paused.
+
+        :return: The :class:`Job`.
+        """
+        self.post("control", action="unpause")
+        return self
+
+
+class Jobs(Collection):
+    """This class represents a collection of search jobs. Retrieve this
+    collection using :meth:`Service.jobs`."""
+
+    def __init__(self, service):
+        # Splunk 9 introduces the v2 endpoint
+        if not service.disable_v2_api:
+            path = PATH_JOBS_V2
+        else:
+            path = PATH_JOBS
+        Collection.__init__(self, service, path, item=Job)
+        # The count value to say list all the contents of this
+        # Collection is 0, not -1 as it is on most.
+        self.null_count = 0
+
+    def _load_list(self, response):
+        # Overridden because Job takes a sid instead of a path.
+        entries = _load_atom_entries(response)
+        if entries is None: return []
+        entities = []
+        for entry in entries:
+            state = _parse_atom_entry(entry)
+            entity = self.item(
+                self.service,
+                entry['content']['sid'],
+                state=state)
+            entities.append(entity)
+        return entities
+
+    def create(self, query, **kwargs):
+        """ Creates a search using a search query and any additional parameters
+        you provide.
+
+        :param query: The search query.
+        :type query: ``string``
+        :param kwargs: Additional parameters (optional). For a list of available
+            parameters, see `Search job parameters
+            `_
+            on Splunk Developer Portal.
+        :type kwargs: ``dict``
+
+        :return: The :class:`Job`.
+        """
+        if kwargs.get("exec_mode", None) == "oneshot":
+            raise TypeError("Cannot specify exec_mode=oneshot; use the oneshot method instead.")
+        response = self.post(search=query, **kwargs)
+        sid = _load_sid(response, kwargs.get("output_mode", None))
+        return Job(self.service, sid)
+
+    def export(self, query, **params):
+        """Runs a search and immediately starts streaming preview events. This method returns a streaming handle to
+        this job's events as an XML document from the server. To parse this stream into usable Python objects,
+        pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param
+        "output_mode='json'"::
+
+            import splunklib.client as client
+            import splunklib.results as results
+            service = client.connect(...)
+            rr = results.JSONResultsReader(service.jobs.export("search * | head 5", output_mode='json'))
+            for result in rr:
+                if isinstance(result, results.Message):
+                    # Diagnostic messages may be returned in the results
+                    print(f'{result.type}: {result.message}')
+                elif isinstance(result, dict):
+                    # Normal events are returned as dicts
+                    print(result)
+            assert rr.is_preview == False
+
+        Running an export search is more efficient as it streams the results
+        directly to you, rather than having to write them out to disk and make
+        them available later. As soon as results are ready, you will receive
+        them.
+
+        The ``export`` method makes a single roundtrip to the server (as opposed
+        to two for :meth:`create` followed by :meth:`preview`), plus at most two
+        more if the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :raises `ValueError`: Raised for invalid queries.
+        :param query: The search query.
+        :type query: ``string``
+        :param params: Additional arguments (optional). For a list of valid
+            parameters, see `GET search/jobs/export
+            `_
+            in the REST API documentation.
+        :type params: ``dict``
+
+        :return: The ``InputStream`` IO handle to raw XML returned from the server.
+        """
+        if "exec_mode" in params:
+            raise TypeError("Cannot specify an exec_mode to export.")
+        params['segmentation'] = params.get('segmentation', 'none')
+        return self.post(path_segment="export",
+                         search=query,
+                         **params).body
+
+    def itemmeta(self):
+        """There is no metadata available for :class:`Jobs`.
+
+        Any call to this method raises a :class:`NotSupportedError`.
+
+        :raises: :class:`NotSupportedError`
+        """
+        raise NotSupportedError()
+
+    def oneshot(self, query, **params):
+        """Runs a oneshot search and returns a streaming handle to the results.
+
+        The ``InputStream`` object streams fragments from the server. To parse this stream into usable Python
+        objects, pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param
+        "output_mode='json'" ::
+
+            import splunklib.client as client
+            import splunklib.results as results
+            service = client.connect(...)
+            rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5", output_mode='json'))
+            for result in rr:
+                if isinstance(result, results.Message):
+                    # Diagnostic messages may be returned in the results
+                    print(f'{result.type}: {result.message}')
+                elif isinstance(result, dict):
+                    # Normal events are returned as dicts
+                    print(result)
+            assert rr.is_preview == False
+
+        The ``oneshot`` method makes a single roundtrip to the server (as opposed
+        to two for :meth:`create` followed by :meth:`results`), plus at most two more
+        if the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :raises ValueError: Raised for invalid queries.
+
+        :param query: The search query.
+        :type query: ``string``
+        :param params: Additional arguments (optional):
+
+            - "output_mode": Specifies the output format of the results (XML,
+              JSON, or CSV).
+
+            - "earliest_time": Specifies the earliest time in the time range to
+              search. The time string can be a UTC time (with fractional seconds),
+              a relative time specifier (to now), or a formatted time string.
+
+            - "latest_time": Specifies the latest time in the time range to
+              search. The time string can be a UTC time (with fractional seconds),
+              a relative time specifier (to now), or a formatted time string.
+
+            - "rf": Specifies one or more fields to add to the search.
+
+        :type params: ``dict``
+
+        :return: The ``InputStream`` IO handle to raw XML returned from the server.
+        """
+        if "exec_mode" in params:
+            raise TypeError("Cannot specify an exec_mode to oneshot.")
+        params['segmentation'] = params.get('segmentation', 'none')
+        return self.post(search=query,
+                         exec_mode="oneshot",
+                         **params).body
+
+
+class Loggers(Collection):
+    """This class represents a collection of service logging categories.
+    Retrieve this collection using :meth:`Service.loggers`."""
+
+    def __init__(self, service):
+        Collection.__init__(self, service, PATH_LOGGER)
+
+    def itemmeta(self):
+        """There is no metadata available for :class:`Loggers`.
+
+        Any call to this method raises a :class:`NotSupportedError`.
+
+        :raises: :class:`NotSupportedError`
+        """
+        raise NotSupportedError()
+
+
+class Message(Entity):
+    def __init__(self, service, path, **kwargs):
+        Entity.__init__(self, service, path, **kwargs)
+
+    @property
+    def value(self):
+        """Returns the message value.
+
+        :return: The message value.
+        :rtype: ``string``
+        """
+        return self[self.name]
+
+
+class ModularInputKind(Entity):
+    """This class contains the different types of modular inputs. Retrieve this
+    collection using :meth:`Service.modular_input_kinds`.
+    """
+
+    def __contains__(self, name):
+        args = self.state.content['endpoint']['args']
+        if name in args:
+            return True
+        return Entity.__contains__(self, name)
+
+    def __getitem__(self, name):
+        args = self.state.content['endpoint']['args']
+        if name in args:
+            return args['item']
+        return Entity.__getitem__(self, name)
+
+    @property
+    def arguments(self):
+        """A dictionary of all the arguments supported by this modular input kind.
+
+        The keys in the dictionary are the names of the arguments. The values are
+        another dictionary giving the metadata about that argument. The possible
+        keys in that dictionary are ``"title"``, ``"description"``, ``"required_on_create"``,
+        ``"required_on_edit"``, and ``"data_type"``. Each value is a string. It should be one
+        of ``"true"`` or ``"false"`` for ``"required_on_create"`` and ``"required_on_edit"``,
+        and one of ``"boolean"``, ``"string"``, or ``"number"`` for ``"data_type"``.
+
+        :return: A dictionary describing the arguments this modular input kind takes.
+        :rtype: ``dict``
+        """
+        return self.state.content['endpoint']['args']
+
+    def update(self, **kwargs):
+        """Raises an error. Modular input kinds are read only."""
+        raise IllegalOperationException("Modular input kinds cannot be updated via the REST API.")
+
+
+class SavedSearch(Entity):
+    """This class represents a saved search."""
+
+    def __init__(self, service, path, **kwargs):
+        Entity.__init__(self, service, path, **kwargs)
+
+    def acknowledge(self):
+        """Acknowledges the suppression of alerts from this saved search and
+        resumes alerting.
+
+        :return: The :class:`SavedSearch`.
+        """
+        self.post("acknowledge")
+        return self
+
+    @property
+    def alert_count(self):
+        """Returns the number of alerts fired by this saved search.
+
+        :return: The number of alerts fired by this saved search.
+        :rtype: ``integer``
+        """
+        return int(self._state.content.get('triggered_alert_count', 0))
+
+    def dispatch(self, **kwargs):
+        """Runs the saved search and returns the resulting search job.
+
+        :param `kwargs`: Additional dispatch arguments (optional). For details,
+            see the `POST saved/searches/{name}/dispatch
+            `_
+            endpoint in the REST API documentation.
+        :type kwargs: ``dict``
+        :return: The :class:`Job`.
+        """
+        response = self.post("dispatch", **kwargs)
+        sid = _load_sid(response, kwargs.get("output_mode", None))
+        return Job(self.service, sid)
+
+    @property
+    def fired_alerts(self):
+        """Returns the collection of fired alerts (a fired alert group)
+        corresponding to this saved search's alerts.
+
+        :raises IllegalOperationException: Raised when the search is not scheduled.
+
+        :return: A collection of fired alerts.
+        :rtype: :class:`AlertGroup`
+        """
+        if self['is_scheduled'] == '0':
+            raise IllegalOperationException('Unscheduled saved searches have no alerts.')
+        c = Collection(
+            self.service,
+            self.service._abspath(PATH_FIRED_ALERTS + self.name,
+                                  owner=self._state.access.owner,
+                                  app=self._state.access.app,
+                                  sharing=self._state.access.sharing),
+            item=AlertGroup)
+        return c
+
+    def history(self, **kwargs):
+        """Returns a list of search jobs corresponding to this saved search.
+
+        :param `kwargs`: Additional arguments (optional).
+        :type kwargs: ``dict``
+
+        :return: A list of :class:`Job` objects.
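+
+        **Example** (a hedged sketch; the saved search name is a placeholder
+        and must exist on the server)::
+
+            import splunklib.client as client
+            service = client.connect(...)
+            saved_search = service.saved_searches['Errors in the last 24 hours']
+            for job in saved_search.history():
+                print(job.sid)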
+ """ + response = self.get("history", **kwargs) + entries = _load_atom_entries(response) + if entries is None: return [] + jobs = [] + for entry in entries: + job = Job(self.service, entry.title) + jobs.append(job) + return jobs + + def update(self, search=None, **kwargs): + """Updates the server with any changes you've made to the current saved + search along with any additional arguments you specify. + + :param `search`: The search query (optional). + :type search: ``string`` + :param `kwargs`: Additional arguments (optional). For a list of available + parameters, see `Saved search parameters + `_ + on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The :class:`SavedSearch`. + """ + # Updates to a saved search *require* that the search string be + # passed, so we pass the current search string if a value wasn't + # provided by the caller. + if search is None: search = self.content.search + Entity.update(self, search=search, **kwargs) + return self + + def scheduled_times(self, earliest_time='now', latest_time='+1h'): + """Returns the times when this search is scheduled to run. + + By default this method returns the times in the next hour. For different + time ranges, set *earliest_time* and *latest_time*. For example, + for all times in the last day use "earliest_time=-1d" and + "latest_time=now". + + :param earliest_time: The earliest time. + :type earliest_time: ``string`` + :param latest_time: The latest time. + :type latest_time: ``string`` + + :return: The list of search times. + """ + response = self.get("scheduled_times", + earliest_time=earliest_time, + latest_time=latest_time) + data = self._load_atom_entry(response) + rec = _parse_atom_entry(data) + times = [datetime.fromtimestamp(int(t)) + for t in rec.content.scheduled_times] + return times + + def suppress(self, expiration): + """Skips any scheduled runs of this search in the next *expiration* + number of seconds. + + :param expiration: The expiration period, in seconds. + :type expiration: ``integer`` + + :return: The :class:`SavedSearch`. + """ + self.post("suppress", expiration=expiration) + return self + + @property + def suppressed(self): + """Returns the number of seconds that this search is blocked from running + (possibly 0). + + :return: The number of seconds. + :rtype: ``integer`` + """ + r = self._run_action("suppress") + if r.suppressed == "1": + return int(r.expiration) + return 0 + + def unsuppress(self): + """Cancels suppression and makes this search run as scheduled. + + :return: The :class:`SavedSearch`. + """ + self.post("suppress", expiration="0") + return self + + +class SavedSearches(Collection): + """This class represents a collection of saved searches. Retrieve this + collection using :meth:`Service.saved_searches`.""" + + def __init__(self, service): + Collection.__init__( + self, service, PATH_SAVED_SEARCHES, item=SavedSearch) + + def create(self, name, search, **kwargs): + """ Creates a saved search. + + :param name: The name for the saved search. + :type name: ``string`` + :param search: The search query. + :type search: ``string`` + :param kwargs: Additional arguments (optional). For a list of available + parameters, see `Saved search parameters + `_ + on Splunk Developer Portal. + :type kwargs: ``dict`` + :return: The :class:`SavedSearches` collection. + """ + return Collection.create(self, name, search=search, **kwargs) + + +class Settings(Entity): + """This class represents configuration settings for a Splunk service. 
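+
+    **Example** (illustrative only; assumes the ``sessionTimeout`` setting is
+    readable and writable on this deployment)::
+
+        import splunklib.client as client
+        service = client.connect(...)
+        settings = service.settings
+        print(settings['sessionTimeout'])
+        settings.update(sessionTimeout='2h')
+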
+ Retrieve this collection using :meth:`Service.settings`.""" + + def __init__(self, service, **kwargs): + Entity.__init__(self, service, "/services/server/settings", **kwargs) + + # Updates on the settings endpoint are POSTed to server/settings/settings. + def update(self, **kwargs): + """Updates the settings on the server using the arguments you provide. + + :param kwargs: Additional arguments. For a list of valid arguments, see + `POST server/settings/{name} + `_ + in the REST API documentation. + :type kwargs: ``dict`` + :return: The :class:`Settings` collection. + """ + self.service.post("/services/server/settings/settings", **kwargs) + return self + + +class User(Entity): + """This class represents a Splunk user. + """ + + @property + def role_entities(self): + """Returns a list of roles assigned to this user. + + :return: The list of roles. + :rtype: ``list`` + """ + all_role_names = [r.name for r in self.service.roles.list()] + return [self.service.roles[name] for name in self.content.roles if name in all_role_names] + + +# Splunk automatically lowercases new user names so we need to match that +# behavior here to ensure that the subsequent member lookup works correctly. +class Users(Collection): + """This class represents the collection of Splunk users for this instance of + Splunk. Retrieve this collection using :meth:`Service.users`. + """ + + def __init__(self, service): + Collection.__init__(self, service, PATH_USERS, item=User) + + def __getitem__(self, key): + return Collection.__getitem__(self, key.lower()) + + def __contains__(self, name): + return Collection.__contains__(self, name.lower()) + + def create(self, username, password, roles, **params): + """Creates a new user. + + This function makes two roundtrips to the server, plus at most + two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param username: The username. + :type username: ``string`` + :param password: The password. + :type password: ``string`` + :param roles: A single role or list of roles for the user. + :type roles: ``string`` or ``list`` + :param params: Additional arguments (optional). For a list of available + parameters, see `User authentication parameters + `_ + on Splunk Developer Portal. + :type params: ``dict`` + + :return: The new user. + :rtype: :class:`User` + + **Example**:: + + import splunklib.client as client + c = client.connect(...) + users = c.users + boris = users.create("boris", "securepassword", roles="user") + hilda = users.create("hilda", "anotherpassword", roles=["user","power"]) + """ + if not isinstance(username, str): + raise ValueError(f"Invalid username: {str(username)}") + username = username.lower() + self.post(name=username, password=password, roles=roles, **params) + # splunkd doesn't return the user in the POST response body, + # so we have to make a second round trip to fetch it. + response = self.get(username) + entry = _load_atom(response, XNAME_ENTRY).entry + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + parse.unquote(state.links.alternate), + state=state) + return entity + + def delete(self, name): + """ Deletes the user and returns the resulting collection of users. + + :param name: The name of the user to delete. + :type name: ``string`` + + :return: + :rtype: :class:`Users` + """ + return Collection.delete(self, name.lower()) + + +class Role(Entity): + """This class represents a user role. + """ + + def grant(self, *capabilities_to_grant): + """Grants additional capabilities to this role. 
+
+        :param capabilities_to_grant: Zero or more capabilities to grant this
+            role. For a list of capabilities, see
+            `Capabilities `_
+            on Splunk Developer Portal.
+        :type capabilities_to_grant: ``string`` or ``list``
+        :return: The :class:`Role`.
+
+        **Example**::
+
+            service = client.connect(...)
+            role = service.roles['somerole']
+            role.grant('change_own_password', 'search')
+        """
+        possible_capabilities = self.service.capabilities
+        for capability in capabilities_to_grant:
+            if capability not in possible_capabilities:
+                raise NoSuchCapability(capability)
+        new_capabilities = self['capabilities'] + list(capabilities_to_grant)
+        self.post(capabilities=new_capabilities)
+        return self
+
+    def revoke(self, *capabilities_to_revoke):
+        """Revokes zero or more capabilities from this role.
+
+        :param capabilities_to_revoke: Zero or more capabilities to revoke from
+            this role. For a list of capabilities, see
+            `Capabilities `_
+            on Splunk Developer Portal.
+        :type capabilities_to_revoke: ``string`` or ``list``
+
+        :return: The :class:`Role`.
+
+        **Example**::
+
+            service = client.connect(...)
+            role = service.roles['somerole']
+            role.revoke('change_own_password', 'search')
+        """
+        possible_capabilities = self.service.capabilities
+        for capability in capabilities_to_revoke:
+            if capability not in possible_capabilities:
+                raise NoSuchCapability(capability)
+        old_capabilities = self['capabilities']
+        new_capabilities = []
+        for c in old_capabilities:
+            if c not in capabilities_to_revoke:
+                new_capabilities.append(c)
+        if not new_capabilities:
+            new_capabilities = ''  # Empty lists don't get passed in the body, so we have to force an empty argument.
+        self.post(capabilities=new_capabilities)
+        return self
+
+
+class Roles(Collection):
+    """This class represents the collection of roles in the Splunk instance.
+    Retrieve this collection using :meth:`Service.roles`."""
+
+    def __init__(self, service):
+        Collection.__init__(self, service, PATH_ROLES, item=Role)
+
+    def __getitem__(self, key):
+        return Collection.__getitem__(self, key.lower())
+
+    def __contains__(self, name):
+        return Collection.__contains__(self, name.lower())
+
+    def create(self, name, **params):
+        """Creates a new role.
+
+        This function makes two roundtrips to the server, plus at most
+        two more if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :param name: Name for the role.
+        :type name: ``string``
+        :param params: Additional arguments (optional). For a list of available
+            parameters, see `Roles parameters
+            `_
+            on Splunk Developer Portal.
+        :type params: ``dict``
+
+        :return: The new role.
+        :rtype: :class:`Role`
+
+        **Example**::
+
+            import splunklib.client as client
+            c = client.connect(...)
+            roles = c.roles
+            paltry = roles.create("paltry", imported_roles="user", defaultApp="search")
+        """
+        if not isinstance(name, str):
+            raise ValueError(f"Invalid role name: {str(name)}")
+        name = name.lower()
+        self.post(name=name, **params)
+        # splunkd doesn't return the role in the POST response body,
+        # so we have to make a second round trip to fetch it.
+        response = self.get(name)
+        entry = _load_atom(response, XNAME_ENTRY).entry
+        state = _parse_atom_entry(entry)
+        entity = self.item(
+            self.service,
+            parse.unquote(state.links.alternate),
+            state=state)
+        return entity
+
+    def delete(self, name):
+        """ Deletes the role and returns the resulting collection of roles.
+
+        :param name: The name of the role to delete.
+ :type name: ``string`` + + :rtype: The :class:`Roles` + """ + return Collection.delete(self, name.lower()) + + +class Application(Entity): + """Represents a locally-installed Splunk app.""" + + @property + def setupInfo(self): + """Returns the setup information for the app. + + :return: The setup information. + """ + return self.content.get('eai:setup', None) + + def package(self): + """ Creates a compressed package of the app for archiving.""" + return self._run_action("package") + + def updateInfo(self): + """Returns any update information that is available for the app.""" + return self._run_action("update") + + +class KVStoreCollections(Collection): + def __init__(self, service): + Collection.__init__(self, service, 'storage/collections/config', item=KVStoreCollection) + + def __getitem__(self, item): + res = Collection.__getitem__(self, item) + for k, v in res.content.items(): + if "accelerated_fields" in k: + res.content[k] = json.loads(v) + return res + + def create(self, name, accelerated_fields={}, fields={}, **kwargs): + """Creates a KV Store Collection. + + :param name: name of collection to create + :type name: ``string`` + :param accelerated_fields: dictionary of accelerated_fields definitions + :type accelerated_fields: ``dict`` + :param fields: dictionary of field definitions + :type fields: ``dict`` + :param kwargs: a dictionary of additional parameters specifying indexes and field definitions + :type kwargs: ``dict`` + + :return: Result of POST request + """ + for k, v in accelerated_fields.items(): + if isinstance(v, dict): + v = json.dumps(v) + kwargs['accelerated_fields.' + k] = v + for k, v in fields.items(): + kwargs['field.' + k] = v + return self.post(name=name, **kwargs) + + +class KVStoreCollection(Entity): + @property + def data(self): + """Returns data object for this Collection. + + :rtype: :class:`KVStoreCollectionData` + """ + return KVStoreCollectionData(self) + + def update_accelerated_field(self, name, value): + """Changes the definition of a KV Store accelerated_field. + + :param name: name of accelerated_fields to change + :type name: ``string`` + :param value: new accelerated_fields definition + :type value: ``dict`` + + :return: Result of POST request + """ + kwargs = {} + kwargs['accelerated_fields.' + name] = json.dumps(value) if isinstance(value, dict) else value + return self.post(**kwargs) + + def update_field(self, name, value): + """Changes the definition of a KV Store field. + + :param name: name of field to change + :type name: ``string`` + :param value: new field definition + :type value: ``string`` + + :return: Result of POST request + """ + kwargs = {} + kwargs['field.' + name] = value + return self.post(**kwargs) + + +class KVStoreCollectionData: + """This class represents the data endpoint for a KVStoreCollection. 
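+
+    **Example** (a minimal sketch; the collection name is a placeholder and
+    must already exist in the current app)::
+
+        import splunklib.client as client
+        service = client.connect(...)
+        data = service.kvstore['mycollection'].data
+        key = data.insert({'name': 'example'})['_key']
+        print(data.query_by_id(key))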
+
+    Retrieve using :meth:`KVStoreCollection.data`.
+    """
+    JSON_HEADER = [('Content-Type', 'application/json')]
+
+    def __init__(self, collection):
+        self.service = collection.service
+        self.collection = collection
+        self.owner, self.app, self.sharing = collection._proper_namespace()
+        self.path = 'storage/collections/data/' + UrlEncoded(self.collection.name, encode_slash=True) + '/'
+
+    def _get(self, url, **kwargs):
+        return self.service.get(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs)
+
+    def _post(self, url, **kwargs):
+        return self.service.post(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs)
+
+    def _delete(self, url, **kwargs):
+        return self.service.delete(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs)
+
+    def query(self, **query):
+        """
+        Gets the results of query, with optional parameters sort, limit, skip, and fields.
+
+        :param query: Optional parameters. Valid options are sort, limit, skip, and fields
+        :type query: ``dict``
+
+        :return: Array of documents retrieved by query.
+        :rtype: ``array``
+        """
+
+        for key, value in query.items():
+            if isinstance(query[key], dict):
+                query[key] = json.dumps(value)
+
+        return json.loads(self._get('', **query).body.read().decode('utf-8'))
+
+    def query_by_id(self, id):
+        """
+        Returns the object with _id = id.
+
+        :param id: Value for ID. If not a string, it will be coerced to a string.
+        :type id: ``string``
+
+        :return: Document with id
+        :rtype: ``dict``
+        """
+        return json.loads(self._get(UrlEncoded(str(id), encode_slash=True)).body.read().decode('utf-8'))
+
+    def insert(self, data):
+        """
+        Inserts item into this collection. An _id field will be generated if not assigned in the data.
+
+        :param data: Document to insert
+        :type data: ``string``
+
+        :return: _id of inserted object
+        :rtype: ``dict``
+        """
+        if isinstance(data, dict):
+            data = json.dumps(data)
+        return json.loads(
+            self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8'))
+
+    def delete(self, query=None):
+        """
+        Deletes all data in collection if query is absent. Otherwise, deletes all data matched by query.
+
+        :param query: Query to select documents to delete
+        :type query: ``string``
+
+        :return: Result of DELETE request
+        """
+        return self._delete('', **({'query': query} if query else {}))
+
+    def delete_by_id(self, id):
+        """
+        Deletes the document that has _id = id.
+
+        :param id: id of document to delete
+        :type id: ``string``
+
+        :return: Result of DELETE request
+        """
+        return self._delete(UrlEncoded(str(id), encode_slash=True))
+
+    def update(self, id, data):
+        """
+        Replaces the document with _id = id with data.
+
+        :param id: _id of document to update
+        :type id: ``string``
+        :param data: the new document to insert
+        :type data: ``string``
+
+        :return: id of replaced document
+        :rtype: ``dict``
+        """
+        if isinstance(data, dict):
+            data = json.dumps(data)
+        return json.loads(self._post(UrlEncoded(str(id), encode_slash=True), headers=KVStoreCollectionData.JSON_HEADER,
+                                     body=data).body.read().decode('utf-8'))
+
+    def batch_find(self, *dbqueries):
+        """
+        Returns an array of results from the queries dbqueries.
+ + :param dbqueries: Array of individual queries as dictionaries + :type dbqueries: ``array`` of ``dict`` + + :return: Results of each query + :rtype: ``array`` of ``array`` + """ + if len(dbqueries) < 1: + raise Exception('Must have at least one query.') + + data = json.dumps(dbqueries) + + return json.loads( + self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + + def batch_save(self, *documents): + """ + Inserts or updates every document specified in documents. + + :param documents: Array of documents to save as dictionaries + :type documents: ``array`` of ``dict`` + + :return: Results of update operation as overall stats + :rtype: ``dict`` + """ + if len(documents) < 1: + raise Exception('Must have at least one document.') + + data = json.dumps(documents) + + return json.loads( + self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index 55e67b61..16999a2a 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -1,1143 +1,1143 @@ -# coding=utf-8 -# -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Absolute imports - -import csv -import io -import os -import re -import sys -import tempfile -import traceback -from collections import namedtuple, OrderedDict -from copy import deepcopy -from io import StringIO -from itertools import chain, islice -from logging import _nameToLevel as _levelNames, getLevelName, getLogger -from shutil import make_archive -from time import time -from urllib.parse import unquote -from urllib.parse import urlsplit -from warnings import warn -from xml.etree import ElementTree -from splunklib.utils import ensure_str - - -# Relative imports -import splunklib -from . import Boolean, Option, environment -from .internals import ( - CommandLineParser, - CsvDialect, - InputHeader, - Message, - MetadataDecoder, - MetadataEncoder, - ObjectView, - Recorder, - RecordWriterV1, - RecordWriterV2, - json_encode_string) -from ..client import Service - - -# ---------------------------------------------------------------------------------------------------------------------- - -# P1 [ ] TODO: Log these issues against ChunkedExternProcessor -# -# 1. Implement requires_preop configuration setting. -# This configuration setting is currently rejected by ChunkedExternProcessor. -# -# 2. Rename type=events as type=eventing for symmetry with type=reporting and type=streaming -# Eventing commands process records on the events pipeline. This change effects ChunkedExternProcessor.cpp, -# eventing_command.py, and generating_command.py. -# -# 3. For consistency with SCPV1, commands.conf should not require filename setting when chunked = true -# The SCPV1 processor uses .py as the default filename. The ChunkedExternProcessor should do the same. 
- -# P1 [ ] TODO: Verify that ChunkedExternProcessor complains if a streaming_preop has a type other than 'streaming' -# It once looked like sending type='reporting' for the streaming_preop was accepted. - -# ---------------------------------------------------------------------------------------------------------------------- - -# P2 [ ] TODO: Consider bumping None formatting up to Option.Item.__str__ - - -class SearchCommand: - """ Represents a custom search command. - - """ - - def __init__(self): - - # Variables that may be used, but not altered by derived classes - - class_name = self.__class__.__name__ - - self._logger, self._logging_configuration = getLogger(class_name), environment.logging_configuration - - # Variables backing option/property values - - self._configuration = self.ConfigurationSettings(self) - self._input_header = InputHeader() - self._fieldnames = None - self._finished = None - self._metadata = None - self._options = None - self._protocol_version = None - self._search_results_info = None - self._service = None - - # Internal variables - - self._default_logging_level = self._logger.level - self._record_writer = None - self._records = None - self._allow_empty_input = True - - def __str__(self): - text = ' '.join(chain((type(self).name, str(self.options)), [] if self.fieldnames is None else self.fieldnames)) - return text - - # region Options - - @Option - def logging_configuration(self): - """ **Syntax:** logging_configuration= - - **Description:** Loads an alternative logging configuration file for - a command invocation. The logging configuration file must be in Python - ConfigParser-format. Path names are relative to the app root directory. - - """ - return self._logging_configuration - - @logging_configuration.setter - def logging_configuration(self, value): - self._logger, self._logging_configuration = environment.configure_logging(self.__class__.__name__, value) - - @Option - def logging_level(self): - """ **Syntax:** logging_level=[CRITICAL|ERROR|WARNING|INFO|DEBUG|NOTSET] - - **Description:** Sets the threshold for the logger of this command invocation. Logging messages less severe than - `logging_level` will be ignored. - - """ - return getLevelName(self._logger.getEffectiveLevel()) - - @logging_level.setter - def logging_level(self, value): - if value is None: - value = self._default_logging_level - if isinstance(value, (bytes, str)): - try: - level = _levelNames[value.upper()] - except KeyError: - raise ValueError(f'Unrecognized logging level: {value}') - else: - try: - level = int(value) - except ValueError: - raise ValueError(f'Unrecognized logging level: {value}') - self._logger.setLevel(level) - - def add_field(self, current_record, field_name, field_value): - self._record_writer.custom_fields.add(field_name) - current_record[field_name] = field_value - - def gen_record(self, **record): - self._record_writer.custom_fields |= set(record.keys()) - return record - - record = Option(doc=''' - **Syntax: record= - - **Description:** When `true`, records the interaction between the command and splunkd. Defaults to `false`. - - ''', default=False, validate=Boolean()) - - show_configuration = Option(doc=''' - **Syntax:** show_configuration= - - **Description:** When `true`, reports command configuration as an informational message. Defaults to `false`. - - ''', default=False, validate=Boolean()) - - # endregion - - # region Properties - - @property - def configuration(self): - """ Returns the configuration settings for this command. 
- - """ - return self._configuration - - @property - def fieldnames(self): - """ Returns the fieldnames specified as argument to this command. - - """ - return self._fieldnames - - @fieldnames.setter - def fieldnames(self, value): - self._fieldnames = value - - @property - def input_header(self): - """ Returns the input header for this command. - - :return: The input header for this command. - :rtype: InputHeader - - """ - warn( - 'SearchCommand.input_header is deprecated and will be removed in a future release. ' - 'Please use SearchCommand.metadata instead.', DeprecationWarning, 2) - return self._input_header - - @property - def logger(self): - """ Returns the logger for this command. - - :return: The logger for this command. - :rtype: - - """ - return self._logger - - @property - def metadata(self): - return self._metadata - - @property - def options(self): - """ Returns the options specified as argument to this command. - - """ - if self._options is None: - self._options = Option.View(self) - return self._options - - @property - def protocol_version(self): - return self._protocol_version - - @property - def search_results_info(self): - """ Returns the search results info for this command invocation. - - The search results info object is created from the search results info file associated with the command - invocation. - - :return: Search results info:const:`None`, if the search results info file associated with the command - invocation is inaccessible. - :rtype: SearchResultsInfo or NoneType - - """ - if self._search_results_info is not None: - return self._search_results_info - - if self._protocol_version == 1: - try: - path = self._input_header['infoPath'] - except KeyError: - return None - else: - assert self._protocol_version == 2 - - try: - dispatch_dir = self._metadata.searchinfo.dispatch_dir - except AttributeError: - return None - - path = os.path.join(dispatch_dir, 'info.csv') - - try: - with io.open(path, 'r') as f: - reader = csv.reader(f, dialect=CsvDialect) - fields = next(reader) - values = next(reader) - except IOError as error: - if error.errno == 2: - self.logger.error(f'Search results info file {json_encode_string(path)} does not exist.') - return - raise - - def convert_field(field): - return (field[1:] if field[0] == '_' else field).replace('.', '_') - - decode = MetadataDecoder().decode - - def convert_value(value): - try: - return decode(value) if len(value) > 0 else value - except ValueError: - return value - - info = ObjectView(dict((convert_field(f_v[0]), convert_value(f_v[1])) for f_v in zip(fields, values))) - - try: - count_map = info.countMap - except AttributeError: - pass - else: - count_map = count_map.split(';') - n = len(count_map) - info.countMap = dict(list(zip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2)))) - - try: - msg_type = info.msgType - msg_text = info.msg - except AttributeError: - pass - else: - messages = [t_m for t_m in zip(msg_type.split('\n'), msg_text.split('\n')) if t_m[0] or t_m[1]] - info.msg = [Message(message) for message in messages] - del info.msgType - - try: - info.vix_families = ElementTree.fromstring(info.vix_families) - except AttributeError: - pass - - self._search_results_info = info - return info - - @property - def service(self): - """ Returns a Splunk service object for this command invocation or None. - - The service object is created from the Splunkd URI and authentication token passed to the command invocation in - the search results info file. This data is not passed to a command invocation by default. 
You must request it by - specifying this pair of configuration settings in commands.conf: - - .. code-block:: python - - enableheader = true - requires_srinfo = true - - The :code:`enableheader` setting is :code:`true` by default. Hence, you need not set it. The - :code:`requires_srinfo` setting is false by default. Hence, you must set it. - - :return: :class:`splunklib.client.Service`, if :code:`enableheader` and :code:`requires_srinfo` are both - :code:`true`. Otherwise, if either :code:`enableheader` or :code:`requires_srinfo` are :code:`false`, a value - of :code:`None` is returned. - - """ - if self._service is not None: - return self._service - - metadata = self._metadata - - if metadata is None: - return None - - try: - searchinfo = self._metadata.searchinfo - except AttributeError: - return None - - splunkd_uri = searchinfo.splunkd_uri - - if splunkd_uri is None: - return None - - uri = urlsplit(splunkd_uri, allow_fragments=False) - - self._service = Service( - scheme=uri.scheme, host=uri.hostname, port=uri.port, app=searchinfo.app, token=searchinfo.session_key) - - return self._service - - # endregion - - # region Methods - - def error_exit(self, error, message=None): - self.write_error(error.message if message is None else message) - self.logger.error('Abnormal exit: %s', error) - exit(1) - - def finish(self): - """ Flushes the output buffer and signals that this command has finished processing data. - - :return: :const:`None` - - """ - self._record_writer.flush(finished=True) - - def flush(self): - """ Flushes the output buffer. - - :return: :const:`None` - - """ - self._record_writer.flush(finished=False) - - def prepare(self): - """ Prepare for execution. - - This method should be overridden in search command classes that wish to examine and update their configuration - or option settings prior to execution. It is called during the getinfo exchange before command metadata is sent - to splunkd. - - :return: :const:`None` - :rtype: NoneType - - """ - - def process(self, argv=sys.argv, ifile=sys.stdin, ofile=sys.stdout, allow_empty_input=True): - """ Process data. - - :param argv: Command line arguments. - :type argv: list or tuple - - :param ifile: Input data file. - :type ifile: file - - :param ofile: Output data file. 
- :type ofile: file - - :param allow_empty_input: Allow empty input records for the command, if False an Error will be returned if empty chunk body is encountered when read - :type allow_empty_input: bool - - :return: :const:`None` - :rtype: NoneType - - """ - - self._allow_empty_input = allow_empty_input - - if len(argv) > 1: - self._process_protocol_v1(argv, ifile, ofile) - else: - self._process_protocol_v2(argv, ifile, ofile) - - def _map_input_header(self): - metadata = self._metadata - searchinfo = metadata.searchinfo - self._input_header.update( - allowStream=None, - infoPath=os.path.join(searchinfo.dispatch_dir, 'info.csv'), - keywords=None, - preview=metadata.preview, - realtime=searchinfo.earliest_time != 0 and searchinfo.latest_time != 0, - search=searchinfo.search, - sid=searchinfo.sid, - splunkVersion=searchinfo.splunk_version, - truncated=None) - - def _map_metadata(self, argv): - source = SearchCommand._MetadataSource(argv, self._input_header, self.search_results_info) - - def _map(metadata_map): - metadata = {} - - for name, value in list(metadata_map.items()): - if isinstance(value, dict): - value = _map(value) - else: - transform, extract = value - if extract is None: - value = None - else: - value = extract(source) - if not (value is None or transform is None): - value = transform(value) - metadata[name] = value - - return ObjectView(metadata) - - self._metadata = _map(SearchCommand._metadata_map) - - _metadata_map = { - 'action': - (lambda v: 'getinfo' if v == '__GETINFO__' else 'execute' if v == '__EXECUTE__' else None, - lambda s: s.argv[1]), - 'preview': - (bool, lambda s: s.input_header.get('preview')), - 'searchinfo': { - 'app': - (lambda v: v.ppc_app, lambda s: s.search_results_info), - 'args': - (None, lambda s: s.argv), - 'dispatch_dir': - (os.path.dirname, lambda s: s.input_header.get('infoPath')), - 'earliest_time': - (lambda v: float(v.rt_earliest) if len(v.rt_earliest) > 0 else 0.0, lambda s: s.search_results_info), - 'latest_time': - (lambda v: float(v.rt_latest) if len(v.rt_latest) > 0 else 0.0, lambda s: s.search_results_info), - 'owner': - (None, None), - 'raw_args': - (None, lambda s: s.argv), - 'search': - (unquote, lambda s: s.input_header.get('search')), - 'session_key': - (lambda v: v.auth_token, lambda s: s.search_results_info), - 'sid': - (None, lambda s: s.input_header.get('sid')), - 'splunk_version': - (None, lambda s: s.input_header.get('splunkVersion')), - 'splunkd_uri': - (lambda v: v.splunkd_uri, lambda s: s.search_results_info), - 'username': - (lambda v: v.ppc_user, lambda s: s.search_results_info)}} - - _MetadataSource = namedtuple('Source', ('argv', 'input_header', 'search_results_info')) - - def _prepare_protocol_v1(self, argv, ifile, ofile): - - debug = environment.splunklib_logger.debug - - # Provide as much context as possible in advance of parsing the command line and preparing for execution - - self._input_header.read(ifile) - self._protocol_version = 1 - self._map_metadata(argv) - - debug(' metadata=%r, input_header=%r', self._metadata, self._input_header) - - try: - tempfile.tempdir = self._metadata.searchinfo.dispatch_dir - except AttributeError: - raise RuntimeError(f'{self.__class__.__name__}.metadata.searchinfo.dispatch_dir is undefined') - - debug(' tempfile.tempdir=%r', tempfile.tempdir) - - CommandLineParser.parse(self, argv[2:]) - self.prepare() - - if self.record: - self.record = False - - record_argv = [argv[0], argv[1], str(self._options), ' '.join(self.fieldnames)] - ifile, ofile = self._prepare_recording(record_argv, 
ifile, ofile) - self._record_writer.ofile = ofile - ifile.record(str(self._input_header), '\n\n') - - if self.show_configuration: - self.write_info(self.name + ' command configuration: ' + str(self._configuration)) - - return ifile # wrapped, if self.record is True - - def _prepare_recording(self, argv, ifile, ofile): - - # Create the recordings directory, if it doesn't already exist - - recordings = os.path.join(environment.splunk_home, 'var', 'run', 'splunklib.searchcommands', 'recordings') - - if not os.path.isdir(recordings): - os.makedirs(recordings) - - # Create input/output recorders from ifile and ofile - - recording = os.path.join(recordings, self.__class__.__name__ + '-' + repr(time()) + '.' + self._metadata.action) - ifile = Recorder(recording + '.input', ifile) - ofile = Recorder(recording + '.output', ofile) - - # Archive the dispatch directory--if it exists--so that it can be used as a baseline in mocks) - - dispatch_dir = self._metadata.searchinfo.dispatch_dir - - if dispatch_dir is not None: # __GETINFO__ action does not include a dispatch_dir - root_dir, base_dir = os.path.split(dispatch_dir) - make_archive(recording + '.dispatch_dir', 'gztar', root_dir, base_dir, logger=self.logger) - - # Save a splunk command line because it is useful for developing tests - - with open(recording + '.splunk_cmd', 'wb') as f: - f.write('splunk cmd python '.encode()) - f.write(os.path.basename(argv[0]).encode()) - for arg in islice(argv, 1, len(argv)): - f.write(' '.encode()) - f.write(arg.encode()) - - return ifile, ofile - - def _process_protocol_v1(self, argv, ifile, ofile): - - debug = environment.splunklib_logger.debug - class_name = self.__class__.__name__ - - debug('%s.process started under protocol_version=1', class_name) - self._record_writer = RecordWriterV1(ofile) - - # noinspection PyBroadException - try: - if argv[1] == '__GETINFO__': - - debug('Writing configuration settings') - - ifile = self._prepare_protocol_v1(argv, ifile, ofile) - self._record_writer.write_record(dict( - (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in - list(self._configuration.items()))) - self.finish() - - elif argv[1] == '__EXECUTE__': - - debug('Executing') - - ifile = self._prepare_protocol_v1(argv, ifile, ofile) - self._records = self._records_protocol_v1 - self._metadata.action = 'execute' - self._execute(ifile, None) - - else: - message = ( - f'Command {self.name} appears to be statically configured for search command protocol version 1 and static ' - 'configuration is unsupported by splunklib.searchcommands. Please ensure that ' - 'default/commands.conf contains this stanza:\n' - f'[{self.name}]\n' - f'filename = {os.path.basename(argv[0])}\n' - 'enableheader = true\n' - 'outputheader = true\n' - 'requires_srinfo = true\n' - 'supports_getinfo = true\n' - 'supports_multivalues = true\n' - 'supports_rawargs = true') - raise RuntimeError(message) - - except (SyntaxError, ValueError) as error: - self.write_error(str(error)) - self.flush() - exit(0) - - except SystemExit: - self.flush() - raise - - except: - self._report_unexpected_error() - self.flush() - exit(1) - - debug('%s.process finished under protocol_version=1', class_name) - - def _protocol_v2_option_parser(self, arg): - """ Determines if an argument is an Option/Value pair, or just a Positional Argument. - Method so different search commands can handle parsing of arguments differently. 
- - :param arg: A single argument provided to the command from SPL - :type arg: str - - :return: [OptionName, OptionValue] OR [PositionalArgument] - :rtype: List[str] - - """ - return arg.split('=', 1) - - def _process_protocol_v2(self, argv, ifile, ofile): - """ Processes records on the `input stream optionally writing records to the output stream. - - :param ifile: Input file object. - :type ifile: file or InputType - - :param ofile: Output file object. - :type ofile: file or OutputType - - :return: :const:`None` - - """ - debug = environment.splunklib_logger.debug - class_name = self.__class__.__name__ - - debug('%s.process started under protocol_version=2', class_name) - self._protocol_version = 2 - - # Read search command metadata from splunkd - # noinspection PyBroadException - try: - debug('Reading metadata') - metadata, body = self._read_chunk(self._as_binary_stream(ifile)) - - action = getattr(metadata, 'action', None) - - if action != 'getinfo': - raise RuntimeError(f'Expected getinfo action, not {action}') - - if len(body) > 0: - raise RuntimeError('Did not expect data for getinfo action') - - self._metadata = deepcopy(metadata) - - searchinfo = self._metadata.searchinfo - - searchinfo.earliest_time = float(searchinfo.earliest_time) - searchinfo.latest_time = float(searchinfo.latest_time) - searchinfo.search = unquote(searchinfo.search) - - self._map_input_header() - - debug(' metadata=%r, input_header=%r', self._metadata, self._input_header) - - try: - tempfile.tempdir = self._metadata.searchinfo.dispatch_dir - except AttributeError: - raise RuntimeError(f'{class_name}.metadata.searchinfo.dispatch_dir is undefined') - - debug(' tempfile.tempdir=%r', tempfile.tempdir) - except: - self._record_writer = RecordWriterV2(ofile) - self._report_unexpected_error() - self.finish() - exit(1) - - # Write search command configuration for consumption by splunkd - # noinspection PyBroadException - try: - self._record_writer = RecordWriterV2(ofile, getattr(self._metadata.searchinfo, 'maxresultrows', None)) - self.fieldnames = [] - self.options.reset() - - args = self.metadata.searchinfo.args - error_count = 0 - - debug('Parsing arguments') - - if args and isinstance(args, list): - for arg in args: - result = self._protocol_v2_option_parser(arg) - if len(result) == 1: - self.fieldnames.append(str(result[0])) - else: - name, value = result - name = str(name) - try: - option = self.options[name] - except KeyError: - self.write_error(f'Unrecognized option: {name}={value}') - error_count += 1 - continue - try: - option.value = value - except ValueError: - self.write_error(f'Illegal value: {name}={value}') - error_count += 1 - continue - - missing = self.options.get_missing() - - if missing is not None: - if len(missing) == 1: - self.write_error(f'A value for "{missing[0]}" is required') - else: - self.write_error(f'Values for these required options are missing: {", ".join(missing)}') - error_count += 1 - - if error_count > 0: - exit(1) - - debug(' command: %s', str(self)) - - debug('Preparing for execution') - self.prepare() - - if self.record: - - ifile, ofile = self._prepare_recording(argv, ifile, ofile) - self._record_writer.ofile = ofile - - # Record the metadata that initiated this command after removing the record option from args/raw_args - - info = self._metadata.searchinfo - - for attr in 'args', 'raw_args': - setattr(info, attr, [arg for arg in getattr(info, attr) if not arg.startswith('record=')]) - - metadata = MetadataEncoder().encode(self._metadata) - ifile.record('chunked 1.0,', 
str(len(metadata)), ',0\n', metadata) - - if self.show_configuration: - self.write_info(self.name + ' command configuration: ' + str(self._configuration)) - - debug(' command configuration: %s', self._configuration) - - except SystemExit: - self._record_writer.write_metadata(self._configuration) - self.finish() - raise - except: - self._record_writer.write_metadata(self._configuration) - self._report_unexpected_error() - self.finish() - exit(1) - - self._record_writer.write_metadata(self._configuration) - - # Execute search command on data passing through the pipeline - # noinspection PyBroadException - try: - debug('Executing under protocol_version=2') - self._metadata.action = 'execute' - self._execute(ifile, None) - except SystemExit: - self.finish() - raise - except: - self._report_unexpected_error() - self.finish() - exit(1) - - debug('%s.process completed', class_name) - - def write_debug(self, message, *args): - self._record_writer.write_message('DEBUG', message, *args) - - def write_error(self, message, *args): - self._record_writer.write_message('ERROR', message, *args) - - def write_fatal(self, message, *args): - self._record_writer.write_message('FATAL', message, *args) - - def write_info(self, message, *args): - self._record_writer.write_message('INFO', message, *args) - - def write_warning(self, message, *args): - self._record_writer.write_message('WARN', message, *args) - - def write_metric(self, name, value): - """ Writes a metric that will be added to the search inspector. - - :param name: Name of the metric. - :type name: basestring - - :param value: A 4-tuple containing the value of metric ``name`` where - - value[0] = Elapsed seconds or :const:`None`. - value[1] = Number of invocations or :const:`None`. - value[2] = Input count or :const:`None`. - value[3] = Output count or :const:`None`. - - The :data:`SearchMetric` type provides a convenient encapsulation of ``value``. - The :data:`SearchMetric` type provides a convenient encapsulation of ``value``. - - :return: :const:`None`. - - """ - self._record_writer.write_metric(name, value) - - # P2 [ ] TODO: Support custom inspector values - - @staticmethod - def _decode_list(mv): - return [match.replace('$$', '$') for match in SearchCommand._encoded_value.findall(mv)] - - _encoded_value = re.compile(r'\$(?P(?:\$\$|[^$])*)\$(?:;|$)') # matches a single value in an encoded list - - # Note: Subclasses must override this method so that it can be called - # called as self._execute(ifile, None) - def _execute(self, ifile, process): - """ Default processing loop - - :param ifile: Input file object. - :type ifile: file - - :param process: Bound method to call in processing loop. - :type process: instancemethod - - :return: :const:`None`. 
- :rtype: NoneType - - """ - if self.protocol_version == 1: - self._record_writer.write_records(process(self._records(ifile))) - self.finish() - else: - assert self._protocol_version == 2 - self._execute_v2(ifile, process) - - @staticmethod - def _as_binary_stream(ifile): - naught = ifile.read(0) - if isinstance(naught, bytes): - return ifile - - try: - return ifile.buffer - except AttributeError as error: - raise RuntimeError(f'Failed to get underlying buffer: {error}') - - @staticmethod - def _read_chunk(istream): - # noinspection PyBroadException - assert isinstance(istream.read(0), bytes), 'Stream must be binary' - - try: - header = istream.readline() - except Exception as error: - raise RuntimeError(f'Failed to read transport header: {error}') - - if not header: - return None - - match = SearchCommand._header.match(ensure_str(header)) - - if match is None: - raise RuntimeError(f'Failed to parse transport header: {header}') - - metadata_length, body_length = match.groups() - metadata_length = int(metadata_length) - body_length = int(body_length) - - try: - metadata = istream.read(metadata_length) - except Exception as error: - raise RuntimeError(f'Failed to read metadata of length {metadata_length}: {error}') - - decoder = MetadataDecoder() - - try: - metadata = decoder.decode(ensure_str(metadata)) - except Exception as error: - raise RuntimeError(f'Failed to parse metadata of length {metadata_length}: {error}') - - # if body_length <= 0: - # return metadata, '' - - body = "" - try: - if body_length > 0: - body = istream.read(body_length) - except Exception as error: - raise RuntimeError(f'Failed to read body of length {body_length}: {error}') - - return metadata, ensure_str(body,errors="replace") - - _header = re.compile(r'chunked\s+1.0\s*,\s*(\d+)\s*,\s*(\d+)\s*\n') - - def _records_protocol_v1(self, ifile): - return self._read_csv_records(ifile) - - def _read_csv_records(self, ifile): - reader = csv.reader(ifile, dialect=CsvDialect) - - try: - fieldnames = next(reader) - except StopIteration: - return - - mv_fieldnames = dict((name, name[len('__mv_'):]) for name in fieldnames if name.startswith('__mv_')) - - if len(mv_fieldnames) == 0: - for values in reader: - yield OrderedDict(list(zip(fieldnames, values))) - return - - for values in reader: - record = OrderedDict() - for fieldname, value in zip(fieldnames, values): - if fieldname.startswith('__mv_'): - if len(value) > 0: - record[mv_fieldnames[fieldname]] = self._decode_list(value) - elif fieldname not in record: - record[fieldname] = value - yield record - - def _execute_v2(self, ifile, process): - istream = self._as_binary_stream(ifile) - - while True: - result = self._read_chunk(istream) - - if not result: - return - - metadata, body = result - action = getattr(metadata, 'action', None) - if action != 'execute': - raise RuntimeError(f'Expected execute action, not {action}') - - self._finished = getattr(metadata, 'finished', False) - self._record_writer.is_flushed = False - self._metadata.update(metadata) - self._execute_chunk_v2(process, result) - - self._record_writer.write_chunk(finished=self._finished) - - def _execute_chunk_v2(self, process, chunk): - metadata, body = chunk - - if len(body) <= 0 and not self._allow_empty_input: - raise ValueError( - "No records found to process. 
Set allow_empty_input=True in dispatch function to move forward " - "with empty records.") - - records = self._read_csv_records(StringIO(body)) - self._record_writer.write_records(process(records)) - - def _report_unexpected_error(self): - - error_type, error, tb = sys.exc_info() - origin = tb - - while origin.tb_next is not None: - origin = origin.tb_next - - filename = origin.tb_frame.f_code.co_filename - lineno = origin.tb_lineno - message = f'{error_type.__name__} at "{filename}", line {str(lineno)} : {error}' - - environment.splunklib_logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb))) - self.write_error(message) - - # endregion - - # region Types - - class ConfigurationSettings: - """ Represents the configuration settings common to all :class:`SearchCommand` classes. - - """ - - def __init__(self, command): - self.command = command - - def __repr__(self): - """ Converts the value of this instance to its string representation. - - The value of this ConfigurationSettings instance is represented as a string of comma-separated - :code:`(name, value)` pairs. - - :return: String representation of this instance - - """ - definitions = type(self).configuration_setting_definitions - settings = [repr((setting.name, setting.__get__(self), setting.supporting_protocols)) for setting in - definitions] - return '[' + ', '.join(settings) + ']' - - def __str__(self): - """ Converts the value of this instance to its string representation. - - The value of this ConfigurationSettings instance is represented as a string of comma-separated - :code:`name=value` pairs. Items with values of :const:`None` are filtered from the list. - - :return: String representation of this instance - - """ - # text = ', '.join(imap(lambda (name, value): name + '=' + json_encode_string(unicode(value)), self.iteritems())) - text = ', '.join([f'{name}={json_encode_string(str(value))}' for (name, value) in list(self.items())]) - return text - - # region Methods - - @classmethod - def fix_up(cls, command_class): - """ Adjusts and checks this class and its search command class. - - Derived classes typically override this method. It is used by the :decorator:`Configuration` decorator to - fix up the :class:`SearchCommand` class it adorns. This method is overridden by :class:`EventingCommand`, - :class:`GeneratingCommand`, :class:`ReportingCommand`, and :class:`StreamingCommand`, the base types for - all other search commands. 
- - :param command_class: Command class targeted by this class - - """ - return - - # TODO: Stop looking like a dictionary because we don't obey the semantics - # N.B.: Does not use Python 2 dict copy semantics - def iteritems(self): - definitions = type(self).configuration_setting_definitions - version = self.command.protocol_version - return [name_value1 for name_value1 in [(setting.name, setting.__get__(self)) for setting in - [setting for setting in definitions if - setting.is_supported_by_protocol(version)]] if - name_value1[1] is not None] - - # N.B.: Does not use Python 3 dict view semantics - - items = iteritems - - # endregion - - # endregion - - -SearchMetric = namedtuple('SearchMetric', ('elapsed_seconds', 'invocation_count', 'input_count', 'output_count')) - - -def dispatch(command_class, argv=sys.argv, input_file=sys.stdin, output_file=sys.stdout, module_name=None, - allow_empty_input=True): - """ Instantiates and executes a search command class - - This function implements a `conditional script stanza `_ based on the value of - :code:`module_name`:: - - if module_name is None or module_name == '__main__': - # execute command - - Call this function at module scope with :code:`module_name=__name__`, if you would like your module to act as either - a reusable module or a standalone program. Otherwise, if you wish this function to unconditionally instantiate and - execute :code:`command_class`, pass :const:`None` as the value of :code:`module_name`. - - :param command_class: Search command class to instantiate and execute. - :type command_class: type - :param argv: List of arguments to the command. - :type argv: list or tuple - :param input_file: File from which the command will read data. - :type input_file: :code:`file` - :param output_file: File to which the command will write data. - :type output_file: :code:`file` - :param module_name: Name of the module calling :code:`dispatch` or :const:`None`. - :type module_name: :code:`basestring` - :param allow_empty_input: Allow empty input records for the command, if False an Error will be returned if empty chunk body is encountered when read - :type allow_empty_input: bool - :returns: :const:`None` - - **Example** - - .. code-block:: python - :linenos: - - #!/usr/bin/env python - from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators - @Configuration() - class SomeStreamingCommand(StreamingCommand): - ... - def stream(records): - ... - dispatch(SomeStreamingCommand, module_name=__name__) - - Dispatches the :code:`SomeStreamingCommand`, if and only if :code:`__name__` is equal to :code:`'__main__'`. - - **Example** - - .. code-block:: python - :linenos: - - from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators - @Configuration() - class SomeStreamingCommand(StreamingCommand): - ... - def stream(records): - ... - dispatch(SomeStreamingCommand) - - Unconditionally dispatches :code:`SomeStreamingCommand`. - - """ - assert issubclass(command_class, SearchCommand) - - if module_name is None or module_name == '__main__': - command_class().process(argv, input_file, output_file, allow_empty_input) +# coding=utf-8 +# +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. 
You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# Absolute imports
+
+import csv
+import io
+import os
+import re
+import sys
+import tempfile
+import traceback
+from collections import namedtuple, OrderedDict
+from copy import deepcopy
+from io import StringIO
+from itertools import chain, islice
+from logging import _nameToLevel as _levelNames, getLevelName, getLogger
+from shutil import make_archive
+from time import time
+from urllib.parse import unquote
+from urllib.parse import urlsplit
+from warnings import warn
+from xml.etree import ElementTree
+from splunklib.utils import ensure_str
+
+
+# Relative imports
+import splunklib
+from . import Boolean, Option, environment
+from .internals import (
+    CommandLineParser,
+    CsvDialect,
+    InputHeader,
+    Message,
+    MetadataDecoder,
+    MetadataEncoder,
+    ObjectView,
+    Recorder,
+    RecordWriterV1,
+    RecordWriterV2,
+    json_encode_string)
+from ..client import Service
+
+
+# ----------------------------------------------------------------------------------------------------------------------
+
+# P1 [ ] TODO: Log these issues against ChunkedExternProcessor
+#
+# 1. Implement requires_preop configuration setting.
+#    This configuration setting is currently rejected by ChunkedExternProcessor.
+#
+# 2. Rename type=events as type=eventing for symmetry with type=reporting and type=streaming
+#    Eventing commands process records on the events pipeline. This change affects ChunkedExternProcessor.cpp,
+#    eventing_command.py, and generating_command.py.
+#
+# 3. For consistency with SCPV1, commands.conf should not require filename setting when chunked = true
+#    The SCPV1 processor uses .py as the default filename. The ChunkedExternProcessor should do the same.
+
+# P1 [ ] TODO: Verify that ChunkedExternProcessor complains if a streaming_preop has a type other than 'streaming'
+# It once looked like sending type='reporting' for the streaming_preop was accepted.
+
+# ----------------------------------------------------------------------------------------------------------------------
+
+# P2 [ ] TODO: Consider bumping None formatting up to Option.Item.__str__
+
+
+class SearchCommand:
+    """ Represents a custom search command.
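+
+    A minimal sketch of a derived command (the names are illustrative; real
+    commands subclass one of the concrete types such as :class:`StreamingCommand`):
+
+    .. code-block:: python
+
+        @Configuration()
+        class EchoCommand(StreamingCommand):
+            def stream(self, records):
+                for record in records:
+                    yield record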
+
+    """
+
+    def __init__(self):
+
+        # Variables that may be used, but not altered by derived classes
+
+        class_name = self.__class__.__name__
+
+        self._logger, self._logging_configuration = getLogger(class_name), environment.logging_configuration
+
+        # Variables backing option/property values
+
+        self._configuration = self.ConfigurationSettings(self)
+        self._input_header = InputHeader()
+        self._fieldnames = None
+        self._finished = None
+        self._metadata = None
+        self._options = None
+        self._protocol_version = None
+        self._search_results_info = None
+        self._service = None
+
+        # Internal variables
+
+        self._default_logging_level = self._logger.level
+        self._record_writer = None
+        self._records = None
+        self._allow_empty_input = True
+
+    def __str__(self):
+        text = ' '.join(chain((type(self).name, str(self.options)), [] if self.fieldnames is None else self.fieldnames))
+        return text
+
+    # region Options
+
+    @Option
+    def logging_configuration(self):
+        """ **Syntax:** logging_configuration=<path>
+
+        **Description:** Loads an alternative logging configuration file for
+        a command invocation. The logging configuration file must be in Python
+        ConfigParser-format. Path names are relative to the app root directory.
+
+        """
+        return self._logging_configuration
+
+    @logging_configuration.setter
+    def logging_configuration(self, value):
+        self._logger, self._logging_configuration = environment.configure_logging(self.__class__.__name__, value)
+
+    @Option
+    def logging_level(self):
+        """ **Syntax:** logging_level=[CRITICAL|ERROR|WARNING|INFO|DEBUG|NOTSET]
+
+        **Description:** Sets the threshold for the logger of this command invocation. Logging messages less severe than
+        `logging_level` will be ignored.
+
+        """
+        return getLevelName(self._logger.getEffectiveLevel())
+
+    @logging_level.setter
+    def logging_level(self, value):
+        if value is None:
+            value = self._default_logging_level
+        if isinstance(value, (bytes, str)):
+            try:
+                level = _levelNames[value.upper()]
+            except KeyError:
+                raise ValueError(f'Unrecognized logging level: {value}')
+        else:
+            try:
+                level = int(value)
+            except ValueError:
+                raise ValueError(f'Unrecognized logging level: {value}')
+        self._logger.setLevel(level)
+
+    def add_field(self, current_record, field_name, field_value):
+        self._record_writer.custom_fields.add(field_name)
+        current_record[field_name] = field_value
+
+    def gen_record(self, **record):
+        self._record_writer.custom_fields |= set(record.keys())
+        return record
+
+    record = Option(doc='''
+        **Syntax:** record=<bool>
+
+        **Description:** When `true`, records the interaction between the command and splunkd. Defaults to `false`.
+
+        ''', default=False, validate=Boolean())
+
+    show_configuration = Option(doc='''
+        **Syntax:** show_configuration=<bool>
+
+        **Description:** When `true`, reports command configuration as an informational message. Defaults to `false`.
+
+        ''', default=False, validate=Boolean())
+
+    # endregion
+
+    # region Properties
+
+    @property
+    def configuration(self):
+        """ Returns the configuration settings for this command.
+
+        """
+        return self._configuration
+
+    @property
+    def fieldnames(self):
+        """ Returns the fieldnames specified as argument to this command.
+
+        """
+        return self._fieldnames
+
+    @fieldnames.setter
+    def fieldnames(self, value):
+        self._fieldnames = value
+
+    @property
+    def input_header(self):
+        """ Returns the input header for this command.
+
+        :return: The input header for this command.
+        :rtype: InputHeader
+
+        """
+        warn(
+            'SearchCommand.input_header is deprecated and will be removed in a future release. '
+            'Please use SearchCommand.metadata instead.', DeprecationWarning, 2)
+        return self._input_header
+
+    @property
+    def logger(self):
+        """ Returns the logger for this command.
+
+        :return: The logger for this command.
+        :rtype: logging.Logger
+
+        """
+        return self._logger
+
+    @property
+    def metadata(self):
+        return self._metadata
+
+    @property
+    def options(self):
+        """ Returns the options specified as argument to this command.
+
+        """
+        if self._options is None:
+            self._options = Option.View(self)
+        return self._options
+
+    @property
+    def protocol_version(self):
+        return self._protocol_version
+
+    @property
+    def search_results_info(self):
+        """ Returns the search results info for this command invocation.
+
+        The search results info object is created from the search results info file associated with the command
+        invocation.
+
+        :return: Search results info or :const:`None`, if the search results info file associated with the command
+            invocation is inaccessible.
+        :rtype: SearchResultsInfo or NoneType
+
+        """
+        if self._search_results_info is not None:
+            return self._search_results_info
+
+        if self._protocol_version == 1:
+            try:
+                path = self._input_header['infoPath']
+            except KeyError:
+                return None
+        else:
+            assert self._protocol_version == 2
+
+            try:
+                dispatch_dir = self._metadata.searchinfo.dispatch_dir
+            except AttributeError:
+                return None
+
+            path = os.path.join(dispatch_dir, 'info.csv')
+
+        try:
+            with io.open(path, 'r') as f:
+                reader = csv.reader(f, dialect=CsvDialect)
+                fields = next(reader)
+                values = next(reader)
+        except IOError as error:
+            if error.errno == 2:
+                self.logger.error(f'Search results info file {json_encode_string(path)} does not exist.')
+                return
+            raise
+
+        def convert_field(field):
+            return (field[1:] if field[0] == '_' else field).replace('.', '_')
+
+        decode = MetadataDecoder().decode
+
+        def convert_value(value):
+            try:
+                return decode(value) if len(value) > 0 else value
+            except ValueError:
+                return value
+
+        info = ObjectView(dict((convert_field(f_v[0]), convert_value(f_v[1])) for f_v in zip(fields, values)))
+
+        try:
+            count_map = info.countMap
+        except AttributeError:
+            pass
+        else:
+            count_map = count_map.split(';')
+            n = len(count_map)
+            info.countMap = dict(list(zip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2))))
+
+        try:
+            msg_type = info.msgType
+            msg_text = info.msg
+        except AttributeError:
+            pass
+        else:
+            messages = [t_m for t_m in zip(msg_type.split('\n'), msg_text.split('\n')) if t_m[0] or t_m[1]]
+            info.msg = [Message(message) for message in messages]
+            del info.msgType
+
+        try:
+            info.vix_families = ElementTree.fromstring(info.vix_families)
+        except AttributeError:
+            pass
+
+        self._search_results_info = info
+        return info
+
+    @property
+    def service(self):
+        """ Returns a Splunk service object for this command invocation or None.
+
+        The service object is created from the Splunkd URI and authentication token passed to the command invocation in
+        the search results info file. This data is not passed to a command invocation by default. You must request it by
+        specifying this pair of configuration settings in commands.conf:
+
+        .. code-block:: python
+
+            enableheader = true
+            requires_srinfo = true
+
+        The :code:`enableheader` setting is :code:`true` by default. Hence, you need not set it. The
+        :code:`requires_srinfo` setting is false by default. Hence, you must set it.
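+
+        For example, a generating command might use the service handle to list
+        index names (a sketch; the command name is illustrative):
+
+        .. code-block:: python
+
+            @Configuration()
+            class IndexNamesCommand(GeneratingCommand):
+                def generate(self):
+                    # self.service is None unless the settings above are present
+                    for index in self.service.indexes:
+                        yield {'_raw': index.name}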
+ + :return: :class:`splunklib.client.Service`, if :code:`enableheader` and :code:`requires_srinfo` are both + :code:`true`. Otherwise, if either :code:`enableheader` or :code:`requires_srinfo` are :code:`false`, a value + of :code:`None` is returned. + + """ + if self._service is not None: + return self._service + + metadata = self._metadata + + if metadata is None: + return None + + try: + searchinfo = self._metadata.searchinfo + except AttributeError: + return None + + splunkd_uri = searchinfo.splunkd_uri + + if splunkd_uri is None: + return None + + uri = urlsplit(splunkd_uri, allow_fragments=False) + + self._service = Service( + scheme=uri.scheme, host=uri.hostname, port=uri.port, app=searchinfo.app, token=searchinfo.session_key) + + return self._service + + # endregion + + # region Methods + + def error_exit(self, error, message=None): + self.write_error(error.message if message is None else message) + self.logger.error('Abnormal exit: %s', error) + exit(1) + + def finish(self): + """ Flushes the output buffer and signals that this command has finished processing data. + + :return: :const:`None` + + """ + self._record_writer.flush(finished=True) + + def flush(self): + """ Flushes the output buffer. + + :return: :const:`None` + + """ + self._record_writer.flush(finished=False) + + def prepare(self): + """ Prepare for execution. + + This method should be overridden in search command classes that wish to examine and update their configuration + or option settings prior to execution. It is called during the getinfo exchange before command metadata is sent + to splunkd. + + :return: :const:`None` + :rtype: NoneType + + """ + + def process(self, argv=sys.argv, ifile=sys.stdin, ofile=sys.stdout, allow_empty_input=True): + """ Process data. + + :param argv: Command line arguments. + :type argv: list or tuple + + :param ifile: Input data file. + :type ifile: file + + :param ofile: Output data file. 
+ :type ofile: file + + :param allow_empty_input: Allow empty input records for the command, if False an Error will be returned if empty chunk body is encountered when read + :type allow_empty_input: bool + + :return: :const:`None` + :rtype: NoneType + + """ + + self._allow_empty_input = allow_empty_input + + if len(argv) > 1: + self._process_protocol_v1(argv, ifile, ofile) + else: + self._process_protocol_v2(argv, ifile, ofile) + + def _map_input_header(self): + metadata = self._metadata + searchinfo = metadata.searchinfo + self._input_header.update( + allowStream=None, + infoPath=os.path.join(searchinfo.dispatch_dir, 'info.csv'), + keywords=None, + preview=metadata.preview, + realtime=searchinfo.earliest_time != 0 and searchinfo.latest_time != 0, + search=searchinfo.search, + sid=searchinfo.sid, + splunkVersion=searchinfo.splunk_version, + truncated=None) + + def _map_metadata(self, argv): + source = SearchCommand._MetadataSource(argv, self._input_header, self.search_results_info) + + def _map(metadata_map): + metadata = {} + + for name, value in metadata_map.items(): + if isinstance(value, dict): + value = _map(value) + else: + transform, extract = value + if extract is None: + value = None + else: + value = extract(source) + if not (value is None or transform is None): + value = transform(value) + metadata[name] = value + + return ObjectView(metadata) + + self._metadata = _map(SearchCommand._metadata_map) + + _metadata_map = { + 'action': + (lambda v: 'getinfo' if v == '__GETINFO__' else 'execute' if v == '__EXECUTE__' else None, + lambda s: s.argv[1]), + 'preview': + (bool, lambda s: s.input_header.get('preview')), + 'searchinfo': { + 'app': + (lambda v: v.ppc_app, lambda s: s.search_results_info), + 'args': + (None, lambda s: s.argv), + 'dispatch_dir': + (os.path.dirname, lambda s: s.input_header.get('infoPath')), + 'earliest_time': + (lambda v: float(v.rt_earliest) if len(v.rt_earliest) > 0 else 0.0, lambda s: s.search_results_info), + 'latest_time': + (lambda v: float(v.rt_latest) if len(v.rt_latest) > 0 else 0.0, lambda s: s.search_results_info), + 'owner': + (None, None), + 'raw_args': + (None, lambda s: s.argv), + 'search': + (unquote, lambda s: s.input_header.get('search')), + 'session_key': + (lambda v: v.auth_token, lambda s: s.search_results_info), + 'sid': + (None, lambda s: s.input_header.get('sid')), + 'splunk_version': + (None, lambda s: s.input_header.get('splunkVersion')), + 'splunkd_uri': + (lambda v: v.splunkd_uri, lambda s: s.search_results_info), + 'username': + (lambda v: v.ppc_user, lambda s: s.search_results_info)}} + + _MetadataSource = namedtuple('Source', ('argv', 'input_header', 'search_results_info')) + + def _prepare_protocol_v1(self, argv, ifile, ofile): + + debug = environment.splunklib_logger.debug + + # Provide as much context as possible in advance of parsing the command line and preparing for execution + + self._input_header.read(ifile) + self._protocol_version = 1 + self._map_metadata(argv) + + debug(' metadata=%r, input_header=%r', self._metadata, self._input_header) + + try: + tempfile.tempdir = self._metadata.searchinfo.dispatch_dir + except AttributeError: + raise RuntimeError(f'{self.__class__.__name__}.metadata.searchinfo.dispatch_dir is undefined') + + debug(' tempfile.tempdir=%r', tempfile.tempdir) + + CommandLineParser.parse(self, argv[2:]) + self.prepare() + + if self.record: + self.record = False + + record_argv = [argv[0], argv[1], str(self._options), ' '.join(self.fieldnames)] + ifile, ofile = self._prepare_recording(record_argv, ifile, 
ofile) + self._record_writer.ofile = ofile + ifile.record(str(self._input_header), '\n\n') + + if self.show_configuration: + self.write_info(self.name + ' command configuration: ' + str(self._configuration)) + + return ifile # wrapped, if self.record is True + + def _prepare_recording(self, argv, ifile, ofile): + + # Create the recordings directory, if it doesn't already exist + + recordings = os.path.join(environment.splunk_home, 'var', 'run', 'splunklib.searchcommands', 'recordings') + + if not os.path.isdir(recordings): + os.makedirs(recordings) + + # Create input/output recorders from ifile and ofile + + recording = os.path.join(recordings, self.__class__.__name__ + '-' + repr(time()) + '.' + self._metadata.action) + ifile = Recorder(recording + '.input', ifile) + ofile = Recorder(recording + '.output', ofile) + + # Archive the dispatch directory--if it exists--so that it can be used as a baseline in mocks) + + dispatch_dir = self._metadata.searchinfo.dispatch_dir + + if dispatch_dir is not None: # __GETINFO__ action does not include a dispatch_dir + root_dir, base_dir = os.path.split(dispatch_dir) + make_archive(recording + '.dispatch_dir', 'gztar', root_dir, base_dir, logger=self.logger) + + # Save a splunk command line because it is useful for developing tests + + with open(recording + '.splunk_cmd', 'wb') as f: + f.write('splunk cmd python '.encode()) + f.write(os.path.basename(argv[0]).encode()) + for arg in islice(argv, 1, len(argv)): + f.write(' '.encode()) + f.write(arg.encode()) + + return ifile, ofile + + def _process_protocol_v1(self, argv, ifile, ofile): + + debug = environment.splunklib_logger.debug + class_name = self.__class__.__name__ + + debug('%s.process started under protocol_version=1', class_name) + self._record_writer = RecordWriterV1(ofile) + + # noinspection PyBroadException + try: + if argv[1] == '__GETINFO__': + + debug('Writing configuration settings') + + ifile = self._prepare_protocol_v1(argv, ifile, ofile) + self._record_writer.write_record(dict( + (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in + self._configuration.items())) + self.finish() + + elif argv[1] == '__EXECUTE__': + + debug('Executing') + + ifile = self._prepare_protocol_v1(argv, ifile, ofile) + self._records = self._records_protocol_v1 + self._metadata.action = 'execute' + self._execute(ifile, None) + + else: + message = ( + f'Command {self.name} appears to be statically configured for search command protocol version 1 and static ' + 'configuration is unsupported by splunklib.searchcommands. Please ensure that ' + 'default/commands.conf contains this stanza:\n' + f'[{self.name}]\n' + f'filename = {os.path.basename(argv[0])}\n' + 'enableheader = true\n' + 'outputheader = true\n' + 'requires_srinfo = true\n' + 'supports_getinfo = true\n' + 'supports_multivalues = true\n' + 'supports_rawargs = true') + raise RuntimeError(message) + + except (SyntaxError, ValueError) as error: + self.write_error(str(error)) + self.flush() + exit(0) + + except SystemExit: + self.flush() + raise + + except: + self._report_unexpected_error() + self.flush() + exit(1) + + debug('%s.process finished under protocol_version=1', class_name) + + def _protocol_v2_option_parser(self, arg): + """ Determines if an argument is an Option/Value pair, or just a Positional Argument. + Method so different search commands can handle parsing of arguments differently. 
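+
+        For example, the default implementation splits on the first ``=`` only
+        (a sketch of its behavior):
+
+        .. code-block:: python
+
+            self._protocol_v2_option_parser('limit=10')   # ['limit', '10']
+            self._protocol_v2_option_parser('fieldname')  # ['fieldname']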
+
+        :param arg: A single argument provided to the command from SPL
+        :type arg: str
+
+        :return: [OptionName, OptionValue] OR [PositionalArgument]
+        :rtype: List[str]
+
+        """
+        return arg.split('=', 1)
+
+    def _process_protocol_v2(self, argv, ifile, ofile):
+        """ Processes records on the input stream, optionally writing records to the output stream.
+
+        :param ifile: Input file object.
+        :type ifile: file or InputType
+
+        :param ofile: Output file object.
+        :type ofile: file or OutputType
+
+        :return: :const:`None`
+
+        """
+        debug = environment.splunklib_logger.debug
+        class_name = self.__class__.__name__
+
+        debug('%s.process started under protocol_version=2', class_name)
+        self._protocol_version = 2
+
+        # Read search command metadata from splunkd
+        # noinspection PyBroadException
+        try:
+            debug('Reading metadata')
+            metadata, body = self._read_chunk(self._as_binary_stream(ifile))
+
+            action = getattr(metadata, 'action', None)
+
+            if action != 'getinfo':
+                raise RuntimeError(f'Expected getinfo action, not {action}')
+
+            if len(body) > 0:
+                raise RuntimeError('Did not expect data for getinfo action')
+
+            self._metadata = deepcopy(metadata)
+
+            searchinfo = self._metadata.searchinfo
+
+            searchinfo.earliest_time = float(searchinfo.earliest_time)
+            searchinfo.latest_time = float(searchinfo.latest_time)
+            searchinfo.search = unquote(searchinfo.search)
+
+            self._map_input_header()
+
+            debug(' metadata=%r, input_header=%r', self._metadata, self._input_header)
+
+            try:
+                tempfile.tempdir = self._metadata.searchinfo.dispatch_dir
+            except AttributeError:
+                raise RuntimeError(f'{class_name}.metadata.searchinfo.dispatch_dir is undefined')
+
+            debug(' tempfile.tempdir=%r', tempfile.tempdir)
+        except:
+            self._record_writer = RecordWriterV2(ofile)
+            self._report_unexpected_error()
+            self.finish()
+            exit(1)
+
+        # Write search command configuration for consumption by splunkd
+        # noinspection PyBroadException
+        try:
+            self._record_writer = RecordWriterV2(ofile, getattr(self._metadata.searchinfo, 'maxresultrows', None))
+            self.fieldnames = []
+            self.options.reset()
+
+            args = self.metadata.searchinfo.args
+            error_count = 0
+
+            debug('Parsing arguments')
+
+            if args and isinstance(args, list):
+                for arg in args:
+                    result = self._protocol_v2_option_parser(arg)
+                    if len(result) == 1:
+                        self.fieldnames.append(str(result[0]))
+                    else:
+                        name, value = result
+                        name = str(name)
+                        try:
+                            option = self.options[name]
+                        except KeyError:
+                            self.write_error(f'Unrecognized option: {name}={value}')
+                            error_count += 1
+                            continue
+                        try:
+                            option.value = value
+                        except ValueError:
+                            self.write_error(f'Illegal value: {name}={value}')
+                            error_count += 1
+                            continue
+
+            missing = self.options.get_missing()
+
+            if missing is not None:
+                if len(missing) == 1:
+                    self.write_error(f'A value for "{missing[0]}" is required')
+                else:
+                    self.write_error(f'Values for these required options are missing: {", ".join(missing)}')
+                error_count += 1
+
+            if error_count > 0:
+                exit(1)
+
+            debug(' command: %s', str(self))
+
+            debug('Preparing for execution')
+            self.prepare()
+
+            if self.record:
+
+                ifile, ofile = self._prepare_recording(argv, ifile, ofile)
+                self._record_writer.ofile = ofile
+
+                # Record the metadata that initiated this command after removing the record option from args/raw_args
+
+                info = self._metadata.searchinfo
+
+                for attr in 'args', 'raw_args':
+                    setattr(info, attr, [arg for arg in getattr(info, attr) if not arg.startswith('record=')])
+
+                metadata = MetadataEncoder().encode(self._metadata)
+                ifile.record('chunked 1.0,',
str(len(metadata)), ',0\n', metadata)
+
+            if self.show_configuration:
+                self.write_info(self.name + ' command configuration: ' + str(self._configuration))
+
+            debug(' command configuration: %s', self._configuration)
+
+        except SystemExit:
+            self._record_writer.write_metadata(self._configuration)
+            self.finish()
+            raise
+        except:
+            self._record_writer.write_metadata(self._configuration)
+            self._report_unexpected_error()
+            self.finish()
+            exit(1)
+
+        self._record_writer.write_metadata(self._configuration)
+
+        # Execute search command on data passing through the pipeline
+        # noinspection PyBroadException
+        try:
+            debug('Executing under protocol_version=2')
+            self._metadata.action = 'execute'
+            self._execute(ifile, None)
+        except SystemExit:
+            self.finish()
+            raise
+        except:
+            self._report_unexpected_error()
+            self.finish()
+            exit(1)
+
+        debug('%s.process completed', class_name)
+
+    def write_debug(self, message, *args):
+        self._record_writer.write_message('DEBUG', message, *args)
+
+    def write_error(self, message, *args):
+        self._record_writer.write_message('ERROR', message, *args)
+
+    def write_fatal(self, message, *args):
+        self._record_writer.write_message('FATAL', message, *args)
+
+    def write_info(self, message, *args):
+        self._record_writer.write_message('INFO', message, *args)
+
+    def write_warning(self, message, *args):
+        self._record_writer.write_message('WARN', message, *args)
+
+    def write_metric(self, name, value):
+        """ Writes a metric that will be added to the search inspector.
+
+        :param name: Name of the metric.
+        :type name: basestring
+
+        :param value: A 4-tuple containing the value of metric ``name`` where
+
+            value[0] = Elapsed seconds or :const:`None`.
+            value[1] = Number of invocations or :const:`None`.
+            value[2] = Input count or :const:`None`.
+            value[3] = Output count or :const:`None`.
+
+            The :data:`SearchMetric` type provides a convenient encapsulation of ``value``.
+
+        :return: :const:`None`.
+
+        """
+        self._record_writer.write_metric(name, value)
+
+    # P2 [ ] TODO: Support custom inspector values
+
+    @staticmethod
+    def _decode_list(mv):
+        return [match.replace('$$', '$') for match in SearchCommand._encoded_value.findall(mv)]
+
+    _encoded_value = re.compile(r'\$(?P<value>(?:\$\$|[^$])*)\$(?:;|$)')  # matches a single value in an encoded list
+
+    # Note: Subclasses must override this method so that it can be called
+    # as self._execute(ifile, None)
+    def _execute(self, ifile, process):
+        """ Default processing loop
+
+        :param ifile: Input file object.
+        :type ifile: file
+
+        :param process: Bound method to call in processing loop.
+        :type process: instancemethod
+
+        :return: :const:`None`.
+ :rtype: NoneType + + """ + if self.protocol_version == 1: + self._record_writer.write_records(process(self._records(ifile))) + self.finish() + else: + assert self._protocol_version == 2 + self._execute_v2(ifile, process) + + @staticmethod + def _as_binary_stream(ifile): + naught = ifile.read(0) + if isinstance(naught, bytes): + return ifile + + try: + return ifile.buffer + except AttributeError as error: + raise RuntimeError(f'Failed to get underlying buffer: {error}') + + @staticmethod + def _read_chunk(istream): + # noinspection PyBroadException + assert isinstance(istream.read(0), bytes), 'Stream must be binary' + + try: + header = istream.readline() + except Exception as error: + raise RuntimeError(f'Failed to read transport header: {error}') + + if not header: + return None + + match = SearchCommand._header.match(ensure_str(header)) + + if match is None: + raise RuntimeError(f'Failed to parse transport header: {header}') + + metadata_length, body_length = match.groups() + metadata_length = int(metadata_length) + body_length = int(body_length) + + try: + metadata = istream.read(metadata_length) + except Exception as error: + raise RuntimeError(f'Failed to read metadata of length {metadata_length}: {error}') + + decoder = MetadataDecoder() + + try: + metadata = decoder.decode(ensure_str(metadata)) + except Exception as error: + raise RuntimeError(f'Failed to parse metadata of length {metadata_length}: {error}') + + # if body_length <= 0: + # return metadata, '' + + body = "" + try: + if body_length > 0: + body = istream.read(body_length) + except Exception as error: + raise RuntimeError(f'Failed to read body of length {body_length}: {error}') + + return metadata, ensure_str(body,errors="replace") + + _header = re.compile(r'chunked\s+1.0\s*,\s*(\d+)\s*,\s*(\d+)\s*\n') + + def _records_protocol_v1(self, ifile): + return self._read_csv_records(ifile) + + def _read_csv_records(self, ifile): + reader = csv.reader(ifile, dialect=CsvDialect) + + try: + fieldnames = next(reader) + except StopIteration: + return + + mv_fieldnames = dict((name, name[len('__mv_'):]) for name in fieldnames if name.startswith('__mv_')) + + if len(mv_fieldnames) == 0: + for values in reader: + yield OrderedDict(list(zip(fieldnames, values))) + return + + for values in reader: + record = OrderedDict() + for fieldname, value in zip(fieldnames, values): + if fieldname.startswith('__mv_'): + if len(value) > 0: + record[mv_fieldnames[fieldname]] = self._decode_list(value) + elif fieldname not in record: + record[fieldname] = value + yield record + + def _execute_v2(self, ifile, process): + istream = self._as_binary_stream(ifile) + + while True: + result = self._read_chunk(istream) + + if not result: + return + + metadata, body = result + action = getattr(metadata, 'action', None) + if action != 'execute': + raise RuntimeError(f'Expected execute action, not {action}') + + self._finished = getattr(metadata, 'finished', False) + self._record_writer.is_flushed = False + self._metadata.update(metadata) + self._execute_chunk_v2(process, result) + + self._record_writer.write_chunk(finished=self._finished) + + def _execute_chunk_v2(self, process, chunk): + metadata, body = chunk + + if len(body) <= 0 and not self._allow_empty_input: + raise ValueError( + "No records found to process. 
Set allow_empty_input=True in dispatch function to move forward " + "with empty records.") + + records = self._read_csv_records(StringIO(body)) + self._record_writer.write_records(process(records)) + + def _report_unexpected_error(self): + + error_type, error, tb = sys.exc_info() + origin = tb + + while origin.tb_next is not None: + origin = origin.tb_next + + filename = origin.tb_frame.f_code.co_filename + lineno = origin.tb_lineno + message = f'{error_type.__name__} at "{filename}", line {str(lineno)} : {error}' + + environment.splunklib_logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb))) + self.write_error(message) + + # endregion + + # region Types + + class ConfigurationSettings: + """ Represents the configuration settings common to all :class:`SearchCommand` classes. + + """ + + def __init__(self, command): + self.command = command + + def __repr__(self): + """ Converts the value of this instance to its string representation. + + The value of this ConfigurationSettings instance is represented as a string of comma-separated + :code:`(name, value)` pairs. + + :return: String representation of this instance + + """ + definitions = type(self).configuration_setting_definitions + settings = [repr((setting.name, setting.__get__(self), setting.supporting_protocols)) for setting in + definitions] + return '[' + ', '.join(settings) + ']' + + def __str__(self): + """ Converts the value of this instance to its string representation. + + The value of this ConfigurationSettings instance is represented as a string of comma-separated + :code:`name=value` pairs. Items with values of :const:`None` are filtered from the list. + + :return: String representation of this instance + + """ + # text = ', '.join(imap(lambda (name, value): name + '=' + json_encode_string(unicode(value)), self.iteritems())) + text = ', '.join([f'{name}={json_encode_string(str(value))}' for (name, value) in self.items()]) + return text + + # region Methods + + @classmethod + def fix_up(cls, command_class): + """ Adjusts and checks this class and its search command class. + + Derived classes typically override this method. It is used by the :decorator:`Configuration` decorator to + fix up the :class:`SearchCommand` class it adorns. This method is overridden by :class:`EventingCommand`, + :class:`GeneratingCommand`, :class:`ReportingCommand`, and :class:`StreamingCommand`, the base types for + all other search commands. 
+
+            :param command_class: Command class targeted by this class
+
+            """
+            return
+
+        # TODO: Stop looking like a dictionary because we don't obey the semantics
+        # N.B.: Does not use Python 2 dict copy semantics
+        def iteritems(self):
+            definitions = type(self).configuration_setting_definitions
+            version = self.command.protocol_version
+            return [
+                (name, value) for name, value in (
+                    (setting.name, setting.__get__(self)) for setting in definitions
+                    if setting.is_supported_by_protocol(version))
+                if value is not None]
+
+        # N.B.: Does not use Python 3 dict view semantics
+
+        items = iteritems
+
+        # endregion
+
+    # endregion
+
+
+SearchMetric = namedtuple('SearchMetric', ('elapsed_seconds', 'invocation_count', 'input_count', 'output_count'))
+
+
+def dispatch(command_class, argv=sys.argv, input_file=sys.stdin, output_file=sys.stdout, module_name=None,
+             allow_empty_input=True):
+    """ Instantiates and executes a search command class
+
+    This function implements a `conditional script stanza `_ based on the value of
+    :code:`module_name`::
+
+        if module_name is None or module_name == '__main__':
+            # execute command
+
+    Call this function at module scope with :code:`module_name=__name__` if you would like your module to act as
+    either a reusable module or a standalone program. Otherwise, if you wish this function to unconditionally
+    instantiate and execute :code:`command_class`, pass :const:`None` as the value of :code:`module_name`.
+
+    :param command_class: Search command class to instantiate and execute.
+    :type command_class: type
+    :param argv: List of arguments to the command.
+    :type argv: list or tuple
+    :param input_file: File from which the command will read data.
+    :type input_file: :code:`file`
+    :param output_file: File to which the command will write data.
+    :type output_file: :code:`file`
+    :param module_name: Name of the module calling :code:`dispatch` or :const:`None`.
+    :type module_name: :code:`str`
+    :param allow_empty_input: Allow empty input records for the command. If :const:`False`, a :code:`ValueError` is
+        raised when an empty chunk body is read.
+    :type allow_empty_input: bool
+    :returns: :const:`None`
+
+    **Example**
+
+    .. code-block:: python
+        :linenos:
+
+        #!/usr/bin/env python
+        from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators
+        @Configuration()
+        class SomeStreamingCommand(StreamingCommand):
+            ...
+            def stream(self, records):
+                ...
+        dispatch(SomeStreamingCommand, module_name=__name__)
+
+    Dispatches :code:`SomeStreamingCommand` if and only if :code:`__name__` is equal to :code:`'__main__'`.
+
+    **Example**
+
+    .. code-block:: python
+        :linenos:
+
+        from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators
+        @Configuration()
+        class SomeStreamingCommand(StreamingCommand):
+            ...
+            def stream(self, records):
+                ...
+        dispatch(SomeStreamingCommand)
+
+    Unconditionally dispatches :code:`SomeStreamingCommand`.
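+
+    **Example**
+
+    A minimal sketch, reusing the :code:`SomeStreamingCommand` placeholder from the examples above, that shows how
+    :code:`allow_empty_input=False` makes the command raise :code:`ValueError` when an empty chunk body is read:
+
+    .. code-block:: python
+        :linenos:
+
+        from splunklib.searchcommands import dispatch, StreamingCommand, Configuration
+        @Configuration()
+        class SomeStreamingCommand(StreamingCommand):
+            ...
+            def stream(self, records):
+                ...
+        dispatch(SomeStreamingCommand, module_name=__name__, allow_empty_input=False)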
+ + """ + assert issubclass(command_class, SearchCommand) + + if module_name is None or module_name == '__main__': + command_class().process(argv, input_file, output_file, allow_empty_input) diff --git a/tests/searchcommands/chunked_data_stream.py b/tests/searchcommands/chunked_data_stream.py index d1ac5a5f..fcd0de7b 100644 --- a/tests/searchcommands/chunked_data_stream.py +++ b/tests/searchcommands/chunked_data_stream.py @@ -1,100 +1,100 @@ -import collections -import csv -import io -import json - -import splunklib.searchcommands.internals -from splunklib.utils import ensure_binary, ensure_str - - -class Chunk: - def __init__(self, version, meta, data): - self.version = ensure_str(version) - self.meta = json.loads(meta) - dialect = splunklib.searchcommands.internals.CsvDialect - self.data = csv.DictReader(io.StringIO(data.decode("utf-8")), - dialect=dialect) - - -class ChunkedDataStreamIter(collections.abc.Iterator): - def __init__(self, chunk_stream): - self.chunk_stream = chunk_stream - - def __next__(self): - return self.next() - - def next(self): - try: - return self.chunk_stream.read_chunk() - except EOFError: - raise StopIteration - - -class ChunkedDataStream(collections.abc.Iterable): - def __iter__(self): - return ChunkedDataStreamIter(self) - - def __init__(self, stream): - empty = stream.read(0) - assert isinstance(empty, bytes) - self.stream = stream - - def read_chunk(self): - header = self.stream.readline() - - while len(header) > 0 and header.strip() == b'': - header = self.stream.readline() # Skip empty lines - if len(header) == 0: - raise EOFError - - version, meta, data = header.rstrip().split(b',') - metabytes = self.stream.read(int(meta)) - databytes = self.stream.read(int(data)) - return Chunk(version, metabytes, databytes) - - -def build_chunk(keyval, data=None): - metadata = ensure_binary(json.dumps(keyval)) - data_output = _build_data_csv(data) - return b"chunked 1.0,%d,%d\n%s%s" % (len(metadata), len(data_output), metadata, data_output) - - -def build_empty_searchinfo(): - return { - 'earliest_time': 0, - 'latest_time': 0, - 'search': "", - 'dispatch_dir': "", - 'sid': "", - 'args': [], - 'splunk_version': "42.3.4", - } - - -def build_getinfo_chunk(): - return build_chunk({ - 'action': 'getinfo', - 'preview': False, - 'searchinfo': build_empty_searchinfo()}) - - -def build_data_chunk(data, finished=True): - return build_chunk({'action': 'execute', 'finished': finished}, data) - - -def _build_data_csv(data): - if data is None: - return b'' - if isinstance(data, bytes): - return data - csvout = io.StringIO() - - headers = set() - for datum in data: - headers.update(list(datum.keys())) - writer = csv.DictWriter(csvout, headers, - dialect=splunklib.searchcommands.internals.CsvDialect) - writer.writeheader() - for datum in data: - writer.writerow(datum) - return ensure_binary(csvout.getvalue()) +import collections +import csv +import io +import json + +import splunklib.searchcommands.internals +from splunklib.utils import ensure_binary, ensure_str + + +class Chunk: + def __init__(self, version, meta, data): + self.version = ensure_str(version) + self.meta = json.loads(meta) + dialect = splunklib.searchcommands.internals.CsvDialect + self.data = csv.DictReader(io.StringIO(data.decode("utf-8")), + dialect=dialect) + + +class ChunkedDataStreamIter(collections.abc.Iterator): + def __init__(self, chunk_stream): + self.chunk_stream = chunk_stream + + def __next__(self): + return self.next() + + def next(self): + try: + return self.chunk_stream.read_chunk() + except EOFError: 
+ raise StopIteration + + +class ChunkedDataStream(collections.abc.Iterable): + def __iter__(self): + return ChunkedDataStreamIter(self) + + def __init__(self, stream): + empty = stream.read(0) + assert isinstance(empty, bytes) + self.stream = stream + + def read_chunk(self): + header = self.stream.readline() + + while len(header) > 0 and header.strip() == b'': + header = self.stream.readline() # Skip empty lines + if len(header) == 0: + raise EOFError + + version, meta, data = header.rstrip().split(b',') + metabytes = self.stream.read(int(meta)) + databytes = self.stream.read(int(data)) + return Chunk(version, metabytes, databytes) + + +def build_chunk(keyval, data=None): + metadata = ensure_binary(json.dumps(keyval)) + data_output = _build_data_csv(data) + return b"chunked 1.0,%d,%d\n%s%s" % (len(metadata), len(data_output), metadata, data_output) + + +def build_empty_searchinfo(): + return { + 'earliest_time': 0, + 'latest_time': 0, + 'search': "", + 'dispatch_dir': "", + 'sid': "", + 'args': [], + 'splunk_version': "42.3.4", + } + + +def build_getinfo_chunk(): + return build_chunk({ + 'action': 'getinfo', + 'preview': False, + 'searchinfo': build_empty_searchinfo()}) + + +def build_data_chunk(data, finished=True): + return build_chunk({'action': 'execute', 'finished': finished}, data) + + +def _build_data_csv(data): + if data is None: + return b'' + if isinstance(data, bytes): + return data + csvout = io.StringIO() + + headers = set() + for datum in data: + headers.update(datum.keys()) + writer = csv.DictWriter(csvout, headers, + dialect=splunklib.searchcommands.internals.CsvDialect) + writer.writeheader() + for datum in data: + writer.writerow(datum) + return ensure_binary(csvout.getvalue()) diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py old mode 100755 new mode 100644 index bea5c618..1e3cf25e --- a/tests/searchcommands/test_internals_v1.py +++ b/tests/searchcommands/test_internals_v1.py @@ -1,343 +1,343 @@ -#!/usr/bin/env python -# -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from contextlib import closing -from unittest import main, TestCase -import os -from io import StringIO, BytesIO -from functools import reduce -import pytest - -from splunklib.searchcommands.internals import CommandLineParser, InputHeader, RecordWriterV1 -from splunklib.searchcommands.decorators import Configuration, Option -from splunklib.searchcommands.validators import Boolean - -from splunklib.searchcommands.search_command import SearchCommand - - -@pytest.mark.smoke -class TestInternals(TestCase): - def setUp(self): - TestCase.setUp(self) - - def test_command_line_parser(self): - - @Configuration() - class TestCommandLineParserCommand(SearchCommand): - - required_option = Option(validate=Boolean(), require=True) - unnecessary_option = Option(validate=Boolean(), default=True, require=False) - - class ConfigurationSettings(SearchCommand.ConfigurationSettings): - - @classmethod - def fix_up(cls, command_class): pass - - # Command line without fieldnames - - options = ['required_option=true', 'unnecessary_option=false'] - - command = TestCommandLineParserCommand() - CommandLineParser.parse(command, options) - - for option in command.options.values(): - if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: - self.assertFalse(option.is_set) - continue - self.assertTrue(option.is_set) - - expected = 'testcommandlineparser required_option="t" unnecessary_option="f"' - self.assertEqual(expected, str(command)) - self.assertEqual(command.fieldnames, []) - - # Command line with fieldnames - - fieldnames = ['field_1', 'field_2', 'field_3'] - - command = TestCommandLineParserCommand() - CommandLineParser.parse(command, options + fieldnames) - - for option in command.options.values(): - if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: - self.assertFalse(option.is_set) - continue - self.assertTrue(option.is_set) - - expected = 'testcommandlineparser required_option="t" unnecessary_option="f" field_1 field_2 field_3' - self.assertEqual(expected, str(command)) - self.assertEqual(command.fieldnames, fieldnames) - - # Command line without any unnecessary options - - command = TestCommandLineParserCommand() - CommandLineParser.parse(command, ['required_option=true'] + fieldnames) - - for option in command.options.values(): - if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', - 'show_configuration']: - self.assertFalse(option.is_set) - continue - self.assertTrue(option.is_set) - - expected = 'testcommandlineparser required_option="t" field_1 field_2 field_3' - self.assertEqual(expected, str(command)) - self.assertEqual(command.fieldnames, fieldnames) - - # Command line with missing required options, with or without fieldnames or unnecessary options - - options = ['unnecessary_option=true'] - self.assertRaises(ValueError, CommandLineParser.parse, command, options + fieldnames) - self.assertRaises(ValueError, CommandLineParser.parse, command, options) - self.assertRaises(ValueError, CommandLineParser.parse, command, []) - - # Command line with unrecognized options - - self.assertRaises(ValueError, CommandLineParser.parse, command, - ['unrecognized_option_1=foo', 'unrecognized_option_2=bar']) - - # Command line with a variety of quoted/escaped text options - - @Configuration() - class TestCommandLineParserCommand(SearchCommand): - - text = Option() - - class ConfigurationSettings(SearchCommand.ConfigurationSettings): - - @classmethod - def fix_up(cls, command_class): pass - - 
strings = [ - r'"foo bar"', - r'"foo/bar"', - r'"foo\\bar"', - r'"""foo bar"""', - r'"\"foo bar\""', - r'Hello\ World!', - r'\"Hello\ World!\"'] - - expected_values = [ - r'foo bar', - r'foo/bar', - r'foo\bar', - r'"foo bar"', - r'"foo bar"', - r'Hello World!', - r'"Hello World!"' - ] - - for string, expected_value in zip(strings, expected_values): - command = TestCommandLineParserCommand() - argv = ['text', '=', string] - CommandLineParser.parse(command, argv) - self.assertEqual(command.text, expected_value) - - for string, expected_value in zip(strings, expected_values): - command = TestCommandLineParserCommand() - argv = [string] - CommandLineParser.parse(command, argv) - self.assertEqual(command.fieldnames[0], expected_value) - - for string, expected_value in zip(strings, expected_values): - command = TestCommandLineParserCommand() - argv = ['text', '=', string] + strings - CommandLineParser.parse(command, argv) - self.assertEqual(command.text, expected_value) - self.assertEqual(command.fieldnames, expected_values) - - strings = [ - 'some\\ string\\', - r'some\ string"', - r'"some string', - r'some"string' - ] - - for string in strings: - command = TestCommandLineParserCommand() - argv = [string] - self.assertRaises(SyntaxError, CommandLineParser.parse, command, argv) - - def test_command_line_parser_unquote(self): - parser = CommandLineParser - - options = [ - r'foo', # unquoted string with no escaped characters - r'fo\o\ b\"a\\r', # unquoted string with some escaped characters - r'"foo"', # quoted string with no special characters - r'"""foobar1"""', # quoted string with quotes escaped like this: "" - r'"\"foobar2\""', # quoted string with quotes escaped like this: \" - r'"foo ""x"" bar"', # quoted string with quotes escaped like this: "" - r'"foo \"x\" bar"', # quoted string with quotes escaped like this: \" - r'"\\foobar"', # quoted string with an escaped backslash - r'"foo \\ bar"', # quoted string with an escaped backslash - r'"foobar\\"', # quoted string with an escaped backslash - r'foo\\\bar', # quoted string with an escaped backslash and an escaped 'b' - r'""', # pair of quotes - r''] # empty string - - expected = [ - r'foo', - r'foo b"a\r', - r'foo', - r'"foobar1"', - r'"foobar2"', - r'foo "x" bar', - r'foo "x" bar', - '\\foobar', - r'foo \ bar', - 'foobar\\', - r'foo\bar', - r'', - r''] - - # Command line with an assortment of string values - - self.assertEqual(expected[-4], parser.unquote(options[-4])) - - for i in range(0, len(options)): - self.assertEqual(expected[i], parser.unquote(options[i])) - - self.assertRaises(SyntaxError, parser.unquote, '"') - self.assertRaises(SyntaxError, parser.unquote, '"foo') - self.assertRaises(SyntaxError, parser.unquote, 'foo"') - self.assertRaises(SyntaxError, parser.unquote, 'foo\\') - - def test_input_header(self): - - # No items - - input_header = InputHeader() - - with closing(StringIO('\r\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 0) - - # One unnamed single-line item (same as no items) - - input_header = InputHeader() - - with closing(StringIO('this%20is%20an%20unnamed%20single-line%20item\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 0) - - input_header = InputHeader() - - with closing(StringIO('this%20is%20an%20unnamed\nmulti-\nline%20item\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 0) - - # One named single-line item - - input_header = InputHeader() - - with 
closing(StringIO('Foo:this%20is%20a%20single-line%20item\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 1) - self.assertEqual(input_header['Foo'], 'this is a single-line item') - - input_header = InputHeader() - - with closing(StringIO('Bar:this is a\nmulti-\nline item\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 1) - self.assertEqual(input_header['Bar'], 'this is a\nmulti-\nline item') - - # The infoPath item (which is the path to a file that we open for reads) - - input_header = InputHeader() - - with closing(StringIO('infoPath:non-existent.csv\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 1) - self.assertEqual(input_header['infoPath'], 'non-existent.csv') - - # Set of named items - - collection = { - 'word_list': 'hello\nworld\n!', - 'word_1': 'hello', - 'word_2': 'world', - 'word_3': '!', - 'sentence': 'hello world!'} - - input_header = InputHeader() - text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', list(collection.items()), '') + '\n' - - with closing(StringIO(text)) as input_file: - input_header.read(input_file) - - self.assertDictEqual(input_header, collection) - - # Set of named items with an unnamed item at the beginning (the only place that an unnamed item can appear) - - with closing(StringIO('unnamed item\n' + text)) as input_file: - input_header.read(input_file) - - self.assertDictEqual(input_header, collection) - - # Test iterators, indirectly through items, keys, and values - - self.assertEqual(sorted(input_header.items()), sorted(collection.items())) - self.assertEqual(sorted(input_header.keys()), sorted(collection.keys())) - self.assertEqual(sorted(input_header.values()), sorted(collection.values())) - - def test_messages_header(self): - - @Configuration() - class TestMessagesHeaderCommand(SearchCommand): - class ConfigurationSettings(SearchCommand.ConfigurationSettings): - - @classmethod - def fix_up(cls, command_class): pass - - command = TestMessagesHeaderCommand() - command._protocol_version = 1 - output_buffer = BytesIO() - command._record_writer = RecordWriterV1(output_buffer) - - messages = [ - (command.write_debug, 'debug_message'), - (command.write_error, 'error_message'), - (command.write_fatal, 'fatal_message'), - (command.write_info, 'info_message'), - (command.write_warning, 'warning_message')] - - for write, message in messages: - write(message) - - command.finish() - - expected = ( - 'debug_message=debug_message\r\n' - 'error_message=error_message\r\n' - 'error_message=fatal_message\r\n' - 'info_message=info_message\r\n' - 'warn_message=warning_message\r\n' - '\r\n') - - self.assertEqual(output_buffer.getvalue().decode('utf-8'), expected) - - _package_path = os.path.dirname(__file__) - - -if __name__ == "__main__": - main() +#!/usr/bin/env python +# +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
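+
+# Unit tests for splunklib.searchcommands internals: CommandLineParser quoting and
+# option handling, InputHeader parsing, and the RecordWriterV1 messages header.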
+ +from contextlib import closing +from unittest import main, TestCase +import os +from io import StringIO, BytesIO +from functools import reduce +import pytest + +from splunklib.searchcommands.internals import CommandLineParser, InputHeader, RecordWriterV1 +from splunklib.searchcommands.decorators import Configuration, Option +from splunklib.searchcommands.validators import Boolean + +from splunklib.searchcommands.search_command import SearchCommand + + +@pytest.mark.smoke +class TestInternals(TestCase): + def setUp(self): + TestCase.setUp(self) + + def test_command_line_parser(self): + + @Configuration() + class TestCommandLineParserCommand(SearchCommand): + + required_option = Option(validate=Boolean(), require=True) + unnecessary_option = Option(validate=Boolean(), default=True, require=False) + + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + + @classmethod + def fix_up(cls, command_class): pass + + # Command line without fieldnames + + options = ['required_option=true', 'unnecessary_option=false'] + + command = TestCommandLineParserCommand() + CommandLineParser.parse(command, options) + + for option in command.options.values(): + if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: + self.assertFalse(option.is_set) + continue + self.assertTrue(option.is_set) + + expected = 'testcommandlineparser required_option="t" unnecessary_option="f"' + self.assertEqual(expected, str(command)) + self.assertEqual(command.fieldnames, []) + + # Command line with fieldnames + + fieldnames = ['field_1', 'field_2', 'field_3'] + + command = TestCommandLineParserCommand() + CommandLineParser.parse(command, options + fieldnames) + + for option in command.options.values(): + if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: + self.assertFalse(option.is_set) + continue + self.assertTrue(option.is_set) + + expected = 'testcommandlineparser required_option="t" unnecessary_option="f" field_1 field_2 field_3' + self.assertEqual(expected, str(command)) + self.assertEqual(command.fieldnames, fieldnames) + + # Command line without any unnecessary options + + command = TestCommandLineParserCommand() + CommandLineParser.parse(command, ['required_option=true'] + fieldnames) + + for option in command.options.values(): + if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', + 'show_configuration']: + self.assertFalse(option.is_set) + continue + self.assertTrue(option.is_set) + + expected = 'testcommandlineparser required_option="t" field_1 field_2 field_3' + self.assertEqual(expected, str(command)) + self.assertEqual(command.fieldnames, fieldnames) + + # Command line with missing required options, with or without fieldnames or unnecessary options + + options = ['unnecessary_option=true'] + self.assertRaises(ValueError, CommandLineParser.parse, command, options + fieldnames) + self.assertRaises(ValueError, CommandLineParser.parse, command, options) + self.assertRaises(ValueError, CommandLineParser.parse, command, []) + + # Command line with unrecognized options + + self.assertRaises(ValueError, CommandLineParser.parse, command, + ['unrecognized_option_1=foo', 'unrecognized_option_2=bar']) + + # Command line with a variety of quoted/escaped text options + + @Configuration() + class TestCommandLineParserCommand(SearchCommand): + + text = Option() + + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + + @classmethod + def fix_up(cls, command_class): pass + + 
strings = [ + r'"foo bar"', + r'"foo/bar"', + r'"foo\\bar"', + r'"""foo bar"""', + r'"\"foo bar\""', + r'Hello\ World!', + r'\"Hello\ World!\"'] + + expected_values = [ + r'foo bar', + r'foo/bar', + r'foo\bar', + r'"foo bar"', + r'"foo bar"', + r'Hello World!', + r'"Hello World!"' + ] + + for string, expected_value in zip(strings, expected_values): + command = TestCommandLineParserCommand() + argv = ['text', '=', string] + CommandLineParser.parse(command, argv) + self.assertEqual(command.text, expected_value) + + for string, expected_value in zip(strings, expected_values): + command = TestCommandLineParserCommand() + argv = [string] + CommandLineParser.parse(command, argv) + self.assertEqual(command.fieldnames[0], expected_value) + + for string, expected_value in zip(strings, expected_values): + command = TestCommandLineParserCommand() + argv = ['text', '=', string] + strings + CommandLineParser.parse(command, argv) + self.assertEqual(command.text, expected_value) + self.assertEqual(command.fieldnames, expected_values) + + strings = [ + 'some\\ string\\', + r'some\ string"', + r'"some string', + r'some"string' + ] + + for string in strings: + command = TestCommandLineParserCommand() + argv = [string] + self.assertRaises(SyntaxError, CommandLineParser.parse, command, argv) + + def test_command_line_parser_unquote(self): + parser = CommandLineParser + + options = [ + r'foo', # unquoted string with no escaped characters + r'fo\o\ b\"a\\r', # unquoted string with some escaped characters + r'"foo"', # quoted string with no special characters + r'"""foobar1"""', # quoted string with quotes escaped like this: "" + r'"\"foobar2\""', # quoted string with quotes escaped like this: \" + r'"foo ""x"" bar"', # quoted string with quotes escaped like this: "" + r'"foo \"x\" bar"', # quoted string with quotes escaped like this: \" + r'"\\foobar"', # quoted string with an escaped backslash + r'"foo \\ bar"', # quoted string with an escaped backslash + r'"foobar\\"', # quoted string with an escaped backslash + r'foo\\\bar', # quoted string with an escaped backslash and an escaped 'b' + r'""', # pair of quotes + r''] # empty string + + expected = [ + r'foo', + r'foo b"a\r', + r'foo', + r'"foobar1"', + r'"foobar2"', + r'foo "x" bar', + r'foo "x" bar', + '\\foobar', + r'foo \ bar', + 'foobar\\', + r'foo\bar', + r'', + r''] + + # Command line with an assortment of string values + + self.assertEqual(expected[-4], parser.unquote(options[-4])) + + for i in range(0, len(options)): + self.assertEqual(expected[i], parser.unquote(options[i])) + + self.assertRaises(SyntaxError, parser.unquote, '"') + self.assertRaises(SyntaxError, parser.unquote, '"foo') + self.assertRaises(SyntaxError, parser.unquote, 'foo"') + self.assertRaises(SyntaxError, parser.unquote, 'foo\\') + + def test_input_header(self): + + # No items + + input_header = InputHeader() + + with closing(StringIO('\r\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 0) + + # One unnamed single-line item (same as no items) + + input_header = InputHeader() + + with closing(StringIO('this%20is%20an%20unnamed%20single-line%20item\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 0) + + input_header = InputHeader() + + with closing(StringIO('this%20is%20an%20unnamed\nmulti-\nline%20item\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 0) + + # One named single-line item + + input_header = InputHeader() + + with 
closing(StringIO('Foo:this%20is%20a%20single-line%20item\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 1) + self.assertEqual(input_header['Foo'], 'this is a single-line item') + + input_header = InputHeader() + + with closing(StringIO('Bar:this is a\nmulti-\nline item\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 1) + self.assertEqual(input_header['Bar'], 'this is a\nmulti-\nline item') + + # The infoPath item (which is the path to a file that we open for reads) + + input_header = InputHeader() + + with closing(StringIO('infoPath:non-existent.csv\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 1) + self.assertEqual(input_header['infoPath'], 'non-existent.csv') + + # Set of named items + + collection = { + 'word_list': 'hello\nworld\n!', + 'word_1': 'hello', + 'word_2': 'world', + 'word_3': '!', + 'sentence': 'hello world!'} + + input_header = InputHeader() + text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', collection.items(), '') + '\n' + + with closing(StringIO(text)) as input_file: + input_header.read(input_file) + + self.assertDictEqual(input_header, collection) + + # Set of named items with an unnamed item at the beginning (the only place that an unnamed item can appear) + + with closing(StringIO('unnamed item\n' + text)) as input_file: + input_header.read(input_file) + + self.assertDictEqual(input_header, collection) + + # Test iterators, indirectly through items, keys, and values + + self.assertEqual(sorted(input_header.items()), sorted(collection.items())) + self.assertEqual(sorted(input_header.keys()), sorted(collection.keys())) + self.assertEqual(sorted(input_header.values()), sorted(collection.values())) + + def test_messages_header(self): + + @Configuration() + class TestMessagesHeaderCommand(SearchCommand): + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + + @classmethod + def fix_up(cls, command_class): pass + + command = TestMessagesHeaderCommand() + command._protocol_version = 1 + output_buffer = BytesIO() + command._record_writer = RecordWriterV1(output_buffer) + + messages = [ + (command.write_debug, 'debug_message'), + (command.write_error, 'error_message'), + (command.write_fatal, 'fatal_message'), + (command.write_info, 'info_message'), + (command.write_warning, 'warning_message')] + + for write, message in messages: + write(message) + + command.finish() + + expected = ( + 'debug_message=debug_message\r\n' + 'error_message=error_message\r\n' + 'error_message=fatal_message\r\n' + 'info_message=info_message\r\n' + 'warn_message=warning_message\r\n' + '\r\n') + + self.assertEqual(output_buffer.getvalue().decode('utf-8'), expected) + + _package_path = os.path.dirname(__file__) + + +if __name__ == "__main__": + main() diff --git a/tests/test_binding.py b/tests/test_binding.py old mode 100755 new mode 100644 index 5f967c80..9d4dd4b8 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -1,975 +1,975 @@ -#!/usr/bin/env python -# -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from http import server as BaseHTTPServer -from io import BytesIO, StringIO -from threading import Thread -from urllib.request import Request, urlopen - -from xml.etree.ElementTree import XML - -import json -import logging -from tests import testlib -import unittest -import socket -import ssl - -import splunklib -from splunklib import binding -from splunklib.binding import HTTPError, AuthenticationError, UrlEncoded -from splunklib import data -from splunklib.utils import ensure_str - -import pytest - -# splunkd endpoint paths -PATH_USERS = "authentication/users/" - -# XML Namespaces -NAMESPACE_ATOM = "http://www.w3.org/2005/Atom" -NAMESPACE_REST = "http://dev.splunk.com/ns/rest" -NAMESPACE_OPENSEARCH = "http://a9.com/-/spec/opensearch/1.1" - -# XML Extended Name Fragments -XNAMEF_ATOM = "{%s}%%s" % NAMESPACE_ATOM -XNAMEF_REST = "{%s}%%s" % NAMESPACE_REST -XNAMEF_OPENSEARCH = "{%s}%%s" % NAMESPACE_OPENSEARCH - -# XML Extended Names -XNAME_AUTHOR = XNAMEF_ATOM % "author" -XNAME_ENTRY = XNAMEF_ATOM % "entry" -XNAME_FEED = XNAMEF_ATOM % "feed" -XNAME_ID = XNAMEF_ATOM % "id" -XNAME_TITLE = XNAMEF_ATOM % "title" - - -def load(response): - return data.load(response.body.read()) - - -class BindingTestCase(unittest.TestCase): - context = None - - def setUp(self): - logging.info("%s", self.__class__.__name__) - self.opts = testlib.parse([], {}, ".env") - self.context = binding.connect(**self.opts.kwargs) - logging.debug("Connected to splunkd.") - - -class TestResponseReader(BindingTestCase): - def test_empty(self): - response = binding.ResponseReader(BytesIO(b"")) - self.assertTrue(response.empty) - self.assertEqual(response.peek(10), b"") - self.assertEqual(response.read(10), b"") - - arr = bytearray(10) - self.assertEqual(response.readinto(arr), 0) - self.assertEqual(arr, bytearray(10)) - self.assertTrue(response.empty) - - def test_read_past_end(self): - txt = b"abcd" - response = binding.ResponseReader(BytesIO(txt)) - self.assertFalse(response.empty) - self.assertEqual(response.peek(10), txt) - self.assertEqual(response.read(10), txt) - self.assertTrue(response.empty) - self.assertEqual(response.peek(10), b"") - self.assertEqual(response.read(10), b"") - - def test_read_partial(self): - txt = b"This is a test of the emergency broadcasting system." 
- response = binding.ResponseReader(BytesIO(txt)) - self.assertEqual(response.peek(5), txt[:5]) - self.assertFalse(response.empty) - self.assertEqual(response.read(), txt) - self.assertTrue(response.empty) - self.assertEqual(response.read(), b'') - - def test_readable(self): - txt = "abcd" - response = binding.ResponseReader(StringIO(txt)) - self.assertTrue(response.readable()) - - def test_readinto_bytearray(self): - txt = b"Checking readinto works as expected" - response = binding.ResponseReader(BytesIO(txt)) - arr = bytearray(10) - self.assertEqual(response.readinto(arr), 10) - self.assertEqual(arr[:10], b"Checking r") - self.assertEqual(response.readinto(arr), 10) - self.assertEqual(arr[:10], b"eadinto wo") - self.assertEqual(response.readinto(arr), 10) - self.assertEqual(arr[:10], b"rks as exp") - self.assertEqual(response.readinto(arr), 5) - self.assertEqual(arr[:5], b"ected") - self.assertTrue(response.empty) - - def test_readinto_memoryview(self): - txt = b"Checking readinto works as expected" - response = binding.ResponseReader(BytesIO(txt)) - arr = bytearray(10) - mv = memoryview(arr) - self.assertEqual(response.readinto(mv), 10) - self.assertEqual(arr[:10], b"Checking r") - self.assertEqual(response.readinto(mv), 10) - self.assertEqual(arr[:10], b"eadinto wo") - self.assertEqual(response.readinto(mv), 10) - self.assertEqual(arr[:10], b"rks as exp") - self.assertEqual(response.readinto(mv), 5) - self.assertEqual(arr[:5], b"ected") - self.assertTrue(response.empty) - - -class TestUrlEncoded(BindingTestCase): - def test_idempotent(self): - a = UrlEncoded('abc') - self.assertEqual(a, UrlEncoded(a)) - - def test_append(self): - self.assertEqual(UrlEncoded('a') + UrlEncoded('b'), - UrlEncoded('ab')) - - def test_append_string(self): - self.assertEqual(UrlEncoded('a') + '%', - UrlEncoded('a%')) - - def test_append_to_string(self): - self.assertEqual('%' + UrlEncoded('a'), - UrlEncoded('%a')) - - def test_interpolation_fails(self): - self.assertRaises(TypeError, lambda: UrlEncoded('%s') % 'boris') - - def test_chars(self): - for char, code in [(' ', '%20'), - ('"', '%22'), - ('%', '%25')]: - self.assertEqual(UrlEncoded(char), - UrlEncoded(code, skip_encode=True)) - - def test_repr(self): - self.assertEqual(repr(UrlEncoded('% %')), "UrlEncoded('% %')") - - -class TestAuthority(unittest.TestCase): - def test_authority_default(self): - self.assertEqual(binding._authority(), - "https://localhost:8089") - - def test_ipv4_host(self): - self.assertEqual( - binding._authority( - host="splunk.utopia.net"), - "https://splunk.utopia.net:8089") - - def test_ipv6_host(self): - self.assertEqual( - binding._authority( - host="2001:0db8:85a3:0000:0000:8a2e:0370:7334"), - "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089") - - def test_ipv6_host_enclosed(self): - self.assertEqual( - binding._authority( - host="[2001:0db8:85a3:0000:0000:8a2e:0370:7334]"), - "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089") - - def test_all_fields(self): - self.assertEqual( - binding._authority( - scheme="http", - host="splunk.utopia.net", - port="471"), - "http://splunk.utopia.net:471") - - -class TestUserManipulation(BindingTestCase): - def setUp(self): - BindingTestCase.setUp(self) - self.username = testlib.tmpname() - self.password = "changeme!" 
- self.roles = "power" - - # Delete user if it exists already - try: - response = self.context.delete(PATH_USERS + self.username) - self.assertEqual(response.status, 200) - except HTTPError as e: - self.assertTrue(e.status in [400, 500]) - - def tearDown(self): - BindingTestCase.tearDown(self) - try: - self.context.delete(PATH_USERS + self.username) - except HTTPError as e: - if e.status not in [400, 500]: - raise - - def test_user_without_role_fails(self): - self.assertRaises(binding.HTTPError, - self.context.post, - PATH_USERS, name=self.username, - password=self.password) - - def test_create_user(self): - response = self.context.post( - PATH_USERS, name=self.username, - password=self.password, roles=self.roles) - self.assertEqual(response.status, 201) - - response = self.context.get(PATH_USERS + self.username) - entry = load(response).feed.entry - self.assertEqual(entry.title, self.username) - - def test_update_user(self): - self.test_create_user() - response = self.context.post( - PATH_USERS + self.username, - password=self.password, - roles=self.roles, - defaultApp="search", - realname="Renzo", - email="email.me@now.com") - self.assertEqual(response.status, 200) - - response = self.context.get(PATH_USERS + self.username) - self.assertEqual(response.status, 200) - entry = load(response).feed.entry - self.assertEqual(entry.title, self.username) - self.assertEqual(entry.content.defaultApp, "search") - self.assertEqual(entry.content.realname, "Renzo") - self.assertEqual(entry.content.email, "email.me@now.com") - - def test_post_with_body_behaves(self): - self.test_create_user() - response = self.context.post( - PATH_USERS + self.username, - body="defaultApp=search", - ) - self.assertEqual(response.status, 200) - - def test_post_with_get_arguments_to_receivers_stream(self): - text = 'Hello, world!' 
- response = self.context.post( - '/services/receivers/simple', - headers=[('x-splunk-input-mode', 'streaming')], - source='sdk', sourcetype='sdk_test', - body=text - ) - self.assertEqual(response.status, 200) - - -class TestSocket(BindingTestCase): - def test_socket(self): - socket = self.context.connect() - socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) - socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) - socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) - socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write("\r\n".encode('utf-8')) - socket.close() - - # Sockets take bytes not strings - # - # def test_unicode_socket(self): - # socket = self.context.connect() - # socket.write(u"POST %s HTTP/1.1\r\n" %\ - # self.context._abspath("some/path/to/post/to")) - # socket.write(u"Host: %s:%s\r\n" %\ - # (self.context.host, self.context.port)) - # socket.write(u"Accept-Encoding: identity\r\n") - # socket.write((u"Authorization: %s\r\n" %\ - # self.context.token).encode('utf-8')) - # socket.write(u"X-Splunk-Input-Mode: Streaming\r\n") - # socket.write("\r\n") - # socket.close() - - def test_socket_gethostbyname(self): - self.assertTrue(self.context.connect()) - self.context.host = socket.gethostbyname(self.context.host) - self.assertTrue(self.context.connect()) - - -class TestUnicodeConnect(BindingTestCase): - def test_unicode_connect(self): - opts = self.opts.kwargs.copy() - opts['host'] = str(opts['host']) - context = binding.connect(**opts) - # Just check to make sure the service is alive - response = context.get("/services") - self.assertEqual(response.status, 200) - - -@pytest.mark.smoke -class TestAutologin(BindingTestCase): - def test_with_autologin(self): - self.context.autologin = True - self.assertEqual(self.context.get("/services").status, 200) - self.context.logout() - self.assertEqual(self.context.get("/services").status, 200) - - def test_without_autologin(self): - self.context.autologin = False - self.assertEqual(self.context.get("/services").status, 200) - self.context.logout() - self.assertRaises(AuthenticationError, - self.context.get, "/services") - - -class TestAbspath(BindingTestCase): - def setUp(self): - BindingTestCase.setUp(self) - self.kwargs = self.opts.kwargs.copy() - if 'app' in self.kwargs: del self.kwargs['app'] - if 'owner' in self.kwargs: del self.kwargs['owner'] - - def test_default(self): - path = self.context._abspath("foo", owner=None, app=None) - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/services/foo") - - def test_with_owner(self): - path = self.context._abspath("foo", owner="me", app=None) - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/system/foo") - - def test_with_app(self): - path = self.context._abspath("foo", owner=None, app="MyApp") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_with_both(self): - path = self.context._abspath("foo", owner="me", app="MyApp") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/MyApp/foo") - - def test_user_sharing(self): - path = self.context._abspath("foo", owner="me", app="MyApp", sharing="user") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/MyApp/foo") - - def test_sharing_app(self): - path = 
self.context._abspath("foo", owner="me", app="MyApp", sharing="app") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_sharing_global(self): - path = self.context._abspath("foo", owner="me", app="MyApp", sharing="global") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_sharing_system(self): - path = self.context._abspath("foo bar", owner="me", app="MyApp", sharing="system") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/system/foo%20bar") - - def test_url_forbidden_characters(self): - path = self.context._abspath('/a/b c/d') - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, '/a/b%20c/d') - - def test_context_defaults(self): - context = binding.connect(**self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/services/foo") - - def test_context_with_owner(self): - context = binding.connect(owner="me", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/system/foo") - - def test_context_with_app(self): - context = binding.connect(app="MyApp", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_context_with_both(self): - context = binding.connect(owner="me", app="MyApp", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/MyApp/foo") - - def test_context_with_user_sharing(self): - context = binding.connect( - owner="me", app="MyApp", sharing="user", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/MyApp/foo") - - def test_context_with_app_sharing(self): - context = binding.connect( - owner="me", app="MyApp", sharing="app", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_context_with_global_sharing(self): - context = binding.connect( - owner="me", app="MyApp", sharing="global", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_context_with_system_sharing(self): - context = binding.connect( - owner="me", app="MyApp", sharing="system", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/system/foo") - - def test_context_with_owner_as_email(self): - context = binding.connect(owner="me@me.com", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me%40me.com/system/foo") - self.assertEqual(path, UrlEncoded("/servicesNS/me@me.com/system/foo")) - - -# An urllib2 based HTTP request handler, used to test the binding layers -# support for pluggable request handlers. 
-def urllib2_handler(url, message, **kwargs): - method = message['method'].lower() - data = message.get('body', b"") if method == 'post' else None - headers = dict(message.get('headers', [])) - req = Request(url, data, headers) - try: - response = urlopen(req, context=ssl._create_unverified_context()) - except HTTPError as response: - pass # Propagate HTTP errors via the returned response message - return { - 'status': response.code, - 'reason': response.msg, - 'headers': dict(response.info()), - 'body': BytesIO(response.read()) - } - - -def isatom(body): - """Answers if the given response body looks like ATOM.""" - root = XML(body) - return \ - root.tag == XNAME_FEED and \ - root.find(XNAME_AUTHOR) is not None and \ - root.find(XNAME_ID) is not None and \ - root.find(XNAME_TITLE) is not None - - -class TestPluggableHTTP(testlib.SDKTestCase): - # Verify pluggable HTTP reqeust handlers. - def test_handlers(self): - paths = ["/services", "authentication/users", - "search/jobs"] - handlers = [binding.handler(), # default handler - urllib2_handler] - for handler in handlers: - logging.debug("Connecting with handler %s", handler) - context = binding.connect( - handler=handler, - **self.opts.kwargs) - for path in paths: - body = context.get(path).body.read() - self.assertTrue(isatom(body)) - - -def urllib2_insert_cookie_handler(url, message, **kwargs): - method = message['method'].lower() - data = message.get('body', b"") if method == 'post' else None - headers = dict(message.get('headers', [])) - req = Request(url, data, headers) - try: - response = urlopen(req, context=ssl._create_unverified_context()) - except HTTPError as response: - pass # Propagate HTTP errors via the returned response message - - # Mimic the insertion of 3rd party cookies into the response. - # An example is "sticky session"/"insert cookie" persistence - # of a load balancer for a SHC. - header_list = list(response.info().items()) - header_list.append(("Set-Cookie", "BIGipServer_splunk-shc-8089=1234567890.12345.0000; path=/; Httponly; Secure")) - header_list.append(("Set-Cookie", "home_made=yummy")) - - return { - 'status': response.code, - 'reason': response.msg, - 'headers': header_list, - 'body': BytesIO(response.read()) - } - - -class TestCookiePersistence(testlib.SDKTestCase): - # Verify persistence of 3rd party inserted cookies. 
- def test_3rdPartyInsertedCookiePersistence(self): - paths = ["/services", "authentication/users", - "search/jobs"] - logging.debug("Connecting with urllib2_insert_cookie_handler %s", urllib2_insert_cookie_handler) - context = binding.connect( - handler=urllib2_insert_cookie_handler, - **self.opts.kwargs) - - persisted_cookies = context.get_cookies() - - splunk_token_found = False - for k, v in persisted_cookies.items(): - if k[:8] == "splunkd_": - splunk_token_found = True - break - - self.assertEqual(splunk_token_found, True) - self.assertEqual(persisted_cookies['BIGipServer_splunk-shc-8089'], "1234567890.12345.0000") - self.assertEqual(persisted_cookies['home_made'], "yummy") - - -@pytest.mark.smoke -class TestLogout(BindingTestCase): - def test_logout(self): - response = self.context.get("/services") - self.assertEqual(response.status, 200) - self.context.logout() - self.assertEqual(self.context.token, binding._NoAuthenticationToken) - self.assertEqual(self.context.get_cookies(), {}) - self.assertRaises(AuthenticationError, - self.context.get, "/services") - self.assertRaises(AuthenticationError, - self.context.post, "/services") - self.assertRaises(AuthenticationError, - self.context.delete, "/services") - self.context.login() - response = self.context.get("/services") - self.assertEqual(response.status, 200) - - -class TestCookieAuthentication(unittest.TestCase): - def setUp(self): - self.opts = testlib.parse([], {}, ".env") - self.context = binding.connect(**self.opts.kwargs) - - # Skip these tests if running below Splunk 6.2, cookie-auth didn't exist before - from splunklib import client - service = client.Service(**self.opts.kwargs) - # TODO: Workaround the fact that skipTest is not defined by unittest2.TestCase - service.login() - splver = service.splunk_version - if splver[:2] < (6, 2): - self.skipTest("Skipping cookie-auth tests, running in %d.%d.%d, this feature was added in 6.2+" % splver) - - if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: - - def assertIsNotNone(self, obj, msg=None): - if obj is None: - raise self.failureException(msg or '%r is not None' % obj) - - @pytest.mark.smoke - def test_cookie_in_auth_headers(self): - self.assertIsNotNone(self.context._auth_headers) - self.assertNotEqual(self.context._auth_headers, []) - self.assertEqual(len(self.context._auth_headers), 1) - self.assertEqual(len(self.context._auth_headers), 1) - self.assertEqual(self.context._auth_headers[0][0], "Cookie") - self.assertEqual(self.context._auth_headers[0][1][:8], "splunkd_") - - @pytest.mark.smoke - def test_got_cookie_on_connect(self): - self.assertIsNotNone(self.context.get_cookies()) - self.assertNotEqual(self.context.get_cookies(), {}) - self.assertEqual(len(self.context.get_cookies()), 1) - self.assertEqual(list(self.context.get_cookies().keys())[0][:8], "splunkd_") - - @pytest.mark.smoke - def test_cookie_with_autologin(self): - self.context.autologin = True - self.assertEqual(self.context.get("/services").status, 200) - self.assertTrue(self.context.has_cookies()) - self.context.logout() - self.assertFalse(self.context.has_cookies()) - self.assertEqual(self.context.get("/services").status, 200) - self.assertTrue(self.context.has_cookies()) - - @pytest.mark.smoke - def test_cookie_without_autologin(self): - self.context.autologin = False - self.assertEqual(self.context.get("/services").status, 200) - self.assertTrue(self.context.has_cookies()) - self.context.logout() - self.assertFalse(self.context.has_cookies()) - self.assertRaises(AuthenticationError, - 
self.context.get, "/services") - - @pytest.mark.smoke - def test_got_updated_cookie_with_get(self): - old_cookies = self.context.get_cookies() - resp = self.context.get("apps/local") - found = False - for key, value in resp.headers: - if key.lower() == "set-cookie": - found = True - self.assertEqual(value[:8], "splunkd_") - - new_cookies = {} - binding._parse_cookies(value, new_cookies) - # We're only expecting 1 in this scenario - self.assertEqual(len(old_cookies), 1) - self.assertTrue(len(list(new_cookies.values())), 1) - self.assertEqual(old_cookies, new_cookies) - self.assertEqual(list(new_cookies.values())[0], list(old_cookies.values())[0]) - self.assertTrue(found) - - @pytest.mark.smoke - def test_login_fails_with_bad_cookie(self): - # We should get an error if using a bad cookie - try: - binding.connect(**{"cookie": "bad=cookie"}) - self.fail() - except AuthenticationError as ae: - self.assertEqual(str(ae), "Login failed.") - - @pytest.mark.smoke - def test_login_with_multiple_cookies(self): - # We should get an error if using a bad cookie - new_context = binding.Context() - new_context.get_cookies().update({"bad": "cookie"}) - try: - new_context = new_context.login() - self.fail() - except AuthenticationError as ae: - self.assertEqual(str(ae), "Login failed.") - # Bring in a valid cookie now - for key, value in list(self.context.get_cookies().items()): - new_context.get_cookies()[key] = value - - self.assertEqual(len(new_context.get_cookies()), 2) - self.assertTrue('bad' in list(new_context.get_cookies().keys())) - self.assertTrue('cookie' in list(new_context.get_cookies().values())) - - for k, v in list(self.context.get_cookies().items()): - self.assertEqual(new_context.get_cookies()[k], v) - - self.assertEqual(new_context.get("apps/local").status, 200) - - @pytest.mark.smoke - def test_login_fails_without_cookie_or_token(self): - opts = { - 'host': self.opts.kwargs['host'], - 'port': self.opts.kwargs['port'] - } - try: - binding.connect(**opts) - self.fail() - except AuthenticationError as ae: - self.assertEqual(str(ae), "Login failed.") - - -class TestNamespace(unittest.TestCase): - def test_namespace(self): - tests = [ - ({}, - {'sharing': None, 'owner': None, 'app': None}), - - ({'owner': "Bob"}, - {'sharing': None, 'owner': "Bob", 'app': None}), - - ({'app': "search"}, - {'sharing': None, 'owner': None, 'app': "search"}), - - ({'owner': "Bob", 'app': "search"}, - {'sharing': None, 'owner': "Bob", 'app': "search"}), - - ({'sharing': "user", 'owner': "Bob@bob.com"}, - {'sharing': "user", 'owner': "Bob@bob.com", 'app': None}), - - ({'sharing': "user"}, - {'sharing': "user", 'owner': None, 'app': None}), - - ({'sharing': "user", 'owner': "Bob"}, - {'sharing': "user", 'owner': "Bob", 'app': None}), - - ({'sharing': "user", 'app': "search"}, - {'sharing': "user", 'owner': None, 'app': "search"}), - - ({'sharing': "user", 'owner': "Bob", 'app': "search"}, - {'sharing': "user", 'owner': "Bob", 'app': "search"}), - - ({'sharing': "app"}, - {'sharing': "app", 'owner': "nobody", 'app': None}), - - ({'sharing': "app", 'owner': "Bob"}, - {'sharing': "app", 'owner': "nobody", 'app': None}), - - ({'sharing': "app", 'app': "search"}, - {'sharing': "app", 'owner': "nobody", 'app': "search"}), - - ({'sharing': "app", 'owner': "Bob", 'app': "search"}, - {'sharing': "app", 'owner': "nobody", 'app': "search"}), - - ({'sharing': "global"}, - {'sharing': "global", 'owner': "nobody", 'app': None}), - - ({'sharing': "global", 'owner': "Bob"}, - {'sharing': "global", 'owner': "nobody", 'app': None}), 
- - ({'sharing': "global", 'app': "search"}, - {'sharing': "global", 'owner': "nobody", 'app': "search"}), - - ({'sharing': "global", 'owner': "Bob", 'app': "search"}, - {'sharing': "global", 'owner': "nobody", 'app': "search"}), - - ({'sharing': "system"}, - {'sharing': "system", 'owner': "nobody", 'app': "system"}), - - ({'sharing': "system", 'owner': "Bob"}, - {'sharing': "system", 'owner': "nobody", 'app': "system"}), - - ({'sharing': "system", 'app': "search"}, - {'sharing': "system", 'owner': "nobody", 'app': "system"}), - - ({'sharing': "system", 'owner': "Bob", 'app': "search"}, - {'sharing': "system", 'owner': "nobody", 'app': "system"}), - - ({'sharing': 'user', 'owner': '-', 'app': '-'}, - {'sharing': 'user', 'owner': '-', 'app': '-'})] - - for kwargs, expected in tests: - namespace = binding.namespace(**kwargs) - for k, v in list(expected.items()): - self.assertEqual(namespace[k], v) - - def test_namespace_fails(self): - self.assertRaises(ValueError, binding.namespace, sharing="gobble") - - -@pytest.mark.smoke -class TestBasicAuthentication(unittest.TestCase): - def setUp(self): - self.opts = testlib.parse([], {}, ".env") - opts = self.opts.kwargs.copy() - opts["basic"] = True - opts["username"] = self.opts.kwargs["username"] - opts["password"] = self.opts.kwargs["password"] - - self.context = binding.connect(**opts) - from splunklib import client - service = client.Service(**opts) - - if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: - def assertIsNotNone(self, obj, msg=None): - if obj is None: - raise self.failureException(msg or '%r is not None' % obj) - - def test_basic_in_auth_headers(self): - self.assertIsNotNone(self.context._auth_headers) - self.assertNotEqual(self.context._auth_headers, []) - self.assertEqual(len(self.context._auth_headers), 1) - self.assertEqual(len(self.context._auth_headers), 1) - self.assertEqual(self.context._auth_headers[0][0], "Authorization") - self.assertEqual(self.context._auth_headers[0][1][:6], "Basic ") - self.assertEqual(self.context.get("/services").status, 200) - - -@pytest.mark.smoke -class TestTokenAuthentication(BindingTestCase): - def test_preexisting_token(self): - token = self.context.token - opts = self.opts.kwargs.copy() - opts["token"] = token - opts["username"] = "boris the mad baboon" - opts["password"] = "nothing real" - - newContext = binding.Context(**opts) - response = newContext.get("/services") - self.assertEqual(response.status, 200) - - socket = newContext.connect() - socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) - socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) - socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) - socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write("\r\n".encode('utf-8')) - socket.close() - - def test_preexisting_token_sans_splunk(self): - token = self.context.token - if token.startswith('Splunk '): - token = token.split(' ', 1)[1] - self.assertFalse(token.startswith('Splunk ')) - else: - self.fail('Token did not start with "Splunk ".') - opts = self.opts.kwargs.copy() - opts["token"] = token - opts["username"] = "boris the mad baboon" - opts["password"] = "nothing real" - - newContext = binding.Context(**opts) - response = newContext.get("/services") - self.assertEqual(response.status, 200) - - socket = newContext.connect() - socket.write((f"POST 
{self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) - socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) - socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) - socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write("\r\n".encode('utf-8')) - socket.close() - - def test_connect_with_preexisting_token_sans_user_and_pass(self): - token = self.context.token - opts = self.opts.kwargs.copy() - del opts['username'] - del opts['password'] - opts["token"] = token - - newContext = binding.connect(**opts) - response = newContext.get('/services') - self.assertEqual(response.status, 200) - - socket = newContext.connect() - socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) - socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) - socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) - socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write("\r\n".encode('utf-8')) - socket.close() - - -class TestPostWithBodyParam(unittest.TestCase): - - def test_post(self): - def handler(url, message, **kwargs): - assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" - assert message["body"] == b"testkey=testvalue" - return splunklib.data.Record({ - "status": 200, - "headers": [], - }) - - ctx = binding.Context(handler=handler) - ctx.post("foo/bar", owner="testowner", app="testapp", body={"testkey": "testvalue"}) - - def test_post_with_params_and_body(self): - def handler(url, message, **kwargs): - assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar?extrakey=extraval" - assert message["body"] == b"testkey=testvalue" - return splunklib.data.Record({ - "status": 200, - "headers": [], - }) - - ctx = binding.Context(handler=handler) - ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp", body={"testkey": "testvalue"}) - - def test_post_with_params_and_no_body(self): - def handler(url, message, **kwargs): - assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" - assert message["body"] == b"extrakey=extraval" - return splunklib.data.Record({ - "status": 200, - "headers": [], - }) - - ctx = binding.Context(handler=handler) - ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp") - - -def _wrap_handler(func, response_code=200, body=""): - def wrapped(handler_self): - result = func(handler_self) - if result is None: - handler_self.send_response(response_code) - handler_self.end_headers() - handler_self.wfile.write(body) - - return wrapped - - -class MockServer: - def __init__(self, port=9093, **handlers): - methods = {"do_" + k: _wrap_handler(v) for (k, v) in list(handlers.items())} - - def init(handler_self, socket, address, server): - BaseHTTPServer.BaseHTTPRequestHandler.__init__(handler_self, socket, address, server) - - def log(*args): # To silence server access logs - pass - - methods["__init__"] = init - methods["log_message"] = log - Handler = type("Handler", - (BaseHTTPServer.BaseHTTPRequestHandler, object), - methods) - self._svr = BaseHTTPServer.HTTPServer(("localhost", port), Handler) - - def run(): - self._svr.handle_request() - - self._thread = Thread(target=run) - self._thread.daemon = True - - def __enter__(self): - self._thread.start() - return 
self._svr - - def __exit__(self, typ, value, traceback): - self._thread.join(10) - self._svr.server_close() - - -class TestFullPost(unittest.TestCase): - - def test_post_with_body_urlencoded(self): - def check_response(handler): - length = int(handler.headers.get('content-length', 0)) - body = handler.rfile.read(length) - assert body.decode('utf-8') == "foo=bar" - - with MockServer(POST=check_response): - ctx = binding.connect(port=9093, scheme='http', token="waffle") - ctx.post("/", foo="bar") - - def test_post_with_body_string(self): - def check_response(handler): - length = int(handler.headers.get('content-length', 0)) - body = handler.rfile.read(length) - assert handler.headers['content-type'] == 'application/json' - assert json.loads(body)["baz"] == "baf" - - with MockServer(POST=check_response): - ctx = binding.connect(port=9093, scheme='http', token="waffle", - headers=[("Content-Type", "application/json")]) - ctx.post("/", foo="bar", body='{"baz": "baf"}') - - def test_post_with_body_dict(self): - def check_response(handler): - length = int(handler.headers.get('content-length', 0)) - body = handler.rfile.read(length) - assert handler.headers['content-type'] == 'application/x-www-form-urlencoded' - assert ensure_str(body) in ['baz=baf&hep=cat', 'hep=cat&baz=baf'] - - with MockServer(POST=check_response): - ctx = binding.connect(port=9093, scheme='http', token="waffle") - ctx.post("/", foo="bar", body={"baz": "baf", "hep": "cat"}) - - -if __name__ == "__main__": - unittest.main() +#!/usr/bin/env python +# +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from http import server as BaseHTTPServer +from io import BytesIO, StringIO +from threading import Thread +from urllib.request import Request, urlopen + +from xml.etree.ElementTree import XML + +import json +import logging +from tests import testlib +import unittest +import socket +import ssl + +import splunklib +from splunklib import binding +from splunklib.binding import HTTPError, AuthenticationError, UrlEncoded +from splunklib import data +from splunklib.utils import ensure_str + +import pytest + +# splunkd endpoint paths +PATH_USERS = "authentication/users/" + +# XML Namespaces +NAMESPACE_ATOM = "http://www.w3.org/2005/Atom" +NAMESPACE_REST = "http://dev.splunk.com/ns/rest" +NAMESPACE_OPENSEARCH = "http://a9.com/-/spec/opensearch/1.1" + +# XML Extended Name Fragments +XNAMEF_ATOM = "{%s}%%s" % NAMESPACE_ATOM +XNAMEF_REST = "{%s}%%s" % NAMESPACE_REST +XNAMEF_OPENSEARCH = "{%s}%%s" % NAMESPACE_OPENSEARCH + +# XML Extended Names +XNAME_AUTHOR = XNAMEF_ATOM % "author" +XNAME_ENTRY = XNAMEF_ATOM % "entry" +XNAME_FEED = XNAMEF_ATOM % "feed" +XNAME_ID = XNAMEF_ATOM % "id" +XNAME_TITLE = XNAMEF_ATOM % "title" + + +def load(response): + return data.load(response.body.read()) + + +class BindingTestCase(unittest.TestCase): + context = None + + def setUp(self): + logging.info("%s", self.__class__.__name__) + self.opts = testlib.parse([], {}, ".env") + self.context = binding.connect(**self.opts.kwargs) + logging.debug("Connected to splunkd.") + + +class TestResponseReader(BindingTestCase): + def test_empty(self): + response = binding.ResponseReader(BytesIO(b"")) + self.assertTrue(response.empty) + self.assertEqual(response.peek(10), b"") + self.assertEqual(response.read(10), b"") + + arr = bytearray(10) + self.assertEqual(response.readinto(arr), 0) + self.assertEqual(arr, bytearray(10)) + self.assertTrue(response.empty) + + def test_read_past_end(self): + txt = b"abcd" + response = binding.ResponseReader(BytesIO(txt)) + self.assertFalse(response.empty) + self.assertEqual(response.peek(10), txt) + self.assertEqual(response.read(10), txt) + self.assertTrue(response.empty) + self.assertEqual(response.peek(10), b"") + self.assertEqual(response.read(10), b"") + + def test_read_partial(self): + txt = b"This is a test of the emergency broadcasting system." 
+ response = binding.ResponseReader(BytesIO(txt)) + self.assertEqual(response.peek(5), txt[:5]) + self.assertFalse(response.empty) + self.assertEqual(response.read(), txt) + self.assertTrue(response.empty) + self.assertEqual(response.read(), b'') + + def test_readable(self): + txt = "abcd" + response = binding.ResponseReader(StringIO(txt)) + self.assertTrue(response.readable()) + + def test_readinto_bytearray(self): + txt = b"Checking readinto works as expected" + response = binding.ResponseReader(BytesIO(txt)) + arr = bytearray(10) + self.assertEqual(response.readinto(arr), 10) + self.assertEqual(arr[:10], b"Checking r") + self.assertEqual(response.readinto(arr), 10) + self.assertEqual(arr[:10], b"eadinto wo") + self.assertEqual(response.readinto(arr), 10) + self.assertEqual(arr[:10], b"rks as exp") + self.assertEqual(response.readinto(arr), 5) + self.assertEqual(arr[:5], b"ected") + self.assertTrue(response.empty) + + def test_readinto_memoryview(self): + txt = b"Checking readinto works as expected" + response = binding.ResponseReader(BytesIO(txt)) + arr = bytearray(10) + mv = memoryview(arr) + self.assertEqual(response.readinto(mv), 10) + self.assertEqual(arr[:10], b"Checking r") + self.assertEqual(response.readinto(mv), 10) + self.assertEqual(arr[:10], b"eadinto wo") + self.assertEqual(response.readinto(mv), 10) + self.assertEqual(arr[:10], b"rks as exp") + self.assertEqual(response.readinto(mv), 5) + self.assertEqual(arr[:5], b"ected") + self.assertTrue(response.empty) + + +class TestUrlEncoded(BindingTestCase): + def test_idempotent(self): + a = UrlEncoded('abc') + self.assertEqual(a, UrlEncoded(a)) + + def test_append(self): + self.assertEqual(UrlEncoded('a') + UrlEncoded('b'), + UrlEncoded('ab')) + + def test_append_string(self): + self.assertEqual(UrlEncoded('a') + '%', + UrlEncoded('a%')) + + def test_append_to_string(self): + self.assertEqual('%' + UrlEncoded('a'), + UrlEncoded('%a')) + + def test_interpolation_fails(self): + self.assertRaises(TypeError, lambda: UrlEncoded('%s') % 'boris') + + def test_chars(self): + for char, code in [(' ', '%20'), + ('"', '%22'), + ('%', '%25')]: + self.assertEqual(UrlEncoded(char), + UrlEncoded(code, skip_encode=True)) + + def test_repr(self): + self.assertEqual(repr(UrlEncoded('% %')), "UrlEncoded('% %')") + + +class TestAuthority(unittest.TestCase): + def test_authority_default(self): + self.assertEqual(binding._authority(), + "https://localhost:8089") + + def test_ipv4_host(self): + self.assertEqual( + binding._authority( + host="splunk.utopia.net"), + "https://splunk.utopia.net:8089") + + def test_ipv6_host(self): + self.assertEqual( + binding._authority( + host="2001:0db8:85a3:0000:0000:8a2e:0370:7334"), + "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089") + + def test_ipv6_host_enclosed(self): + self.assertEqual( + binding._authority( + host="[2001:0db8:85a3:0000:0000:8a2e:0370:7334]"), + "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089") + + def test_all_fields(self): + self.assertEqual( + binding._authority( + scheme="http", + host="splunk.utopia.net", + port="471"), + "http://splunk.utopia.net:471") + + +class TestUserManipulation(BindingTestCase): + def setUp(self): + BindingTestCase.setUp(self) + self.username = testlib.tmpname() + self.password = "changeme!" 
+ self.roles = "power" + + # Delete user if it exists already + try: + response = self.context.delete(PATH_USERS + self.username) + self.assertEqual(response.status, 200) + except HTTPError as e: + self.assertTrue(e.status in [400, 500]) + + def tearDown(self): + BindingTestCase.tearDown(self) + try: + self.context.delete(PATH_USERS + self.username) + except HTTPError as e: + if e.status not in [400, 500]: + raise + + def test_user_without_role_fails(self): + self.assertRaises(binding.HTTPError, + self.context.post, + PATH_USERS, name=self.username, + password=self.password) + + def test_create_user(self): + response = self.context.post( + PATH_USERS, name=self.username, + password=self.password, roles=self.roles) + self.assertEqual(response.status, 201) + + response = self.context.get(PATH_USERS + self.username) + entry = load(response).feed.entry + self.assertEqual(entry.title, self.username) + + def test_update_user(self): + self.test_create_user() + response = self.context.post( + PATH_USERS + self.username, + password=self.password, + roles=self.roles, + defaultApp="search", + realname="Renzo", + email="email.me@now.com") + self.assertEqual(response.status, 200) + + response = self.context.get(PATH_USERS + self.username) + self.assertEqual(response.status, 200) + entry = load(response).feed.entry + self.assertEqual(entry.title, self.username) + self.assertEqual(entry.content.defaultApp, "search") + self.assertEqual(entry.content.realname, "Renzo") + self.assertEqual(entry.content.email, "email.me@now.com") + + def test_post_with_body_behaves(self): + self.test_create_user() + response = self.context.post( + PATH_USERS + self.username, + body="defaultApp=search", + ) + self.assertEqual(response.status, 200) + + def test_post_with_get_arguments_to_receivers_stream(self): + text = 'Hello, world!' 
+ response = self.context.post( + '/services/receivers/simple', + headers=[('x-splunk-input-mode', 'streaming')], + source='sdk', sourcetype='sdk_test', + body=text + ) + self.assertEqual(response.status, 200) + + +class TestSocket(BindingTestCase): + def test_socket(self): + socket = self.context.connect() + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) + socket.close() + + # Sockets take bytes not strings + # + # def test_unicode_socket(self): + # socket = self.context.connect() + # socket.write(u"POST %s HTTP/1.1\r\n" %\ + # self.context._abspath("some/path/to/post/to")) + # socket.write(u"Host: %s:%s\r\n" %\ + # (self.context.host, self.context.port)) + # socket.write(u"Accept-Encoding: identity\r\n") + # socket.write((u"Authorization: %s\r\n" %\ + # self.context.token).encode('utf-8')) + # socket.write(u"X-Splunk-Input-Mode: Streaming\r\n") + # socket.write("\r\n") + # socket.close() + + def test_socket_gethostbyname(self): + self.assertTrue(self.context.connect()) + self.context.host = socket.gethostbyname(self.context.host) + self.assertTrue(self.context.connect()) + + +class TestUnicodeConnect(BindingTestCase): + def test_unicode_connect(self): + opts = self.opts.kwargs.copy() + opts['host'] = str(opts['host']) + context = binding.connect(**opts) + # Just check to make sure the service is alive + response = context.get("/services") + self.assertEqual(response.status, 200) + + +@pytest.mark.smoke +class TestAutologin(BindingTestCase): + def test_with_autologin(self): + self.context.autologin = True + self.assertEqual(self.context.get("/services").status, 200) + self.context.logout() + self.assertEqual(self.context.get("/services").status, 200) + + def test_without_autologin(self): + self.context.autologin = False + self.assertEqual(self.context.get("/services").status, 200) + self.context.logout() + self.assertRaises(AuthenticationError, + self.context.get, "/services") + + +class TestAbspath(BindingTestCase): + def setUp(self): + BindingTestCase.setUp(self) + self.kwargs = self.opts.kwargs.copy() + if 'app' in self.kwargs: del self.kwargs['app'] + if 'owner' in self.kwargs: del self.kwargs['owner'] + + def test_default(self): + path = self.context._abspath("foo", owner=None, app=None) + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/services/foo") + + def test_with_owner(self): + path = self.context._abspath("foo", owner="me", app=None) + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/system/foo") + + def test_with_app(self): + path = self.context._abspath("foo", owner=None, app="MyApp") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_with_both(self): + path = self.context._abspath("foo", owner="me", app="MyApp") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/MyApp/foo") + + def test_user_sharing(self): + path = self.context._abspath("foo", owner="me", app="MyApp", sharing="user") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/MyApp/foo") + + def test_sharing_app(self): + path = 
self.context._abspath("foo", owner="me", app="MyApp", sharing="app") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_sharing_global(self): + path = self.context._abspath("foo", owner="me", app="MyApp", sharing="global") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_sharing_system(self): + path = self.context._abspath("foo bar", owner="me", app="MyApp", sharing="system") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/system/foo%20bar") + + def test_url_forbidden_characters(self): + path = self.context._abspath('/a/b c/d') + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, '/a/b%20c/d') + + def test_context_defaults(self): + context = binding.connect(**self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/services/foo") + + def test_context_with_owner(self): + context = binding.connect(owner="me", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/system/foo") + + def test_context_with_app(self): + context = binding.connect(app="MyApp", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_context_with_both(self): + context = binding.connect(owner="me", app="MyApp", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/MyApp/foo") + + def test_context_with_user_sharing(self): + context = binding.connect( + owner="me", app="MyApp", sharing="user", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/MyApp/foo") + + def test_context_with_app_sharing(self): + context = binding.connect( + owner="me", app="MyApp", sharing="app", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_context_with_global_sharing(self): + context = binding.connect( + owner="me", app="MyApp", sharing="global", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_context_with_system_sharing(self): + context = binding.connect( + owner="me", app="MyApp", sharing="system", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/system/foo") + + def test_context_with_owner_as_email(self): + context = binding.connect(owner="me@me.com", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me%40me.com/system/foo") + self.assertEqual(path, UrlEncoded("/servicesNS/me@me.com/system/foo")) + + +# An urllib2 based HTTP request handler, used to test the binding layers +# support for pluggable request handlers. 
+def urllib2_handler(url, message, **kwargs):
+    method = message['method'].lower()
+    data = message.get('body', b"") if method == 'post' else None
+    headers = dict(message.get('headers', []))
+    req = Request(url, data, headers)
+    try:
+        response = urlopen(req, context=ssl._create_unverified_context())
+    except HTTPError as err:
+        # Propagate HTTP errors via the returned response message
+        response = err
+    return {
+        'status': response.code,
+        'reason': response.msg,
+        'headers': dict(response.info()),
+        'body': BytesIO(response.read())
+    }
+
+
+def isatom(body):
+    """Answers if the given response body looks like ATOM."""
+    root = XML(body)
+    return \
+        root.tag == XNAME_FEED and \
+        root.find(XNAME_AUTHOR) is not None and \
+        root.find(XNAME_ID) is not None and \
+        root.find(XNAME_TITLE) is not None
+
+
+class TestPluggableHTTP(testlib.SDKTestCase):
+    # Verify pluggable HTTP request handlers.
+    def test_handlers(self):
+        paths = ["/services", "authentication/users",
+                 "search/jobs"]
+        handlers = [binding.handler(),  # default handler
+                    urllib2_handler]
+        for handler in handlers:
+            logging.debug("Connecting with handler %s", handler)
+            context = binding.connect(
+                handler=handler,
+                **self.opts.kwargs)
+            for path in paths:
+                body = context.get(path).body.read()
+                self.assertTrue(isatom(body))
+
+
+def urllib2_insert_cookie_handler(url, message, **kwargs):
+    method = message['method'].lower()
+    data = message.get('body', b"") if method == 'post' else None
+    headers = dict(message.get('headers', []))
+    req = Request(url, data, headers)
+    try:
+        response = urlopen(req, context=ssl._create_unverified_context())
+    except HTTPError as err:
+        # Propagate HTTP errors via the returned response message
+        response = err
+
+    # Mimic the insertion of 3rd party cookies into the response.
+    # An example is "sticky session"/"insert cookie" persistence
+    # of a load balancer for a SHC.
+    header_list = list(response.info().items())
+    header_list.append(("Set-Cookie", "BIGipServer_splunk-shc-8089=1234567890.12345.0000; path=/; Httponly; Secure"))
+    header_list.append(("Set-Cookie", "home_made=yummy"))
+
+    return {
+        'status': response.code,
+        'reason': response.msg,
+        'headers': header_list,
+        'body': BytesIO(response.read())
+    }
+
+
+class TestCookiePersistence(testlib.SDKTestCase):
+    # Verify persistence of 3rd party inserted cookies.
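+    # The handler above appends two extra Set-Cookie headers next to the
+    # session cookie splunkd issues, so after connect() all three should be
+    # visible through context.get_cookies(). A rough sketch of the expected
+    # state (the splunkd cookie name varies, hence the prefix check below):
+    #
+    #     context.get_cookies()
+    #     # => {'splunkd_<port>': '<session token>',
+    #     #     'BIGipServer_splunk-shc-8089': '1234567890.12345.0000',
+    #     #     'home_made': 'yummy'}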
+ def test_3rdPartyInsertedCookiePersistence(self): + paths = ["/services", "authentication/users", + "search/jobs"] + logging.debug("Connecting with urllib2_insert_cookie_handler %s", urllib2_insert_cookie_handler) + context = binding.connect( + handler=urllib2_insert_cookie_handler, + **self.opts.kwargs) + + persisted_cookies = context.get_cookies() + + splunk_token_found = False + for k, v in persisted_cookies.items(): + if k[:8] == "splunkd_": + splunk_token_found = True + break + + self.assertEqual(splunk_token_found, True) + self.assertEqual(persisted_cookies['BIGipServer_splunk-shc-8089'], "1234567890.12345.0000") + self.assertEqual(persisted_cookies['home_made'], "yummy") + + +@pytest.mark.smoke +class TestLogout(BindingTestCase): + def test_logout(self): + response = self.context.get("/services") + self.assertEqual(response.status, 200) + self.context.logout() + self.assertEqual(self.context.token, binding._NoAuthenticationToken) + self.assertEqual(self.context.get_cookies(), {}) + self.assertRaises(AuthenticationError, + self.context.get, "/services") + self.assertRaises(AuthenticationError, + self.context.post, "/services") + self.assertRaises(AuthenticationError, + self.context.delete, "/services") + self.context.login() + response = self.context.get("/services") + self.assertEqual(response.status, 200) + + +class TestCookieAuthentication(unittest.TestCase): + def setUp(self): + self.opts = testlib.parse([], {}, ".env") + self.context = binding.connect(**self.opts.kwargs) + + # Skip these tests if running below Splunk 6.2, cookie-auth didn't exist before + from splunklib import client + service = client.Service(**self.opts.kwargs) + # TODO: Workaround the fact that skipTest is not defined by unittest2.TestCase + service.login() + splver = service.splunk_version + if splver[:2] < (6, 2): + self.skipTest("Skipping cookie-auth tests, running in %d.%d.%d, this feature was added in 6.2+" % splver) + + if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: + + def assertIsNotNone(self, obj, msg=None): + if obj is None: + raise self.failureException(msg or '%r is not None' % obj) + + @pytest.mark.smoke + def test_cookie_in_auth_headers(self): + self.assertIsNotNone(self.context._auth_headers) + self.assertNotEqual(self.context._auth_headers, []) + self.assertEqual(len(self.context._auth_headers), 1) + self.assertEqual(len(self.context._auth_headers), 1) + self.assertEqual(self.context._auth_headers[0][0], "Cookie") + self.assertEqual(self.context._auth_headers[0][1][:8], "splunkd_") + + @pytest.mark.smoke + def test_got_cookie_on_connect(self): + self.assertIsNotNone(self.context.get_cookies()) + self.assertNotEqual(self.context.get_cookies(), {}) + self.assertEqual(len(self.context.get_cookies()), 1) + self.assertEqual(list(self.context.get_cookies().keys())[0][:8], "splunkd_") + + @pytest.mark.smoke + def test_cookie_with_autologin(self): + self.context.autologin = True + self.assertEqual(self.context.get("/services").status, 200) + self.assertTrue(self.context.has_cookies()) + self.context.logout() + self.assertFalse(self.context.has_cookies()) + self.assertEqual(self.context.get("/services").status, 200) + self.assertTrue(self.context.has_cookies()) + + @pytest.mark.smoke + def test_cookie_without_autologin(self): + self.context.autologin = False + self.assertEqual(self.context.get("/services").status, 200) + self.assertTrue(self.context.has_cookies()) + self.context.logout() + self.assertFalse(self.context.has_cookies()) + self.assertRaises(AuthenticationError, + 
self.context.get, "/services") + + @pytest.mark.smoke + def test_got_updated_cookie_with_get(self): + old_cookies = self.context.get_cookies() + resp = self.context.get("apps/local") + found = False + for key, value in resp.headers: + if key.lower() == "set-cookie": + found = True + self.assertEqual(value[:8], "splunkd_") + + new_cookies = {} + binding._parse_cookies(value, new_cookies) + # We're only expecting 1 in this scenario + self.assertEqual(len(old_cookies), 1) + self.assertTrue(len(list(new_cookies.values())), 1) + self.assertEqual(old_cookies, new_cookies) + self.assertEqual(list(new_cookies.values())[0], list(old_cookies.values())[0]) + self.assertTrue(found) + + @pytest.mark.smoke + def test_login_fails_with_bad_cookie(self): + # We should get an error if using a bad cookie + try: + binding.connect(**{"cookie": "bad=cookie"}) + self.fail() + except AuthenticationError as ae: + self.assertEqual(str(ae), "Login failed.") + + @pytest.mark.smoke + def test_login_with_multiple_cookies(self): + # We should get an error if using a bad cookie + new_context = binding.Context() + new_context.get_cookies().update({"bad": "cookie"}) + try: + new_context = new_context.login() + self.fail() + except AuthenticationError as ae: + self.assertEqual(str(ae), "Login failed.") + # Bring in a valid cookie now + for key, value in self.context.get_cookies().items(): + new_context.get_cookies()[key] = value + + self.assertEqual(len(new_context.get_cookies()), 2) + self.assertTrue('bad' in list(new_context.get_cookies().keys())) + self.assertTrue('cookie' in list(new_context.get_cookies().values())) + + for k, v in self.context.get_cookies().items(): + self.assertEqual(new_context.get_cookies()[k], v) + + self.assertEqual(new_context.get("apps/local").status, 200) + + @pytest.mark.smoke + def test_login_fails_without_cookie_or_token(self): + opts = { + 'host': self.opts.kwargs['host'], + 'port': self.opts.kwargs['port'] + } + try: + binding.connect(**opts) + self.fail() + except AuthenticationError as ae: + self.assertEqual(str(ae), "Login failed.") + + +class TestNamespace(unittest.TestCase): + def test_namespace(self): + tests = [ + ({}, + {'sharing': None, 'owner': None, 'app': None}), + + ({'owner': "Bob"}, + {'sharing': None, 'owner': "Bob", 'app': None}), + + ({'app': "search"}, + {'sharing': None, 'owner': None, 'app': "search"}), + + ({'owner': "Bob", 'app': "search"}, + {'sharing': None, 'owner': "Bob", 'app': "search"}), + + ({'sharing': "user", 'owner': "Bob@bob.com"}, + {'sharing': "user", 'owner': "Bob@bob.com", 'app': None}), + + ({'sharing': "user"}, + {'sharing': "user", 'owner': None, 'app': None}), + + ({'sharing': "user", 'owner': "Bob"}, + {'sharing': "user", 'owner': "Bob", 'app': None}), + + ({'sharing': "user", 'app': "search"}, + {'sharing': "user", 'owner': None, 'app': "search"}), + + ({'sharing': "user", 'owner': "Bob", 'app': "search"}, + {'sharing': "user", 'owner': "Bob", 'app': "search"}), + + ({'sharing': "app"}, + {'sharing': "app", 'owner': "nobody", 'app': None}), + + ({'sharing': "app", 'owner': "Bob"}, + {'sharing': "app", 'owner': "nobody", 'app': None}), + + ({'sharing': "app", 'app': "search"}, + {'sharing': "app", 'owner': "nobody", 'app': "search"}), + + ({'sharing': "app", 'owner': "Bob", 'app': "search"}, + {'sharing': "app", 'owner': "nobody", 'app': "search"}), + + ({'sharing': "global"}, + {'sharing': "global", 'owner': "nobody", 'app': None}), + + ({'sharing': "global", 'owner': "Bob"}, + {'sharing': "global", 'owner': "nobody", 'app': None}), + + 
({'sharing': "global", 'app': "search"}, + {'sharing': "global", 'owner': "nobody", 'app': "search"}), + + ({'sharing': "global", 'owner': "Bob", 'app': "search"}, + {'sharing': "global", 'owner': "nobody", 'app': "search"}), + + ({'sharing': "system"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), + + ({'sharing': "system", 'owner': "Bob"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), + + ({'sharing': "system", 'app': "search"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), + + ({'sharing': "system", 'owner': "Bob", 'app': "search"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), + + ({'sharing': 'user', 'owner': '-', 'app': '-'}, + {'sharing': 'user', 'owner': '-', 'app': '-'})] + + for kwargs, expected in tests: + namespace = binding.namespace(**kwargs) + for k, v in expected.items(): + self.assertEqual(namespace[k], v) + + def test_namespace_fails(self): + self.assertRaises(ValueError, binding.namespace, sharing="gobble") + + +@pytest.mark.smoke +class TestBasicAuthentication(unittest.TestCase): + def setUp(self): + self.opts = testlib.parse([], {}, ".env") + opts = self.opts.kwargs.copy() + opts["basic"] = True + opts["username"] = self.opts.kwargs["username"] + opts["password"] = self.opts.kwargs["password"] + + self.context = binding.connect(**opts) + from splunklib import client + service = client.Service(**opts) + + if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: + def assertIsNotNone(self, obj, msg=None): + if obj is None: + raise self.failureException(msg or '%r is not None' % obj) + + def test_basic_in_auth_headers(self): + self.assertIsNotNone(self.context._auth_headers) + self.assertNotEqual(self.context._auth_headers, []) + self.assertEqual(len(self.context._auth_headers), 1) + self.assertEqual(len(self.context._auth_headers), 1) + self.assertEqual(self.context._auth_headers[0][0], "Authorization") + self.assertEqual(self.context._auth_headers[0][1][:6], "Basic ") + self.assertEqual(self.context.get("/services").status, 200) + + +@pytest.mark.smoke +class TestTokenAuthentication(BindingTestCase): + def test_preexisting_token(self): + token = self.context.token + opts = self.opts.kwargs.copy() + opts["token"] = token + opts["username"] = "boris the mad baboon" + opts["password"] = "nothing real" + + newContext = binding.Context(**opts) + response = newContext.get("/services") + self.assertEqual(response.status, 200) + + socket = newContext.connect() + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) + socket.close() + + def test_preexisting_token_sans_splunk(self): + token = self.context.token + if token.startswith('Splunk '): + token = token.split(' ', 1)[1] + self.assertFalse(token.startswith('Splunk ')) + else: + self.fail('Token did not start with "Splunk ".') + opts = self.opts.kwargs.copy() + opts["token"] = token + opts["username"] = "boris the mad baboon" + opts["password"] = "nothing real" + + newContext = binding.Context(**opts) + response = newContext.get("/services") + self.assertEqual(response.status, 200) + + socket = newContext.connect() + socket.write((f"POST 
{self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) + socket.close() + + def test_connect_with_preexisting_token_sans_user_and_pass(self): + token = self.context.token + opts = self.opts.kwargs.copy() + del opts['username'] + del opts['password'] + opts["token"] = token + + newContext = binding.connect(**opts) + response = newContext.get('/services') + self.assertEqual(response.status, 200) + + socket = newContext.connect() + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) + socket.close() + + +class TestPostWithBodyParam(unittest.TestCase): + + def test_post(self): + def handler(url, message, **kwargs): + assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" + assert message["body"] == b"testkey=testvalue" + return splunklib.data.Record({ + "status": 200, + "headers": [], + }) + + ctx = binding.Context(handler=handler) + ctx.post("foo/bar", owner="testowner", app="testapp", body={"testkey": "testvalue"}) + + def test_post_with_params_and_body(self): + def handler(url, message, **kwargs): + assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar?extrakey=extraval" + assert message["body"] == b"testkey=testvalue" + return splunklib.data.Record({ + "status": 200, + "headers": [], + }) + + ctx = binding.Context(handler=handler) + ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp", body={"testkey": "testvalue"}) + + def test_post_with_params_and_no_body(self): + def handler(url, message, **kwargs): + assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" + assert message["body"] == b"extrakey=extraval" + return splunklib.data.Record({ + "status": 200, + "headers": [], + }) + + ctx = binding.Context(handler=handler) + ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp") + + +def _wrap_handler(func, response_code=200, body=""): + def wrapped(handler_self): + result = func(handler_self) + if result is None: + handler_self.send_response(response_code) + handler_self.end_headers() + handler_self.wfile.write(body) + + return wrapped + + +class MockServer: + def __init__(self, port=9093, **handlers): + methods = {"do_" + k: _wrap_handler(v) for (k, v) in handlers.items()} + + def init(handler_self, socket, address, server): + BaseHTTPServer.BaseHTTPRequestHandler.__init__(handler_self, socket, address, server) + + def log(*args): # To silence server access logs + pass + + methods["__init__"] = init + methods["log_message"] = log + Handler = type("Handler", + (BaseHTTPServer.BaseHTTPRequestHandler, object), + methods) + self._svr = BaseHTTPServer.HTTPServer(("localhost", port), Handler) + + def run(): + self._svr.handle_request() + + self._thread = Thread(target=run) + self._thread.daemon = True + + def __enter__(self): + self._thread.start() + return 
self._svr + + def __exit__(self, typ, value, traceback): + self._thread.join(10) + self._svr.server_close() + + +class TestFullPost(unittest.TestCase): + + def test_post_with_body_urlencoded(self): + def check_response(handler): + length = int(handler.headers.get('content-length', 0)) + body = handler.rfile.read(length) + assert body.decode('utf-8') == "foo=bar" + + with MockServer(POST=check_response): + ctx = binding.connect(port=9093, scheme='http', token="waffle") + ctx.post("/", foo="bar") + + def test_post_with_body_string(self): + def check_response(handler): + length = int(handler.headers.get('content-length', 0)) + body = handler.rfile.read(length) + assert handler.headers['content-type'] == 'application/json' + assert json.loads(body)["baz"] == "baf" + + with MockServer(POST=check_response): + ctx = binding.connect(port=9093, scheme='http', token="waffle", + headers=[("Content-Type", "application/json")]) + ctx.post("/", foo="bar", body='{"baz": "baf"}') + + def test_post_with_body_dict(self): + def check_response(handler): + length = int(handler.headers.get('content-length', 0)) + body = handler.rfile.read(length) + assert handler.headers['content-type'] == 'application/x-www-form-urlencoded' + assert ensure_str(body) in ['baz=baf&hep=cat', 'hep=cat&baz=baf'] + + with MockServer(POST=check_response): + ctx = binding.connect(port=9093, scheme='http', token="waffle") + ctx.post("/", foo="bar", body={"baz": "baf", "hep": "cat"}) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/testlib.py b/tests/testlib.py index c3109e24..a92790e2 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -1,261 +1,261 @@ -#!/usr/bin/env python -# -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Shared unit test utilities.""" -import contextlib - -import os -import time -import logging -import sys - -# Run the test suite on the SDK without installing it. 
-sys.path.insert(0, '../') - -from time import sleep -from datetime import datetime, timedelta - -import unittest - -from utils import parse - -from splunklib import client - - - -logging.basicConfig( - filename='test.log', - level=logging.DEBUG, - format="%(asctime)s:%(levelname)s:%(message)s") - - -class NoRestartRequiredError(Exception): - pass - - -class WaitTimedOutError(Exception): - pass - - -def to_bool(x): - if x == '1': - return True - if x == '0': - return False - raise ValueError(f"Not a boolean value: {x}") - - -def tmpname(): - name = 'delete-me-' + str(os.getpid()) + str(time.time()).replace('.', '-') - return name - - -def wait(predicate, timeout=60, pause_time=0.5): - assert pause_time < timeout - start = datetime.now() - diff = timedelta(seconds=timeout) - while not predicate(): - if datetime.now() - start > diff: - logging.debug("wait timed out after %d seconds", timeout) - raise WaitTimedOutError - sleep(pause_time) - logging.debug("wait finished after %s seconds", datetime.now() - start) - - -class SDKTestCase(unittest.TestCase): - restart_already_required = False - installedApps = [] - - def assertEventuallyTrue(self, predicate, timeout=30, pause_time=0.5, - timeout_message="Operation timed out."): - assert pause_time < timeout - start = datetime.now() - diff = timedelta(seconds=timeout) - while not predicate(): - if datetime.now() - start > diff: - logging.debug("wait timed out after %d seconds", timeout) - self.fail(timeout_message) - sleep(pause_time) - logging.debug("wait finished after %s seconds", datetime.now() - start) - - def check_content(self, entity, **kwargs): - for k, v in list(kwargs): - self.assertEqual(entity[k], str(v)) - - def check_entity(self, entity): - assert entity is not None - self.assertTrue(entity.name is not None) - self.assertTrue(entity.path is not None) - - self.assertTrue(entity.state is not None) - self.assertTrue(entity.content is not None) - - # Verify access metadata - assert entity.access is not None - entity.access.app - entity.access.owner - entity.access.sharing - - # Verify content metadata - - # In some cases, the REST API does not return field metadata for when - # entities are intially listed by a collection, so we refresh to make - # sure the metadata is available. - entity.refresh() - - self.assertTrue(isinstance(entity.fields.required, list)) - self.assertTrue(isinstance(entity.fields.optional, list)) - self.assertTrue(isinstance(entity.fields.wildcard, list)) - - # Verify that all required fields appear in entity content - - for field in entity.fields.required: - try: - self.assertTrue(field in entity.content) - except: - # Check for known exceptions - if "configs/conf-times" in entity.path: - if field in ["is_sub_menu"]: - continue - raise - - def clear_restart_message(self): - """Tell Splunk to forget that it needs to be restarted. - - This is used mostly in cases such as deleting a temporary application. - Splunk asks to be restarted when that happens, but unless the application - contained modular input kinds or the like, it isn't necessary. 
- """ - if not self.service.restart_required: - raise ValueError("Tried to clear restart message when there was none.") - try: - self.service.delete("messages/restart_required") - except client.HTTPError as he: - if he.status != 404: - raise - - @contextlib.contextmanager - def fake_splunk_version(self, version): - original_version = self.service.splunk_version - try: - self.service._splunk_version = version - yield - finally: - self.service._splunk_version = original_version - - def install_app_from_collection(self, name): - collectionName = 'sdkappcollection' - if collectionName not in self.service.apps: - raise ValueError("sdk-test-application not installed in splunkd") - appPath = self.pathInApp(collectionName, ["build", name + ".tar"]) - kwargs = {"update": True, "name": appPath, "filename": True} - - try: - self.service.post("apps/local", **kwargs) - except client.HTTPError as he: - if he.status == 400: - raise IOError(f"App {name} not found in app collection") - if self.service.restart_required: - self.service.restart(120) - self.installedApps.append(name) - - def app_collection_installed(self): - collectionName = 'sdkappcollection' - return collectionName in self.service.apps - - def pathInApp(self, appName, pathComponents): - r"""Return a path to *pathComponents* in *appName*. - - `pathInApp` is used to refer to files in applications installed with - `install_app_from_collection`. For example, the app `file_to_upload` in - the collection contains `log.txt`. To get the path to it, call:: - - pathInApp('file_to_upload', ['log.txt']) - - The path to `setup.xml` in `has_setup_xml` would be fetched with:: - - pathInApp('has_setup_xml', ['default', 'setup.xml']) - - `pathInApp` figures out the correct separator to use (based on whether - splunkd is running on Windows or Unix) and joins the elements in - *pathComponents* into a path relative to the application specified by - *appName*. - - *pathComponents* should be a list of strings giving the components. - This function will try to figure out the correct separator (/ or \) - for the platform that splunkd is running on and construct the path - as needed. - - :return: A string giving the path. - """ - splunkHome = self.service.settings['SPLUNK_HOME'] - if "\\" in splunkHome: - # This clause must come first, since Windows machines may - # have mixed \ and / in their paths. - separator = "\\" - elif "/" in splunkHome: - separator = "/" - else: - raise ValueError("No separators in $SPLUNK_HOME. Can't determine what file separator to use.") - appPath = separator.join([splunkHome, "etc", "apps", appName] + pathComponents) - return appPath - - def uncheckedRestartSplunk(self, timeout=240): - self.service.restart(timeout) - - def restartSplunk(self, timeout=240): - if self.service.restart_required: - self.service.restart(timeout) - else: - raise NoRestartRequiredError() - - @classmethod - def setUpClass(cls): - cls.opts = parse([], {}, ".env") - cls.opts.kwargs.update({'retries': 3}) - # Before we start, make sure splunk doesn't need a restart. - service = client.connect(**cls.opts.kwargs) - if service.restart_required: - service.restart(timeout=120) - - def setUp(self): - unittest.TestCase.setUp(self) - self.opts.kwargs.update({'retries': 3}) - self.service = client.connect(**self.opts.kwargs) - # If Splunk is in a state requiring restart, go ahead - # and restart. That way we'll be sane for the rest of - # the test. 
-        if self.service.restart_required:
-            self.restartSplunk()
-        logging.debug("Connected to splunkd version %s", '.'.join(str(x) for x in self.service.splunk_version))
-
-    def tearDown(self):
-        from splunklib.binding import HTTPError
-
-        if self.service.restart_required:
-            self.fail("Test left Splunk in a state requiring a restart.")
-
-        for appName in self.installedApps:
-            if appName in self.service.apps:
-                try:
-                    self.service.apps.delete(appName)
-                    wait(lambda: appName not in self.service.apps)
-                except HTTPError as error:
-                    if not (os.name == 'nt' and error.status == 500):
-                        raise
-                    print(f'Ignoring failure to delete {appName} during tear down: {error}')
-        if self.service.restart_required:
-            self.clear_restart_message()
+#!/usr/bin/env python
+#
+# Copyright © 2011-2024 Splunk, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"): you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Shared unit test utilities."""
+import contextlib
+
+import os
+import time
+import logging
+import sys
+
+# Run the test suite on the SDK without installing it.
+sys.path.insert(0, '../')
+
+from time import sleep
+from datetime import datetime, timedelta
+
+import unittest
+
+from utils import parse
+
+from splunklib import client
+
+
+
+logging.basicConfig(
+    filename='test.log',
+    level=logging.DEBUG,
+    format="%(asctime)s:%(levelname)s:%(message)s")
+
+
+class NoRestartRequiredError(Exception):
+    pass
+
+
+class WaitTimedOutError(Exception):
+    pass
+
+
+def to_bool(x):
+    if x == '1':
+        return True
+    if x == '0':
+        return False
+    raise ValueError(f"Not a boolean value: {x}")
+
+
+def tmpname():
+    name = 'delete-me-' + str(os.getpid()) + str(time.time()).replace('.', '-')
+    return name
+
+
+def wait(predicate, timeout=60, pause_time=0.5):
+    assert pause_time < timeout
+    start = datetime.now()
+    diff = timedelta(seconds=timeout)
+    while not predicate():
+        if datetime.now() - start > diff:
+            logging.debug("wait timed out after %d seconds", timeout)
+            raise WaitTimedOutError
+        sleep(pause_time)
+    logging.debug("wait finished after %s seconds", datetime.now() - start)
+
+
+class SDKTestCase(unittest.TestCase):
+    restart_already_required = False
+    installedApps = []
+
+    def assertEventuallyTrue(self, predicate, timeout=30, pause_time=0.5,
+                             timeout_message="Operation timed out."):
+        assert pause_time < timeout
+        start = datetime.now()
+        diff = timedelta(seconds=timeout)
+        while not predicate():
+            if datetime.now() - start > diff:
+                logging.debug("wait timed out after %d seconds", timeout)
+                self.fail(timeout_message)
+            sleep(pause_time)
+        logging.debug("wait finished after %s seconds", datetime.now() - start)
+
+    def check_content(self, entity, **kwargs):
+        for k, v in kwargs.items():
+            self.assertEqual(entity[k], str(v))
+
+    def check_entity(self, entity):
+        assert entity is not None
+        self.assertTrue(entity.name is not None)
+        self.assertTrue(entity.path is not None)
+
+        self.assertTrue(entity.state is not None)
+        self.assertTrue(entity.content is not None)
+
+        # Verify access metadata
+        assert entity.access is not None
+        entity.access.app
+        entity.access.owner
+        entity.access.sharing
+
+        # Verify content metadata
+
+        # In some cases, the REST API does not return field metadata when
+        # entities are initially listed by a collection, so we refresh to make
+        # sure the metadata is available.
+        entity.refresh()
+
+        self.assertTrue(isinstance(entity.fields.required, list))
+        self.assertTrue(isinstance(entity.fields.optional, list))
+        self.assertTrue(isinstance(entity.fields.wildcard, list))
+
+        # Verify that all required fields appear in entity content
+
+        for field in entity.fields.required:
+            try:
+                self.assertTrue(field in entity.content)
+            except:
+                # Check for known exceptions
+                if "configs/conf-times" in entity.path:
+                    if field in ["is_sub_menu"]:
+                        continue
+                raise
+
+    def clear_restart_message(self):
+        """Tell Splunk to forget that it needs to be restarted.
+
+        This is used mostly in cases such as deleting a temporary application.
+        Splunk asks to be restarted when that happens, but unless the application
+        contained modular input kinds or the like, it isn't necessary.
+        """
+        if not self.service.restart_required:
+            raise ValueError("Tried to clear restart message when there was none.")
+        try:
+            self.service.delete("messages/restart_required")
+        except client.HTTPError as he:
+            if he.status != 404:
+                raise
+
+    @contextlib.contextmanager
+    def fake_splunk_version(self, version):
+        original_version = self.service.splunk_version
+        try:
+            self.service._splunk_version = version
+            yield
+        finally:
+            self.service._splunk_version = original_version
+
+    def install_app_from_collection(self, name):
+        collectionName = 'sdkappcollection'
+        if collectionName not in self.service.apps:
+            raise ValueError("sdk-test-application not installed in splunkd")
+        appPath = self.pathInApp(collectionName, ["build", name + ".tar"])
+        kwargs = {"update": True, "name": appPath, "filename": True}
+
+        try:
+            self.service.post("apps/local", **kwargs)
+        except client.HTTPError as he:
+            if he.status == 400:
+                raise IOError(f"App {name} not found in app collection")
+        if self.service.restart_required:
+            self.service.restart(120)
+        self.installedApps.append(name)
+
+    def app_collection_installed(self):
+        collectionName = 'sdkappcollection'
+        return collectionName in self.service.apps
+
+    def pathInApp(self, appName, pathComponents):
+        r"""Return a path to *pathComponents* in *appName*.
+
+        `pathInApp` is used to refer to files in applications installed with
+        `install_app_from_collection`. For example, the app `file_to_upload` in
+        the collection contains `log.txt`. To get the path to it, call::
+
+            pathInApp('file_to_upload', ['log.txt'])
+
+        The path to `setup.xml` in `has_setup_xml` would be fetched with::
+
+            pathInApp('has_setup_xml', ['default', 'setup.xml'])
+
+        `pathInApp` figures out the correct separator to use (based on whether
+        splunkd is running on Windows or Unix) and joins the elements in
+        *pathComponents* into a path relative to the application specified by
+        *appName*.
+
+        *pathComponents* should be a list of strings giving the components.
+        This function will try to figure out the correct separator (/ or \)
+        for the platform that splunkd is running on and construct the path
+        as needed.
+
+        :return: A string giving the path.
+        """
+        splunkHome = self.service.settings['SPLUNK_HOME']
+        if "\\" in splunkHome:
+            # This clause must come first, since Windows machines may
+            # have mixed \ and / in their paths.
+            separator = "\\"
+        elif "/" in splunkHome:
+            separator = "/"
+        else:
+            raise ValueError("No separators in $SPLUNK_HOME. 
Can't determine what file separator to use.") + appPath = separator.join([splunkHome, "etc", "apps", appName] + pathComponents) + return appPath + + def uncheckedRestartSplunk(self, timeout=240): + self.service.restart(timeout) + + def restartSplunk(self, timeout=240): + if self.service.restart_required: + self.service.restart(timeout) + else: + raise NoRestartRequiredError() + + @classmethod + def setUpClass(cls): + cls.opts = parse([], {}, ".env") + cls.opts.kwargs.update({'retries': 3}) + # Before we start, make sure splunk doesn't need a restart. + service = client.connect(**cls.opts.kwargs) + if service.restart_required: + service.restart(timeout=120) + + def setUp(self): + unittest.TestCase.setUp(self) + self.opts.kwargs.update({'retries': 3}) + self.service = client.connect(**self.opts.kwargs) + # If Splunk is in a state requiring restart, go ahead + # and restart. That way we'll be sane for the rest of + # the test. + if self.service.restart_required: + self.restartSplunk() + logging.debug("Connected to splunkd version %s", '.'.join(str(x) for x in self.service.splunk_version)) + + def tearDown(self): + from splunklib.binding import HTTPError + + if self.service.restart_required: + self.fail("Test left Splunk in a state requiring a restart.") + + for appName in self.installedApps: + if appName in self.service.apps: + try: + self.service.apps.delete(appName) + wait(lambda: appName not in self.service.apps) + except HTTPError as error: + if not (os.name == 'nt' and error.status == 500): + raise + print(f'Ignoring failure to delete {appName} during tear down: {error}') + if self.service.restart_required: + self.clear_restart_message() From e23fa4dee2107c65e862c11794fd65c8f86d0c1f Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 13 Mar 2024 17:23:33 +0530 Subject: [PATCH 55/60] Update internals.py --- splunklib/searchcommands/internals.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py index 962d8c8b..def04951 100644 --- a/splunklib/searchcommands/internals.py +++ b/splunklib/searchcommands/internals.py @@ -789,8 +789,6 @@ def write_metadata(self, configuration): metadata = chain(configuration.items(), (('inspector', self._inspector if self._inspector else None),)) self._write_chunk(metadata, '') - # Removed additional new line - # self.write('\n') self._clear() def write_metric(self, name, value): From a633d6f5402ad99f79a7ae36427b5cac65ed5962 Mon Sep 17 00:00:00 2001 From: maszyk99 Date: Wed, 13 Mar 2024 15:21:32 +0100 Subject: [PATCH 56/60] Add comment about json encoder back --- splunklib/searchcommands/internals.py | 1 + 1 file changed, 1 insertion(+) diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py index def04951..abceac30 100644 --- a/splunklib/searchcommands/internals.py +++ b/splunklib/searchcommands/internals.py @@ -669,6 +669,7 @@ def _write_record(self, record): # We may be running under PyPy 2.5 which does not include the _json module _iterencode_json = JSONEncoder(separators=(',', ':')).iterencode else: + # Creating _iterencode_json this way yields a two-fold performance improvement on Python 2.7.9 and 2.7.10 from json.encoder import encode_basestring_ascii @staticmethod From 85807ef1eb3b8bfcc3aca7c9b7f10e2a92492ae4 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Fri, 15 Mar 2024 12:13:58 +0530 Subject: [PATCH 57/60] reverted line separators - line separators were mistakenly updated, hence reverting it back. 
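
A revert of this shape (thousands of lines changed, no code change) can be
sanity-checked with an end-of-line-insensitive diff; a hypothetical
invocation, assuming a Git recent enough to support the flag and with
placeholder revisions:

    git diff --ignore-cr-at-eol <before> <after> -- splunklib/ tests/

which should report no remaining differences if only the separators changed.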
--- splunklib/binding.py | 2994 +++---- splunklib/client.py | 7816 +++++++++---------- splunklib/searchcommands/search_command.py | 2286 +++--- tests/searchcommands/chunked_data_stream.py | 200 +- tests/searchcommands/test_internals_v1.py | 686 +- tests/test_binding.py | 1950 ++--- tests/testlib.py | 522 +- 7 files changed, 8227 insertions(+), 8227 deletions(-) mode change 100644 => 100755 tests/searchcommands/test_internals_v1.py mode change 100644 => 100755 tests/test_binding.py diff --git a/splunklib/binding.py b/splunklib/binding.py index 7437fc2b..958be96e 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -1,1497 +1,1497 @@ -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""The **splunklib.binding** module provides a low-level binding interface to the -`Splunk REST API `_. - -This module handles the wire details of calling the REST API, such as -authentication tokens, prefix paths, URL encoding, and so on. Actual path -segments, ``GET`` and ``POST`` arguments, and the parsing of responses is left -to the user. - -If you want a friendlier interface to the Splunk REST API, use the -:mod:`splunklib.client` module. -""" - -import io -import json -import logging -import socket -import ssl -import time -from base64 import b64encode -from contextlib import contextmanager -from datetime import datetime -from functools import wraps -from io import BytesIO -from urllib import parse -from http import client -from http.cookies import SimpleCookie -from xml.etree.ElementTree import XML, ParseError -from splunklib.data import record -from splunklib import __version__ - - -logger = logging.getLogger(__name__) - -__all__ = [ - "AuthenticationError", - "connect", - "Context", - "handler", - "HTTPError", - "UrlEncoded", - "_encode", - "_make_cookie_header", - "_NoAuthenticationToken", - "namespace" -] - -SENSITIVE_KEYS = ['Authorization', 'Cookie', 'action.email.auth_password', 'auth', 'auth_password', 'clear_password', 'clientId', - 'crc-salt', 'encr_password', 'oldpassword', 'passAuth', 'password', 'session', 'suppressionKey', - 'token'] - -# If you change these, update the docstring -# on _authority as well. 
-DEFAULT_HOST = "localhost" -DEFAULT_PORT = "8089" -DEFAULT_SCHEME = "https" - - -def _log_duration(f): - @wraps(f) - def new_f(*args, **kwargs): - start_time = datetime.now() - val = f(*args, **kwargs) - end_time = datetime.now() - logger.debug("Operation took %s", end_time - start_time) - return val - - return new_f - - -def mask_sensitive_data(data): - ''' - Masked sensitive fields data for logging purpose - ''' - if not isinstance(data, dict): - try: - data = json.loads(data) - except Exception as ex: - return data - - # json.loads will return "123"(str) as 123(int), so return the data if it's not 'dict' type - if not isinstance(data, dict): - return data - mdata = {} - for k, v in data.items(): - if k in SENSITIVE_KEYS: - mdata[k] = "******" - else: - mdata[k] = mask_sensitive_data(v) - return mdata - - -def _parse_cookies(cookie_str, dictionary): - """Tries to parse any key-value pairs of cookies in a string, - then updates the the dictionary with any key-value pairs found. - - **Example**:: - - dictionary = {} - _parse_cookies('my=value', dictionary) - # Now the following is True - dictionary['my'] == 'value' - - :param cookie_str: A string containing "key=value" pairs from an HTTP "Set-Cookie" header. - :type cookie_str: ``str`` - :param dictionary: A dictionary to update with any found key-value pairs. - :type dictionary: ``dict`` - """ - parsed_cookie = SimpleCookie(cookie_str) - for cookie in parsed_cookie.values(): - dictionary[cookie.key] = cookie.coded_value - - -def _make_cookie_header(cookies): - """ - Takes a list of 2-tuples of key-value pairs of - cookies, and returns a valid HTTP ``Cookie`` - header. - - **Example**:: - - header = _make_cookie_header([("key", "value"), ("key_2", "value_2")]) - # Now the following is True - header == "key=value; key_2=value_2" - - :param cookies: A list of 2-tuples of cookie key-value pairs. - :type cookies: ``list`` of 2-tuples - :return: ``str` An HTTP header cookie string. - :rtype: ``str`` - """ - return "; ".join(f"{key}={value}" for key, value in cookies) - - -# Singleton values to eschew None -class _NoAuthenticationToken: - """The value stored in a :class:`Context` or :class:`splunklib.client.Service` - class that is not logged in. - - If a ``Context`` or ``Service`` object is created without an authentication - token, and there has not yet been a call to the ``login`` method, the token - field of the ``Context`` or ``Service`` object is set to - ``_NoAuthenticationToken``. - - Likewise, after a ``Context`` or ``Service`` object has been logged out, the - token is set to this value again. - """ - - -class UrlEncoded(str): - """This class marks URL-encoded strings. - It should be considered an SDK-private implementation detail. - - Manually tracking whether strings are URL encoded can be difficult. Avoid - calling ``urllib.quote`` to replace special characters with escapes. When - you receive a URL-encoded string, *do* use ``urllib.unquote`` to replace - escapes with single characters. Then, wrap any string you want to use as a - URL in ``UrlEncoded``. Note that because the ``UrlEncoded`` class is - idempotent, making multiple calls to it is OK. - - ``UrlEncoded`` objects are identical to ``str`` objects (including being - equal if their contents are equal) except when passed to ``UrlEncoded`` - again. - - ``UrlEncoded`` removes the ``str`` type support for interpolating values - with ``%`` (doing that raises a ``TypeError``). 
There is no reliable way to - encode values this way, so instead, interpolate into a string, quoting by - hand, and call ``UrlEncode`` with ``skip_encode=True``. - - **Example**:: - - import urllib - UrlEncoded(f'{scheme}://{urllib.quote(host)}', skip_encode=True) - - If you append ``str`` strings and ``UrlEncoded`` strings, the result is also - URL encoded. - - **Example**:: - - UrlEncoded('ab c') + 'de f' == UrlEncoded('ab cde f') - 'ab c' + UrlEncoded('de f') == UrlEncoded('ab cde f') - """ - - def __new__(self, val='', skip_encode=False, encode_slash=False): - if isinstance(val, UrlEncoded): - # Don't urllib.quote something already URL encoded. - return val - if skip_encode: - return str.__new__(self, val) - if encode_slash: - return str.__new__(self, parse.quote_plus(val)) - # When subclassing str, just call str.__new__ method - # with your class and the value you want to have in the - # new string. - return str.__new__(self, parse.quote(val)) - - def __add__(self, other): - """self + other - - If *other* is not a ``UrlEncoded``, URL encode it before - adding it. - """ - if isinstance(other, UrlEncoded): - return UrlEncoded(str.__add__(self, other), skip_encode=True) - - return UrlEncoded(str.__add__(self, parse.quote(other)), skip_encode=True) - - def __radd__(self, other): - """other + self - - If *other* is not a ``UrlEncoded``, URL _encode it before - adding it. - """ - if isinstance(other, UrlEncoded): - return UrlEncoded(str.__radd__(self, other), skip_encode=True) - - return UrlEncoded(str.__add__(parse.quote(other), self), skip_encode=True) - - def __mod__(self, fields): - """Interpolation into ``UrlEncoded``s is disabled. - - If you try to write ``UrlEncoded("%s") % "abc", will get a - ``TypeError``. - """ - raise TypeError("Cannot interpolate into a UrlEncoded object.") - - def __repr__(self): - return f"UrlEncoded({repr(parse.unquote(str(self)))})" - - -@contextmanager -def _handle_auth_error(msg): - """Handle re-raising HTTP authentication errors as something clearer. - - If an ``HTTPError`` is raised with status 401 (access denied) in - the body of this context manager, re-raise it as an - ``AuthenticationError`` instead, with *msg* as its message. - - This function adds no round trips to the server. - - :param msg: The message to be raised in ``AuthenticationError``. - :type msg: ``str`` - - **Example**:: - - with _handle_auth_error("Your login failed."): - ... # make an HTTP request - """ - try: - yield - except HTTPError as he: - if he.status == 401: - raise AuthenticationError(msg, he) - else: - raise - - -def _authentication(request_fun): - """Decorator to handle autologin and authentication errors. - - *request_fun* is a function taking no arguments that needs to - be run with this ``Context`` logged into Splunk. - - ``_authentication``'s behavior depends on whether the - ``autologin`` field of ``Context`` is set to ``True`` or - ``False``. If it's ``False``, then ``_authentication`` - aborts if the ``Context`` is not logged in, and raises an - ``AuthenticationError`` if an ``HTTPError`` of status 401 is - raised in *request_fun*. If it's ``True``, then - ``_authentication`` will try at all sensible places to - log in before issuing the request. - - If ``autologin`` is ``False``, ``_authentication`` makes - one roundtrip to the server if the ``Context`` is logged in, - or zero if it is not. If ``autologin`` is ``True``, it's less - deterministic, and may make at most three roundtrips (though - that would be a truly pathological case). 
- - :param request_fun: A function of no arguments encapsulating - the request to make to the server. - - **Example**:: - - import splunklib.binding as binding - c = binding.connect(..., autologin=True) - c.logout() - def f(): - c.get("/services") - return 42 - print(_authentication(f)) - """ - - @wraps(request_fun) - def wrapper(self, *args, **kwargs): - if self.token is _NoAuthenticationToken and not self.has_cookies(): - # Not yet logged in. - if self.autologin and self.username and self.password: - # This will throw an uncaught - # AuthenticationError if it fails. - self.login() - else: - # Try the request anyway without authentication. - # Most requests will fail. Some will succeed, such as - # 'GET server/info'. - with _handle_auth_error("Request aborted: not logged in."): - return request_fun(self, *args, **kwargs) - try: - # Issue the request - return request_fun(self, *args, **kwargs) - except HTTPError as he: - if he.status == 401 and self.autologin: - # Authentication failed. Try logging in, and then - # rerunning the request. If either step fails, throw - # an AuthenticationError and give up. - with _handle_auth_error("Autologin failed."): - self.login() - with _handle_auth_error("Authentication Failed! If session token is used, it seems to have been expired."): - return request_fun(self, *args, **kwargs) - elif he.status == 401 and not self.autologin: - raise AuthenticationError( - "Request failed: Session is not logged in.", he) - else: - raise - - return wrapper - - -def _authority(scheme=DEFAULT_SCHEME, host=DEFAULT_HOST, port=DEFAULT_PORT): - """Construct a URL authority from the given *scheme*, *host*, and *port*. - - Named in accordance with RFC2396_, which defines URLs as:: - - ://? - - .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt - - So ``https://localhost:8000/a/b/b?boris=hilda`` would be parsed as:: - - scheme := https - authority := localhost:8000 - path := /a/b/c - query := boris=hilda - - :param scheme: URL scheme (the default is "https") - :type scheme: "http" or "https" - :param host: The host name (the default is "localhost") - :type host: string - :param port: The port number (the default is 8089) - :type port: integer - :return: The URL authority. - :rtype: UrlEncoded (subclass of ``str``) - - **Example**:: - - _authority() == "https://localhost:8089" - - _authority(host="splunk.utopia.net") == "https://splunk.utopia.net:8089" - - _authority(host="2001:0db8:85a3:0000:0000:8a2e:0370:7334") == \ - "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089" - - _authority(scheme="http", host="splunk.utopia.net", port="471") == \ - "http://splunk.utopia.net:471" - - """ - # check if host is an IPv6 address and not enclosed in [ ] - if ':' in host and not (host.startswith('[') and host.endswith(']')): - # IPv6 addresses must be enclosed in [ ] in order to be well - # formed. - host = '[' + host + ']' - return UrlEncoded(f"{scheme}://{host}:{port}", skip_encode=True) - - -# kwargs: sharing, owner, app -def namespace(sharing=None, owner=None, app=None, **kwargs): - """This function constructs a Splunk namespace. - - Every Splunk resource belongs to a namespace. The namespace is specified by - the pair of values ``owner`` and ``app`` and is governed by a ``sharing`` mode. 
- The possible values for ``sharing`` are: "user", "app", "global" and "system", - which map to the following combinations of ``owner`` and ``app`` values: - - "user" => {owner}, {app} - - "app" => nobody, {app} - - "global" => nobody, {app} - - "system" => nobody, system - - "nobody" is a special user name that basically means no user, and "system" - is the name reserved for system resources. - - "-" is a wildcard that can be used for both ``owner`` and ``app`` values and - refers to all users and all apps, respectively. - - In general, when you specify a namespace you can specify any combination of - these three values and the library will reconcile the triple, overriding the - provided values as appropriate. - - Finally, if no namespacing is specified the library will make use of the - ``/services`` branch of the REST API, which provides a namespaced view of - Splunk resources equivelent to using ``owner={currentUser}`` and - ``app={defaultApp}``. - - The ``namespace`` function returns a representation of the namespace from - reconciling the values you provide. It ignores any keyword arguments other - than ``owner``, ``app``, and ``sharing``, so you can provide ``dicts`` of - configuration information without first having to extract individual keys. - - :param sharing: The sharing mode (the default is "user"). - :type sharing: "system", "global", "app", or "user" - :param owner: The owner context (the default is "None"). - :type owner: ``string`` - :param app: The app context (the default is "None"). - :type app: ``string`` - :returns: A :class:`splunklib.data.Record` containing the reconciled - namespace. - - **Example**:: - - import splunklib.binding as binding - n = binding.namespace(sharing="user", owner="boris", app="search") - n = binding.namespace(sharing="global", app="search") - """ - if sharing in ["system"]: - return record({'sharing': sharing, 'owner': "nobody", 'app': "system"}) - if sharing in ["global", "app"]: - return record({'sharing': sharing, 'owner': "nobody", 'app': app}) - if sharing in ["user", None]: - return record({'sharing': sharing, 'owner': owner, 'app': app}) - raise ValueError("Invalid value for argument: 'sharing'") - - -class Context: - """This class represents a context that encapsulates a splunkd connection. - - The ``Context`` class encapsulates the details of HTTP requests, - authentication, a default namespace, and URL prefixes to simplify access to - the REST API. - - After creating a ``Context`` object, you must call its :meth:`login` - method before you can issue requests to splunkd. Or, use the :func:`connect` - function to create an already-authenticated ``Context`` object. You can - provide a session token explicitly (the same token can be shared by multiple - ``Context`` objects) to provide authentication. - - :param host: The host name (the default is "localhost"). - :type host: ``string`` - :param port: The port number (the default is 8089). - :type port: ``integer`` - :param scheme: The scheme for accessing the service (the default is "https"). - :type scheme: "https" or "http" - :param verify: Enable (True) or disable (False) SSL verification for https connections. - :type verify: ``Boolean`` - :param sharing: The sharing mode for the namespace (the default is "user"). - :type sharing: "global", "system", "app", or "user" - :param owner: The owner context of the namespace (optional, the default is "None"). - :type owner: ``string`` - :param app: The app context of the namespace (optional, the default is "None"). 
- :type app: ``string`` - :param token: A session token. When provided, you don't need to call :meth:`login`. - :type token: ``string`` - :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. - This parameter is only supported for Splunk 6.2+. - :type cookie: ``string`` - :param username: The Splunk account username, which is used to - authenticate the Splunk instance. - :type username: ``string`` - :param password: The password for the Splunk account. - :type password: ``string`` - :param splunkToken: Splunk authentication token - :type splunkToken: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param retires: Number of retries for each HTTP connection (optional, the default is 0). - NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER AND BLOCK THE - CURRENT THREAD WHILE RETRYING. - :type retries: ``int`` - :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s). - :type retryDelay: ``int`` (in seconds) - :param handler: The HTTP request handler (optional). - :returns: A ``Context`` instance. - - **Example**:: - - import splunklib.binding as binding - c = binding.Context(username="boris", password="natasha", ...) - c.login() - # Or equivalently - c = binding.connect(username="boris", password="natasha") - # Or if you already have a session token - c = binding.Context(token="atg232342aa34324a") - # Or if you already have a valid cookie - c = binding.Context(cookie="splunkd_8089=...") - """ - - def __init__(self, handler=None, **kwargs): - self.http = HttpLib(handler, kwargs.get("verify", False), key_file=kwargs.get("key_file"), - cert_file=kwargs.get("cert_file"), context=kwargs.get("context"), - # Default to False for backward compat - retries=kwargs.get("retries", 0), retryDelay=kwargs.get("retryDelay", 10)) - self.token = kwargs.get("token", _NoAuthenticationToken) - if self.token is None: # In case someone explicitly passes token=None - self.token = _NoAuthenticationToken - self.scheme = kwargs.get("scheme", DEFAULT_SCHEME) - self.host = kwargs.get("host", DEFAULT_HOST) - self.port = int(kwargs.get("port", DEFAULT_PORT)) - self.authority = _authority(self.scheme, self.host, self.port) - self.namespace = namespace(**kwargs) - self.username = kwargs.get("username", "") - self.password = kwargs.get("password", "") - self.basic = kwargs.get("basic", False) - self.bearerToken = kwargs.get("splunkToken", "") - self.autologin = kwargs.get("autologin", False) - self.additional_headers = kwargs.get("headers", []) - - # Store any cookies in the self.http._cookies dict - if "cookie" in kwargs and kwargs['cookie'] not in [None, _NoAuthenticationToken]: - _parse_cookies(kwargs["cookie"], self.http._cookies) - - def get_cookies(self): - """Gets the dictionary of cookies from the ``HttpLib`` member of this instance. - - :return: Dictionary of cookies stored on the ``self.http``. - :rtype: ``dict`` - """ - return self.http._cookies - - def has_cookies(self): - """Returns true if the ``HttpLib`` member of this instance has auth token stored. - - :return: ``True`` if there is auth token present, else ``False`` - :rtype: ``bool`` - """ - auth_token_key = "splunkd_" - return any(auth_token_key in key for key in self.get_cookies().keys()) - - # Shared per-context request headers - @property - def _auth_headers(self): - """Headers required to authenticate a request. 
- - Assumes your ``Context`` already has a authentication token or - cookie, either provided explicitly or obtained by logging - into the Splunk instance. - - :returns: A list of 2-tuples containing key and value - """ - header = [] - if self.has_cookies(): - return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))] - elif self.basic and (self.username and self.password): - token = f'Basic {b64encode(("%s:%s" % (self.username, self.password)).encode("utf-8")).decode("ascii")}' - elif self.bearerToken: - token = f'Bearer {self.bearerToken}' - elif self.token is _NoAuthenticationToken: - token = [] - else: - # Ensure the token is properly formatted - if self.token.startswith('Splunk '): - token = self.token - else: - token = f'Splunk {self.token}' - if token: - header.append(("Authorization", token)) - if self.get_cookies(): - header.append(("Cookie", _make_cookie_header(list(self.get_cookies().items())))) - - return header - - def connect(self): - """Returns an open connection (socket) to the Splunk instance. - - This method is used for writing bulk events to an index or similar tasks - where the overhead of opening a connection multiple times would be - prohibitive. - - :returns: A socket. - - **Example**:: - - import splunklib.binding as binding - c = binding.connect(...) - socket = c.connect() - socket.write("POST %s HTTP/1.1\\r\\n" % "some/path/to/post/to") - socket.write("Host: %s:%s\\r\\n" % (c.host, c.port)) - socket.write("Accept-Encoding: identity\\r\\n") - socket.write("Authorization: %s\\r\\n" % c.token) - socket.write("X-Splunk-Input-Mode: Streaming\\r\\n") - socket.write("\\r\\n") - """ - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - if self.scheme == "https": - sock = ssl.wrap_socket(sock) - sock.connect((socket.gethostbyname(self.host), self.port)) - return sock - - @_authentication - @_log_duration - def delete(self, path_segment, owner=None, app=None, sharing=None, **query): - """Performs a DELETE operation at the REST path segment with the given - namespace and query. - - This method is named to match the HTTP method. ``delete`` makes at least - one round trip to the server, one additional round trip for each 303 - status returned, and at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - If *owner*, *app*, and *sharing* are omitted, this method uses the - default :class:`Context` namespace. All other keyword arguments are - included in the URL as query parameters. - - :raises AuthenticationError: Raised when the ``Context`` object is not - logged in. - :raises HTTPError: Raised when an error occurred in a GET operation from - *path_segment*. - :param path_segment: A REST path segment. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :param query: All other keyword arguments, which are used as query - parameters. - :type query: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - c = binding.connect(...) 
- c.delete('saved/searches/boris') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '1786'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 16:53:06 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'OK', - 'status': 200} - c.delete('nonexistant/path') # raises HTTPError - c.logout() - c.delete('apps/local') # raises AuthenticationError - """ - path = self.authority + self._abspath(path_segment, owner=owner, - app=app, sharing=sharing) - logger.debug("DELETE request to %s (body: %s)", path, mask_sensitive_data(query)) - response = self.http.delete(path, self._auth_headers, **query) - return response - - @_authentication - @_log_duration - def get(self, path_segment, owner=None, app=None, headers=None, sharing=None, **query): - """Performs a GET operation from the REST path segment with the given - namespace and query. - - This method is named to match the HTTP method. ``get`` makes at least - one round trip to the server, one additional round trip for each 303 - status returned, and at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - If *owner*, *app*, and *sharing* are omitted, this method uses the - default :class:`Context` namespace. All other keyword arguments are - included in the URL as query parameters. - - :raises AuthenticationError: Raised when the ``Context`` object is not - logged in. - :raises HTTPError: Raised when an error occurred in a GET operation from - *path_segment*. - :param path_segment: A REST path segment. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :param query: All other keyword arguments, which are used as query - parameters. - :type query: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - c = binding.connect(...) - c.get('apps/local') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '26208'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 16:30:35 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'OK', - 'status': 200} - c.get('nonexistant/path') # raises HTTPError - c.logout() - c.get('apps/local') # raises AuthenticationError - """ - if headers is None: - headers = [] - - path = self.authority + self._abspath(path_segment, owner=owner, - app=app, sharing=sharing) - logger.debug("GET request to %s (body: %s)", path, mask_sensitive_data(query)) - all_headers = headers + self.additional_headers + self._auth_headers - response = self.http.get(path, all_headers, **query) - return response - - @_authentication - @_log_duration - def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, **query): - """Performs a POST operation from the REST path segment with the given - namespace and query. 
- - This method is named to match the HTTP method. ``post`` makes at least - one round trip to the server, one additional round trip for each 303 - status returned, and at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - If *owner*, *app*, and *sharing* are omitted, this method uses the - default :class:`Context` namespace. All other keyword arguments are - included in the URL as query parameters. - - Some of Splunk's endpoints, such as ``receivers/simple`` and - ``receivers/stream``, require unstructured data in the POST body - and all metadata passed as GET-style arguments. If you provide - a ``body`` argument to ``post``, it will be used as the POST - body, and all other keyword arguments will be passed as - GET-style arguments in the URL. - - :raises AuthenticationError: Raised when the ``Context`` object is not - logged in. - :raises HTTPError: Raised when an error occurred in a GET operation from - *path_segment*. - :param path_segment: A REST path segment. - :type path_segment: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param query: All other keyword arguments, which are used as query - parameters. - :param body: Parameters to be used in the post body. If specified, - any parameters in the query will be applied to the URL instead of - the body. If a dict is supplied, the key-value pairs will be form - encoded. If a string is supplied, the body will be passed through - in the request unchanged. - :type body: ``dict`` or ``str`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - c = binding.connect(...) - c.post('saved/searches', name='boris', - search='search * earliest=-1m | head 1') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '10455'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 16:46:06 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'Created', - 'status': 201} - c.post('nonexistant/path') # raises HTTPError - c.logout() - # raises AuthenticationError: - c.post('saved/searches', name='boris', - search='search * earliest=-1m | head 1') - """ - if headers is None: - headers = [] - - path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - - logger.debug("POST request to %s (body: %s)", path, mask_sensitive_data(query)) - all_headers = headers + self.additional_headers + self._auth_headers - response = self.http.post(path, all_headers, **query) - return response - - @_authentication - @_log_duration - def request(self, path_segment, method="GET", headers=None, body={}, - owner=None, app=None, sharing=None): - """Issues an arbitrary HTTP request to the REST path segment. - - This method is named to match ``httplib.request``. This function - makes a single round trip to the server. - - If *owner*, *app*, and *sharing* are omitted, this method uses the - default :class:`Context` namespace. All other keyword arguments are - included in the URL as query parameters. 
- - :raises AuthenticationError: Raised when the ``Context`` object is not - logged in. - :raises HTTPError: Raised when an error occurred in a GET operation from - *path_segment*. - :param path_segment: A REST path segment. - :type path_segment: ``string`` - :param method: The HTTP method to use (optional). - :type method: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param body: Content of the HTTP request (optional). - :type body: ``string`` - :param owner: The owner context of the namespace (optional). - :type owner: ``string`` - :param app: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode of the namespace (optional). - :type sharing: ``string`` - :return: The response from the server. - :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, - and ``status`` - - **Example**:: - - c = binding.connect(...) - c.request('saved/searches', method='GET') == \\ - {'body': ...a response reader object..., - 'headers': [('content-length', '46722'), - ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), - ('server', 'Splunkd'), - ('connection', 'close'), - ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), - ('date', 'Fri, 11 May 2012 17:24:19 GMT'), - ('content-type', 'text/xml; charset=utf-8')], - 'reason': 'OK', - 'status': 200} - c.request('nonexistant/path', method='GET') # raises HTTPError - c.logout() - c.get('apps/local') # raises AuthenticationError - """ - if headers is None: - headers = [] - - path = self.authority \ - + self._abspath(path_segment, owner=owner, - app=app, sharing=sharing) - - all_headers = headers + self.additional_headers + self._auth_headers - logger.debug("%s request to %s (headers: %s, body: %s)", - method, path, str(mask_sensitive_data(dict(all_headers))), mask_sensitive_data(body)) - if body: - body = _encode(**body) - - if method == "GET": - path = path + UrlEncoded('?' + body, skip_encode=True) - message = {'method': method, - 'headers': all_headers} - else: - message = {'method': method, - 'headers': all_headers, - 'body': body} - else: - message = {'method': method, - 'headers': all_headers} - - response = self.http.request(path, message) - - return response - - def login(self): - """Logs into the Splunk instance referred to by the :class:`Context` - object. - - Unless a ``Context`` is created with an explicit authentication token - (probably obtained by logging in from a different ``Context`` object) - you must call :meth:`login` before you can issue requests. - The authentication token obtained from the server is stored in the - ``token`` field of the ``Context`` object. - - :raises AuthenticationError: Raised when login fails. - :returns: The ``Context`` object, so you can chain calls. - - **Example**:: - - import splunklib.binding as binding - c = binding.Context(...).login() - # Then issue requests... - """ - - if self.has_cookies() and \ - (not self.username and not self.password): - # If we were passed session cookie(s), but no username or - # password, then login is a nop, since we're automatically - # logged in. - return - - if self.token is not _NoAuthenticationToken and \ - (not self.username and not self.password): - # If we were passed a session token, but no username or - # password, then login is a nop, since we're automatically - # logged in. 
- return - - if self.basic and (self.username and self.password): - # Basic auth mode requested, so this method is a nop as long - # as credentials were passed in. - return - - if self.bearerToken: - # Bearer auth mode requested, so this method is a nop as long - # as authentication token was passed in. - return - # Only try to get a token and updated cookie if username & password are specified - try: - response = self.http.post( - self.authority + self._abspath("/services/auth/login"), - username=self.username, - password=self.password, - headers=self.additional_headers, - cookie="1") # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header - - body = response.body.read() - session = XML(body).findtext("./sessionKey") - self.token = f"Splunk {session}" - return self - except HTTPError as he: - if he.status == 401: - raise AuthenticationError("Login failed.", he) - else: - raise - - def logout(self): - """Forgets the current session token, and cookies.""" - self.token = _NoAuthenticationToken - self.http._cookies = {} - return self - - def _abspath(self, path_segment, - owner=None, app=None, sharing=None): - """Qualifies *path_segment* into an absolute path for a URL. - - If *path_segment* is already absolute, returns it unchanged. - If *path_segment* is relative, then qualifies it with either - the provided namespace arguments or the ``Context``'s default - namespace. Any forbidden characters in *path_segment* are URL - encoded. This function has no network activity. - - Named to be consistent with RFC2396_. - - .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt - - :param path_segment: A relative or absolute URL path segment. - :type path_segment: ``string`` - :param owner, app, sharing: Components of a namespace (defaults - to the ``Context``'s namespace if all - three are omitted) - :type owner, app, sharing: ``string`` - :return: A ``UrlEncoded`` (a subclass of ``str``). - :rtype: ``string`` - - **Example**:: - - import splunklib.binding as binding - c = binding.connect(owner='boris', app='search', sharing='user') - c._abspath('/a/b/c') == '/a/b/c' - c._abspath('/a/b c/d') == '/a/b%20c/d' - c._abspath('apps/local/search') == \ - '/servicesNS/boris/search/apps/local/search' - c._abspath('apps/local/search', sharing='system') == \ - '/servicesNS/nobody/system/apps/local/search' - url = c.authority + c._abspath('apps/local/sharing') - """ - skip_encode = isinstance(path_segment, UrlEncoded) - # If path_segment is absolute, escape all forbidden characters - # in it and return it. - if path_segment.startswith('/'): - return UrlEncoded(path_segment, skip_encode=skip_encode) - - # path_segment is relative, so we need a namespace to build an - # absolute path. - if owner or app or sharing: - ns = namespace(owner=owner, app=app, sharing=sharing) - else: - ns = self.namespace - - # If no app or owner are specified, then use the /services - # endpoint. Otherwise, use /servicesNS with the specified - # namespace. If only one of app and owner is specified, use - # '-' for the other. - if ns.app is None and ns.owner is None: - return UrlEncoded(f"/services/{path_segment}", skip_encode=skip_encode) - - oname = "nobody" if ns.owner is None else ns.owner - aname = "system" if ns.app is None else ns.app - path = UrlEncoded(f"/servicesNS/{oname}/{aname}/{path_segment}", skip_encode=skip_encode) - return path - - -def connect(**kwargs): - """This function returns an authenticated :class:`Context` object. - - This function is a shorthand for calling :meth:`Context.login`. 
- - This function makes one round trip to the server. - - :param host: The host name (the default is "localhost"). - :type host: ``string`` - :param port: The port number (the default is 8089). - :type port: ``integer`` - :param scheme: The scheme for accessing the service (the default is "https"). - :type scheme: "https" or "http" - :param owner: The owner context of the namespace (the default is "None"). - :type owner: ``string`` - :param app: The app context of the namespace (the default is "None"). - :type app: ``string`` - :param sharing: The sharing mode for the namespace (the default is "user"). - :type sharing: "global", "system", "app", or "user" - :param token: The current session token (optional). Session tokens can be - shared across multiple service instances. - :type token: ``string`` - :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. - This parameter is only supported for Splunk 6.2+. - :type cookie: ``string`` - :param username: The Splunk account username, which is used to - authenticate the Splunk instance. - :type username: ``string`` - :param password: The password for the Splunk account. - :type password: ``string`` - :param headers: List of extra HTTP headers to send (optional). - :type headers: ``list`` of 2-tuples. - :param autologin: When ``True``, automatically tries to log in again if the - session terminates. - :type autologin: ``Boolean`` - :return: An initialized :class:`Context` instance. - - **Example**:: - - import splunklib.binding as binding - c = binding.connect(...) - response = c.get("apps/local") - """ - c = Context(**kwargs) - c.login() - return c - - -# Note: the error response schema supports multiple messages but we only -# return the first, although we do return the body so that an exception -# handler that wants to read multiple messages can do so. -class HTTPError(Exception): - """This exception is raised for HTTP responses that return an error.""" - - def __init__(self, response, _message=None): - status = response.status - reason = response.reason - body = response.body.read() - try: - detail = XML(body).findtext("./messages/msg") - except ParseError: - detail = body - detail_formatted = "" if detail is None else f" -- {detail}" - message = f"HTTP {status} {reason}{detail_formatted}" - Exception.__init__(self, _message or message) - self.status = status - self.reason = reason - self.headers = response.headers - self.body = body - self._response = response - - -class AuthenticationError(HTTPError): - """Raised when a login request to Splunk fails. - - If your username was unknown or you provided an incorrect password - in a call to :meth:`Context.login` or :meth:`splunklib.client.Service.login`, - this exception is raised. - """ - - def __init__(self, message, cause): - # Put the body back in the response so that HTTPError's constructor can - # read it again. - cause._response.body = BytesIO(cause.body) - - HTTPError.__init__(self, cause._response, message) - - -# -# The HTTP interface used by the Splunk binding layer abstracts the underlying -# HTTP library using request & response 'messages' which are implemented as -# dictionaries with the following structure: -# -# # HTTP request message (only method required) -# request { -# method : str, -# headers? : [(str, str)*], -# body? : str, -# } -# -# # HTTP response message (all keys present) -# response { -# status : int, -# reason : str, -# headers : [(str, str)*], -# body : file, -# } -# - -# Encode the given kwargs as a query string. 
This wrapper will also _encode -# a list value as a sequence of assignments to the corresponding arg name, -# for example an argument such as 'foo=[1,2,3]' will be encoded as -# 'foo=1&foo=2&foo=3'. -def _encode(**kwargs): - items = [] - for key, value in kwargs.items(): - if isinstance(value, list): - items.extend([(key, item) for item in value]) - else: - items.append((key, value)) - return parse.urlencode(items) - - -# Crack the given url into (scheme, host, port, path) -def _spliturl(url): - parsed_url = parse.urlparse(url) - host = parsed_url.hostname - port = parsed_url.port - path = '?'.join((parsed_url.path, parsed_url.query)) if parsed_url.query else parsed_url.path - # Strip brackets if its an IPv6 address - if host.startswith('[') and host.endswith(']'): host = host[1:-1] - if port is None: port = DEFAULT_PORT - return parsed_url.scheme, host, port, path - - -# Given an HTTP request handler, this wrapper objects provides a related -# family of convenience methods built using that handler. -class HttpLib: - """A set of convenient methods for making HTTP calls. - - ``HttpLib`` provides a general :meth:`request` method, and :meth:`delete`, - :meth:`post`, and :meth:`get` methods for the three HTTP methods that Splunk - uses. - - By default, ``HttpLib`` uses Python's built-in ``httplib`` library, - but you can replace it by passing your own handling function to the - constructor for ``HttpLib``. - - The handling function should have the type: - - ``handler(`url`, `request_dict`) -> response_dict`` - - where `url` is the URL to make the request to (including any query and - fragment sections) as a dictionary with the following keys: - - - method: The method for the request, typically ``GET``, ``POST``, or ``DELETE``. - - - headers: A list of pairs specifying the HTTP headers (for example: ``[('key': value), ...]``). - - - body: A string containing the body to send with the request (this string - should default to ''). - - and ``response_dict`` is a dictionary with the following keys: - - - status: An integer containing the HTTP status code (such as 200 or 404). - - - reason: The reason phrase, if any, returned by the server. - - - headers: A list of pairs containing the response headers (for example, ``[('key': value), ...]``). - - - body: A stream-like object supporting ``read(size=None)`` and ``close()`` - methods to get the body of the response. - - The response dictionary is returned directly by ``HttpLib``'s methods with - no further processing. By default, ``HttpLib`` calls the :func:`handler` function - to get a handler function. - - If using the default handler, SSL verification can be disabled by passing verify=False. - """ - - def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None, context=None, retries=0, - retryDelay=10): - if custom_handler is None: - self.handler = handler(verify=verify, key_file=key_file, cert_file=cert_file, context=context) - else: - self.handler = custom_handler - self._cookies = {} - self.retries = retries - self.retryDelay = retryDelay - - def delete(self, url, headers=None, **kwargs): - """Sends a DELETE request to a URL. - - :param url: The URL. - :type url: ``string`` - :param headers: A list of pairs specifying the headers for the HTTP - response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). - :type headers: ``list`` - :param kwargs: Additional keyword arguments (optional). These arguments - are interpreted as the query part of the URL. 
The order of keyword - arguments is not preserved in the request, but the keywords and - their arguments will be URL encoded. - :type kwargs: ``dict`` - :returns: A dictionary describing the response (see :class:`HttpLib` for - its structure). - :rtype: ``dict`` - """ - if headers is None: headers = [] - if kwargs: - # url is already a UrlEncoded. We have to manually declare - # the query to be encoded or it will get automatically URL - # encoded by being appended to url. - url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) - message = { - 'method': "DELETE", - 'headers': headers, - } - return self.request(url, message) - - def get(self, url, headers=None, **kwargs): - """Sends a GET request to a URL. - - :param url: The URL. - :type url: ``string`` - :param headers: A list of pairs specifying the headers for the HTTP - response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). - :type headers: ``list`` - :param kwargs: Additional keyword arguments (optional). These arguments - are interpreted as the query part of the URL. The order of keyword - arguments is not preserved in the request, but the keywords and - their arguments will be URL encoded. - :type kwargs: ``dict`` - :returns: A dictionary describing the response (see :class:`HttpLib` for - its structure). - :rtype: ``dict`` - """ - if headers is None: headers = [] - if kwargs: - # url is already a UrlEncoded. We have to manually declare - # the query to be encoded or it will get automatically URL - # encoded by being appended to url. - url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) - return self.request(url, {'method': "GET", 'headers': headers}) - - def post(self, url, headers=None, **kwargs): - """Sends a POST request to a URL. - - :param url: The URL. - :type url: ``string`` - :param headers: A list of pairs specifying the headers for the HTTP - response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). - :type headers: ``list`` - :param kwargs: Additional keyword arguments (optional). If the argument - is ``body``, the value is used as the body for the request, and the - keywords and their arguments will be URL encoded. If there is no - ``body`` keyword argument, all the keyword arguments are encoded - into the body of the request in the format ``x-www-form-urlencoded``. - :type kwargs: ``dict`` - :returns: A dictionary describing the response (see :class:`HttpLib` for - its structure). - :rtype: ``dict`` - """ - if headers is None: headers = [] - - # We handle GET-style arguments and an unstructured body. This is here - # to support the receivers/stream endpoint. - if 'body' in kwargs: - # We only use application/x-www-form-urlencoded if there is no other - # Content-Type header present. This can happen in cases where we - # send requests as application/json, e.g. for KV Store. - if len([x for x in headers if x[0].lower() == "content-type"]) == 0: - headers.append(("Content-Type", "application/x-www-form-urlencoded")) - - body = kwargs.pop('body') - if isinstance(body, dict): - body = _encode(**body).encode('utf-8') - if len(kwargs) > 0: - url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) - else: - body = _encode(**kwargs).encode('utf-8') - message = { - 'method': "POST", - 'headers': headers, - 'body': body - } - return self.request(url, message) - - def request(self, url, message, **kwargs): - """Issues an HTTP request to a URL. - - :param url: The URL. 
- :type url: ``string`` - :param message: A dictionary with the format as described in - :class:`HttpLib`. - :type message: ``dict`` - :param kwargs: Additional keyword arguments (optional). These arguments - are passed unchanged to the handler. - :type kwargs: ``dict`` - :returns: A dictionary describing the response (see :class:`HttpLib` for - its structure). - :rtype: ``dict`` - """ - while True: - try: - response = self.handler(url, message, **kwargs) - break - except Exception: - if self.retries <= 0: - raise - else: - time.sleep(self.retryDelay) - self.retries -= 1 - response = record(response) - if 400 <= response.status: - raise HTTPError(response) - - # Update the cookie with any HTTP request - # Initially, assume list of 2-tuples - key_value_tuples = response.headers - # If response.headers is a dict, get the key-value pairs as 2-tuples - # this is the case when using urllib2 - if isinstance(response.headers, dict): - key_value_tuples = list(response.headers.items()) - for key, value in key_value_tuples: - if key.lower() == "set-cookie": - _parse_cookies(value, self._cookies) - - return response - - -# Converts an httplib response into a file-like object. -class ResponseReader(io.RawIOBase): - """This class provides a file-like interface for :class:`httplib` responses. - - The ``ResponseReader`` class is intended to be a layer to unify the different - types of HTTP libraries used with this SDK. This class also provides a - preview of the stream and a few useful predicates. - """ - - # For testing, you can use a StringIO as the argument to - # ``ResponseReader`` instead of an ``httplib.HTTPResponse``. It - # will work equally well. - def __init__(self, response, connection=None): - self._response = response - self._connection = connection - self._buffer = b'' - - def __str__(self): - return str(self.read(), 'UTF-8') - - @property - def empty(self): - """Indicates whether there is any more data in the response.""" - return self.peek(1) == b"" - - def peek(self, size): - """Nondestructively retrieves a given number of characters. - - The next :meth:`read` operation behaves as though this method was never - called. - - :param size: The number of characters to retrieve. - :type size: ``integer`` - """ - c = self.read(size) - self._buffer = self._buffer + c - return c - - def close(self): - """Closes this response.""" - if self._connection: - self._connection.close() - self._response.close() - - def read(self, size=None): - """Reads a given number of characters from the response. - - :param size: The number of characters to read, or "None" to read the - entire response. - :type size: ``integer`` or "None" - - """ - r = self._buffer - self._buffer = b'' - if size is not None: - size -= len(r) - r = r + self._response.read(size) - return r - - def readable(self): - """ Indicates that the response reader is readable.""" - return True - - def readinto(self, byte_array): - """ Read data into a byte array, upto the size of the byte array. - - :param byte_array: A byte array/memory view to pour bytes into. - :type byte_array: ``bytearray`` or ``memoryview`` - - """ - max_size = len(byte_array) - data = self.read(max_size) - bytes_read = len(data) - byte_array[:bytes_read] = data - return bytes_read - - -def handler(key_file=None, cert_file=None, timeout=None, verify=False, context=None): - """This class returns an instance of the default HTTP request handler using - the values you provide. 
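As the comment above ``ResponseReader.__init__`` points out, any file-like object can stand in for an ``httplib`` response, which makes the peek-then-read contract easy to check in isolation. A small sketch using ``BytesIO`` (standalone, not part of the SDK's test suite)::

    from io import BytesIO

    from splunklib.binding import ResponseReader

    rr = ResponseReader(BytesIO(b'<response>ok</response>'))
    assert rr.peek(10) == b'<response>'  # non-destructive: bytes are buffered
    assert rr.read(10) == b'<response>'  # the same bytes come back on read
    assert not rr.empty                  # 'ok</response>' is still unread
    rr.close()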
- - :param `key_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing your private key (optional). - :type key_file: ``string`` - :param `cert_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing a certificate chain file (optional). - :type cert_file: ``string`` - :param `timeout`: The request time-out period, in seconds (optional). - :type timeout: ``integer`` or "None" - :param `verify`: Set to False to disable SSL verification on https connections. - :type verify: ``Boolean`` - :param `context`: The SSLContext that can is used with the HTTPSConnection when verify=True is enabled and context is specified - :type context: ``SSLContext` - """ - - def connect(scheme, host, port): - kwargs = {} - if timeout is not None: kwargs['timeout'] = timeout - if scheme == "http": - return client.HTTPConnection(host, port, **kwargs) - if scheme == "https": - if key_file is not None: kwargs['key_file'] = key_file - if cert_file is not None: kwargs['cert_file'] = cert_file - - if not verify: - kwargs['context'] = ssl._create_unverified_context() - elif context: - # verify is True in elif branch and context is not None - kwargs['context'] = context - - return client.HTTPSConnection(host, port, **kwargs) - raise ValueError(f"unsupported scheme: {scheme}") - - def request(url, message, **kwargs): - scheme, host, port, path = _spliturl(url) - body = message.get("body", "") - head = { - "Content-Length": str(len(body)), - "Host": host, - "User-Agent": "splunk-sdk-python/%s" % __version__, - "Accept": "*/*", - "Connection": "Close", - } # defaults - for key, value in message["headers"]: - head[key] = value - method = message.get("method", "GET") - - connection = connect(scheme, host, port) - is_keepalive = False - try: - connection.request(method, path, body, head) - if timeout is not None: - connection.sock.settimeout(timeout) - response = connection.getresponse() - is_keepalive = "keep-alive" in response.getheader("connection", default="close").lower() - finally: - if not is_keepalive: - connection.close() - - return { - "status": response.status, - "reason": response.reason, - "headers": response.getheaders(), - "body": ResponseReader(response, connection if is_keepalive else None), - } - - return request +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""The **splunklib.binding** module provides a low-level binding interface to the +`Splunk REST API `_. + +This module handles the wire details of calling the REST API, such as +authentication tokens, prefix paths, URL encoding, and so on. Actual path +segments, ``GET`` and ``POST`` arguments, and the parsing of responses is left +to the user. + +If you want a friendlier interface to the Splunk REST API, use the +:mod:`splunklib.client` module. 
+""" + +import io +import json +import logging +import socket +import ssl +import time +from base64 import b64encode +from contextlib import contextmanager +from datetime import datetime +from functools import wraps +from io import BytesIO +from urllib import parse +from http import client +from http.cookies import SimpleCookie +from xml.etree.ElementTree import XML, ParseError +from splunklib.data import record +from splunklib import __version__ + + +logger = logging.getLogger(__name__) + +__all__ = [ + "AuthenticationError", + "connect", + "Context", + "handler", + "HTTPError", + "UrlEncoded", + "_encode", + "_make_cookie_header", + "_NoAuthenticationToken", + "namespace" +] + +SENSITIVE_KEYS = ['Authorization', 'Cookie', 'action.email.auth_password', 'auth', 'auth_password', 'clear_password', 'clientId', + 'crc-salt', 'encr_password', 'oldpassword', 'passAuth', 'password', 'session', 'suppressionKey', + 'token'] + +# If you change these, update the docstring +# on _authority as well. +DEFAULT_HOST = "localhost" +DEFAULT_PORT = "8089" +DEFAULT_SCHEME = "https" + + +def _log_duration(f): + @wraps(f) + def new_f(*args, **kwargs): + start_time = datetime.now() + val = f(*args, **kwargs) + end_time = datetime.now() + logger.debug("Operation took %s", end_time - start_time) + return val + + return new_f + + +def mask_sensitive_data(data): + ''' + Masked sensitive fields data for logging purpose + ''' + if not isinstance(data, dict): + try: + data = json.loads(data) + except Exception as ex: + return data + + # json.loads will return "123"(str) as 123(int), so return the data if it's not 'dict' type + if not isinstance(data, dict): + return data + mdata = {} + for k, v in data.items(): + if k in SENSITIVE_KEYS: + mdata[k] = "******" + else: + mdata[k] = mask_sensitive_data(v) + return mdata + + +def _parse_cookies(cookie_str, dictionary): + """Tries to parse any key-value pairs of cookies in a string, + then updates the the dictionary with any key-value pairs found. + + **Example**:: + + dictionary = {} + _parse_cookies('my=value', dictionary) + # Now the following is True + dictionary['my'] == 'value' + + :param cookie_str: A string containing "key=value" pairs from an HTTP "Set-Cookie" header. + :type cookie_str: ``str`` + :param dictionary: A dictionary to update with any found key-value pairs. + :type dictionary: ``dict`` + """ + parsed_cookie = SimpleCookie(cookie_str) + for cookie in parsed_cookie.values(): + dictionary[cookie.key] = cookie.coded_value + + +def _make_cookie_header(cookies): + """ + Takes a list of 2-tuples of key-value pairs of + cookies, and returns a valid HTTP ``Cookie`` + header. + + **Example**:: + + header = _make_cookie_header([("key", "value"), ("key_2", "value_2")]) + # Now the following is True + header == "key=value; key_2=value_2" + + :param cookies: A list of 2-tuples of cookie key-value pairs. + :type cookies: ``list`` of 2-tuples + :return: ``str` An HTTP header cookie string. + :rtype: ``str`` + """ + return "; ".join(f"{key}={value}" for key, value in cookies) + + +# Singleton values to eschew None +class _NoAuthenticationToken: + """The value stored in a :class:`Context` or :class:`splunklib.client.Service` + class that is not logged in. + + If a ``Context`` or ``Service`` object is created without an authentication + token, and there has not yet been a call to the ``login`` method, the token + field of the ``Context`` or ``Service`` object is set to + ``_NoAuthenticationToken``. 
+
+    Likewise, after a ``Context`` or ``Service`` object has been logged out, the
+    token is set to this value again.
+    """
+
+
+class UrlEncoded(str):
+    """This class marks URL-encoded strings.
+    It should be considered an SDK-private implementation detail.
+
+    Manually tracking whether strings are URL encoded can be difficult. Avoid
+    calling ``urllib.parse.quote`` to replace special characters with escapes. When
+    you receive a URL-encoded string, *do* use ``urllib.parse.unquote`` to replace
+    escapes with single characters. Then, wrap any string you want to use as a
+    URL in ``UrlEncoded``. Note that because the ``UrlEncoded`` class is
+    idempotent, making multiple calls to it is OK.
+
+    ``UrlEncoded`` objects are identical to ``str`` objects (including being
+    equal if their contents are equal) except when passed to ``UrlEncoded``
+    again.
+
+    ``UrlEncoded`` removes the ``str`` type support for interpolating values
+    with ``%`` (doing that raises a ``TypeError``). There is no reliable way to
+    encode values this way, so instead, interpolate into a string, quoting by
+    hand, and call ``UrlEncoded`` with ``skip_encode=True``.
+
+    **Example**::
+
+        from urllib import parse
+        UrlEncoded(f'{scheme}://{parse.quote(host)}', skip_encode=True)
+
+    If you append ``str`` strings and ``UrlEncoded`` strings, the result is also
+    URL encoded.
+
+    **Example**::
+
+        UrlEncoded('ab c') + 'de f' == UrlEncoded('ab cde f')
+        'ab c' + UrlEncoded('de f') == UrlEncoded('ab cde f')
+    """
+
+    def __new__(self, val='', skip_encode=False, encode_slash=False):
+        if isinstance(val, UrlEncoded):
+            # Don't urllib.quote something already URL encoded.
+            return val
+        if skip_encode:
+            return str.__new__(self, val)
+        if encode_slash:
+            return str.__new__(self, parse.quote_plus(val))
+        # When subclassing str, just call str.__new__ method
+        # with your class and the value you want to have in the
+        # new string.
+        return str.__new__(self, parse.quote(val))
+
+    def __add__(self, other):
+        """self + other
+
+        If *other* is not a ``UrlEncoded``, URL encode it before
+        adding it.
+        """
+        if isinstance(other, UrlEncoded):
+            return UrlEncoded(str.__add__(self, other), skip_encode=True)
+
+        return UrlEncoded(str.__add__(self, parse.quote(other)), skip_encode=True)
+
+    def __radd__(self, other):
+        """other + self
+
+        If *other* is not a ``UrlEncoded``, URL encode it before
+        adding it.
+        """
+        if isinstance(other, UrlEncoded):
+            return UrlEncoded(str.__radd__(self, other), skip_encode=True)
+
+        return UrlEncoded(str.__add__(parse.quote(other), self), skip_encode=True)
+
+    def __mod__(self, fields):
+        """Interpolation into ``UrlEncoded``s is disabled.
+
+        If you try to write ``UrlEncoded("%s") % "abc"``, you will get a
+        ``TypeError``.
+        """
+        raise TypeError("Cannot interpolate into a UrlEncoded object.")
+
+    def __repr__(self):
+        return f"UrlEncoded({repr(parse.unquote(str(self)))})"
+
+
+@contextmanager
+def _handle_auth_error(msg):
+    """Handle re-raising HTTP authentication errors as something clearer.
+
+    If an ``HTTPError`` is raised with status 401 (access denied) in
+    the body of this context manager, re-raise it as an
+    ``AuthenticationError`` instead, with *msg* as its message.
+
+    This function adds no round trips to the server.
+
+    :param msg: The message to be raised in ``AuthenticationError``.
+    :type msg: ``str``
+
+    **Example**::
+
+        with _handle_auth_error("Your login failed."):
+             ... # make an HTTP request
+    """
+    try:
+        yield
+    except HTTPError as he:
+        if he.status == 401:
+            raise AuthenticationError(msg, he)
+        else:
+            raise
+
+
+def _authentication(request_fun):
+    """Decorator to handle autologin and authentication errors.
+
+    *request_fun* is a function taking no arguments that needs to
+    be run with this ``Context`` logged into Splunk.
+
+    ``_authentication``'s behavior depends on whether the
+    ``autologin`` field of ``Context`` is set to ``True`` or
+    ``False``. If it's ``False``, then ``_authentication``
+    aborts if the ``Context`` is not logged in, and raises an
+    ``AuthenticationError`` if an ``HTTPError`` of status 401 is
+    raised in *request_fun*. If it's ``True``, then
+    ``_authentication`` will try at all sensible places to
+    log in before issuing the request.
+
+    If ``autologin`` is ``False``, ``_authentication`` makes
+    one roundtrip to the server if the ``Context`` is logged in,
+    or zero if it is not. If ``autologin`` is ``True``, it's less
+    deterministic, and may make at most three roundtrips (though
+    that would be a truly pathological case).
+
+    :param request_fun: A function of no arguments encapsulating
+                        the request to make to the server.
+
+    **Example**::
+
+        import splunklib.binding as binding
+        c = binding.connect(..., autologin=True)
+        c.logout()
+        def f():
+            c.get("/services")
+            return 42
+        print(_authentication(f))
+    """
+
+    @wraps(request_fun)
+    def wrapper(self, *args, **kwargs):
+        if self.token is _NoAuthenticationToken and not self.has_cookies():
+            # Not yet logged in.
+            if self.autologin and self.username and self.password:
+                # This will throw an uncaught
+                # AuthenticationError if it fails.
+                self.login()
+            else:
+                # Try the request anyway without authentication.
+                # Most requests will fail. Some will succeed, such as
+                # 'GET server/info'.
+                with _handle_auth_error("Request aborted: not logged in."):
+                    return request_fun(self, *args, **kwargs)
+        try:
+            # Issue the request
+            return request_fun(self, *args, **kwargs)
+        except HTTPError as he:
+            if he.status == 401 and self.autologin:
+                # Authentication failed. Try logging in, and then
+                # rerunning the request. If either step fails, throw
+                # an AuthenticationError and give up.
+                with _handle_auth_error("Autologin failed."):
+                    self.login()
+                with _handle_auth_error("Authentication failed. If a session token was used, it may have expired."):
+                    return request_fun(self, *args, **kwargs)
+            elif he.status == 401 and not self.autologin:
+                raise AuthenticationError(
+                    "Request failed: Session is not logged in.", he)
+            else:
+                raise
+
+    return wrapper
+
+
+def _authority(scheme=DEFAULT_SCHEME, host=DEFAULT_HOST, port=DEFAULT_PORT):
+    """Construct a URL authority from the given *scheme*, *host*, and *port*.
+
+    Named in accordance with RFC2396_, which defines URLs as::
+
+        <scheme>://<authority><path>?<query>
+
+    .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt
+
+    So ``https://localhost:8000/a/b/c?boris=hilda`` would be parsed as::
+
+        scheme := https
+        authority := localhost:8000
+        path := /a/b/c
+        query := boris=hilda
+
+    :param scheme: URL scheme (the default is "https")
+    :type scheme: "http" or "https"
+    :param host: The host name (the default is "localhost")
+    :type host: string
+    :param port: The port number (the default is 8089)
+    :type port: integer
+    :return: The URL authority.
+    :rtype: UrlEncoded (subclass of ``str``)
+
+    **Example**::
+
+        _authority() == "https://localhost:8089"
+
+        _authority(host="splunk.utopia.net") == "https://splunk.utopia.net:8089"
+
+        _authority(host="2001:0db8:85a3:0000:0000:8a2e:0370:7334") == \
+            "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089"
+
+        _authority(scheme="http", host="splunk.utopia.net", port="471") == \
+            "http://splunk.utopia.net:471"
+
+    """
+    # check if host is an IPv6 address and not enclosed in [ ]
+    if ':' in host and not (host.startswith('[') and host.endswith(']')):
+        # IPv6 addresses must be enclosed in [ ] in order to be well
+        # formed.
+        host = '[' + host + ']'
+    return UrlEncoded(f"{scheme}://{host}:{port}", skip_encode=True)
+
+
+# kwargs: sharing, owner, app
+def namespace(sharing=None, owner=None, app=None, **kwargs):
+    """This function constructs a Splunk namespace.
+
+    Every Splunk resource belongs to a namespace. The namespace is specified by
+    the pair of values ``owner`` and ``app`` and is governed by a ``sharing`` mode.
+    The possible values for ``sharing`` are: "user", "app", "global" and "system",
+    which map to the following combinations of ``owner`` and ``app`` values:
+
+        "user"   => {owner}, {app}
+
+        "app"    => nobody, {app}
+
+        "global" => nobody, {app}
+
+        "system" => nobody, system
+
+    "nobody" is a special user name that basically means no user, and "system"
+    is the name reserved for system resources.
+
+    "-" is a wildcard that can be used for both ``owner`` and ``app`` values and
+    refers to all users and all apps, respectively.
+
+    In general, when you specify a namespace you can specify any combination of
+    these three values and the library will reconcile the triple, overriding the
+    provided values as appropriate.
+
+    Finally, if no namespacing is specified the library will make use of the
+    ``/services`` branch of the REST API, which provides a namespaced view of
+    Splunk resources equivalent to using ``owner={currentUser}`` and
+    ``app={defaultApp}``.
+
+    The ``namespace`` function returns a representation of the namespace from
+    reconciling the values you provide. It ignores any keyword arguments other
+    than ``owner``, ``app``, and ``sharing``, so you can provide ``dicts`` of
+    configuration information without first having to extract individual keys.
+
+    :param sharing: The sharing mode (the default is "user").
+    :type sharing: "system", "global", "app", or "user"
+    :param owner: The owner context (the default is "None").
+    :type owner: ``string``
+    :param app: The app context (the default is "None").
+    :type app: ``string``
+    :returns: A :class:`splunklib.data.Record` containing the reconciled
+        namespace.
+
+    **Example**::
+
+        import splunklib.binding as binding
+        n = binding.namespace(sharing="user", owner="boris", app="search")
+        n = binding.namespace(sharing="global", app="search")
+    """
+    if sharing in ["system"]:
+        return record({'sharing': sharing, 'owner': "nobody", 'app': "system"})
+    if sharing in ["global", "app"]:
+        return record({'sharing': sharing, 'owner': "nobody", 'app': app})
+    if sharing in ["user", None]:
+        return record({'sharing': sharing, 'owner': owner, 'app': app})
+    raise ValueError("Invalid value for argument: 'sharing'")
+
+
+class Context:
+    """This class represents a context that encapsulates a splunkd connection.
+
+    The ``Context`` class encapsulates the details of HTTP requests,
+    authentication, a default namespace, and URL prefixes to simplify access to
+    the REST API.
+
+    After creating a ``Context`` object, you must call its :meth:`login`
+    method before you can issue requests to splunkd. Or, use the :func:`connect`
+    function to create an already-authenticated ``Context`` object. You can
+    provide a session token explicitly (the same token can be shared by multiple
+    ``Context`` objects) to provide authentication.
+
+    :param host: The host name (the default is "localhost").
+    :type host: ``string``
+    :param port: The port number (the default is 8089).
+    :type port: ``integer``
+    :param scheme: The scheme for accessing the service (the default is "https").
+    :type scheme: "https" or "http"
+    :param verify: Enable (True) or disable (False) SSL verification for https connections.
+    :type verify: ``Boolean``
+    :param sharing: The sharing mode for the namespace (the default is "user").
+    :type sharing: "global", "system", "app", or "user"
+    :param owner: The owner context of the namespace (optional, the default is "None").
+    :type owner: ``string``
+    :param app: The app context of the namespace (optional, the default is "None").
+    :type app: ``string``
+    :param token: A session token. When provided, you don't need to call :meth:`login`.
+    :type token: ``string``
+    :param cookie: A session cookie. When provided, you don't need to call :meth:`login`.
+        This parameter is only supported for Splunk 6.2+.
+    :type cookie: ``string``
+    :param username: The Splunk account username, which is used to
+        authenticate the Splunk instance.
+    :type username: ``string``
+    :param password: The password for the Splunk account.
+    :type password: ``string``
+    :param splunkToken: Splunk authentication token
+    :type splunkToken: ``string``
+    :param headers: List of extra HTTP headers to send (optional).
+    :type headers: ``list`` of 2-tuples.
+    :param retries: Number of retries for each HTTP connection (optional, the default is 0).
+        NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER AND BLOCK THE
+        CURRENT THREAD WHILE RETRYING.
+    :type retries: ``int``
+    :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s).
+    :type retryDelay: ``int`` (in seconds)
+    :param handler: The HTTP request handler (optional).
+    :returns: A ``Context`` instance.
+
+    **Example**::
+
+        import splunklib.binding as binding
+        c = binding.Context(username="boris", password="natasha", ...)
+        c.login()
+        # Or equivalently
+        c = binding.connect(username="boris", password="natasha")
+        # Or if you already have a session token
+        c = binding.Context(token="atg232342aa34324a")
+        # Or if you already have a valid cookie
+        c = binding.Context(cookie="splunkd_8089=...")
+    """
+
+    def __init__(self, handler=None, **kwargs):
+        self.http = HttpLib(handler, kwargs.get("verify", False), key_file=kwargs.get("key_file"),
+                            cert_file=kwargs.get("cert_file"), context=kwargs.get("context"),
+                            # Default to False for backward compat
+                            retries=kwargs.get("retries", 0), retryDelay=kwargs.get("retryDelay", 10))
+        self.token = kwargs.get("token", _NoAuthenticationToken)
+        if self.token is None:  # In case someone explicitly passes token=None
+            self.token = _NoAuthenticationToken
+        self.scheme = kwargs.get("scheme", DEFAULT_SCHEME)
+        self.host = kwargs.get("host", DEFAULT_HOST)
+        self.port = int(kwargs.get("port", DEFAULT_PORT))
+        self.authority = _authority(self.scheme, self.host, self.port)
+        self.namespace = namespace(**kwargs)
+        self.username = kwargs.get("username", "")
+        self.password = kwargs.get("password", "")
+        self.basic = kwargs.get("basic", False)
+        self.bearerToken = kwargs.get("splunkToken", "")
+        self.autologin = kwargs.get("autologin", False)
+        self.additional_headers = kwargs.get("headers", [])
+
+        # Store any cookies in the self.http._cookies dict
+        if "cookie" in kwargs and kwargs['cookie'] not in [None, _NoAuthenticationToken]:
+            _parse_cookies(kwargs["cookie"], self.http._cookies)
+
+    def get_cookies(self):
+        """Gets the dictionary of cookies from the ``HttpLib`` member of this instance.
+
+        :return: Dictionary of cookies stored on ``self.http``.
+        :rtype: ``dict``
+        """
+        return self.http._cookies
+
+    def has_cookies(self):
+        """Returns true if the ``HttpLib`` member of this instance has an auth token stored.
+
+        :return: ``True`` if there is an auth token present, else ``False``
+        :rtype: ``bool``
+        """
+        auth_token_key = "splunkd_"
+        return any(auth_token_key in key for key in self.get_cookies().keys())
+
+    # Shared per-context request headers
+    @property
+    def _auth_headers(self):
+        """Headers required to authenticate a request.
+
+        Assumes your ``Context`` already has an authentication token or
+        cookie, either provided explicitly or obtained by logging
+        into the Splunk instance.
+
+        :returns: A list of 2-tuples containing key and value
+        """
+        header = []
+        if self.has_cookies():
+            return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))]
+        elif self.basic and (self.username and self.password):
+            token = f'Basic {b64encode(f"{self.username}:{self.password}".encode("utf-8")).decode("ascii")}'
+        elif self.bearerToken:
+            token = f'Bearer {self.bearerToken}'
+        elif self.token is _NoAuthenticationToken:
+            token = []
+        else:
+            # Ensure the token is properly formatted
+            if self.token.startswith('Splunk '):
+                token = self.token
+            else:
+                token = f'Splunk {self.token}'
+        if token:
+            header.append(("Authorization", token))
+        if self.get_cookies():
+            header.append(("Cookie", _make_cookie_header(list(self.get_cookies().items()))))
+
+        return header
+
+    def connect(self):
+        """Returns an open connection (socket) to the Splunk instance.
+
+        This method is used for writing bulk events to an index or similar tasks
+        where the overhead of opening a connection multiple times would be
+        prohibitive.
+
+        :returns: A socket.
+
+        **Example**::
+
+            import splunklib.binding as binding
+            c = binding.connect(...)
+            socket = c.connect()
+            socket.write("POST %s HTTP/1.1\\r\\n" % "some/path/to/post/to")
+            socket.write("Host: %s:%s\\r\\n" % (c.host, c.port))
+            socket.write("Accept-Encoding: identity\\r\\n")
+            socket.write("Authorization: %s\\r\\n" % c.token)
+            socket.write("X-Splunk-Input-Mode: Streaming\\r\\n")
+            socket.write("\\r\\n")
+        """
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        if self.scheme == "https":
+            sock = ssl.wrap_socket(sock)
+        sock.connect((socket.gethostbyname(self.host), self.port))
+        return sock
+
+    @_authentication
+    @_log_duration
+    def delete(self, path_segment, owner=None, app=None, sharing=None, **query):
+        """Performs a DELETE operation at the REST path segment with the given
+        namespace and query.
+
+        This method is named to match the HTTP method. ``delete`` makes at least
+        one round trip to the server, one additional round trip for each 303
+        status returned, and at most two additional round trips if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        If *owner*, *app*, and *sharing* are omitted, this method uses the
+        default :class:`Context` namespace. All other keyword arguments are
+        included in the URL as query parameters.
+
+        :raises AuthenticationError: Raised when the ``Context`` object is not
+            logged in.
+        :raises HTTPError: Raised when an error occurred in a DELETE operation from
+            *path_segment*.
+        :param path_segment: A REST path segment.
+        :type path_segment: ``string``
+        :param owner: The owner context of the namespace (optional).
+        :type owner: ``string``
+        :param app: The app context of the namespace (optional).
+        :type app: ``string``
+        :param sharing: The sharing mode of the namespace (optional).
+        :type sharing: ``string``
+        :param query: All other keyword arguments, which are used as query
+            parameters.
+        :type query: ``string``
+        :return: The response from the server.
+        :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+            and ``status``
+
+        **Example**::
+
+            c = binding.connect(...)
+            c.delete('saved/searches/boris') == \\
+                {'body': ...a response reader object...,
+                 'headers': [('content-length', '1786'),
+                             ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                             ('server', 'Splunkd'),
+                             ('connection', 'close'),
+                             ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                             ('date', 'Fri, 11 May 2012 16:53:06 GMT'),
+                             ('content-type', 'text/xml; charset=utf-8')],
+                 'reason': 'OK',
+                 'status': 200}
+            c.delete('nonexistent/path') # raises HTTPError
+            c.logout()
+            c.delete('apps/local') # raises AuthenticationError
+        """
+        path = self.authority + self._abspath(path_segment, owner=owner,
+                                              app=app, sharing=sharing)
+        logger.debug("DELETE request to %s (body: %s)", path, mask_sensitive_data(query))
+        response = self.http.delete(path, self._auth_headers, **query)
+        return response
+
+    @_authentication
+    @_log_duration
+    def get(self, path_segment, owner=None, app=None, headers=None, sharing=None, **query):
+        """Performs a GET operation from the REST path segment with the given
+        namespace and query.
+
+        This method is named to match the HTTP method. ``get`` makes at least
+        one round trip to the server, one additional round trip for each 303
+        status returned, and at most two additional round trips if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        If *owner*, *app*, and *sharing* are omitted, this method uses the
+        default :class:`Context` namespace. All other keyword arguments are
+        included in the URL as query parameters.
+
+        :raises AuthenticationError: Raised when the ``Context`` object is not
+            logged in.
+        :raises HTTPError: Raised when an error occurred in a GET operation from
+            *path_segment*.
+        :param path_segment: A REST path segment.
+        :type path_segment: ``string``
+        :param owner: The owner context of the namespace (optional).
+        :type owner: ``string``
+        :param app: The app context of the namespace (optional).
+        :type app: ``string``
+        :param headers: List of extra HTTP headers to send (optional).
+        :type headers: ``list`` of 2-tuples.
+        :param sharing: The sharing mode of the namespace (optional).
+        :type sharing: ``string``
+        :param query: All other keyword arguments, which are used as query
+            parameters.
+        :type query: ``string``
+        :return: The response from the server.
+        :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+            and ``status``
+
+        **Example**::
+
+            c = binding.connect(...)
+            c.get('apps/local') == \\
+                {'body': ...a response reader object...,
+                 'headers': [('content-length', '26208'),
+                             ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                             ('server', 'Splunkd'),
+                             ('connection', 'close'),
+                             ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                             ('date', 'Fri, 11 May 2012 16:30:35 GMT'),
+                             ('content-type', 'text/xml; charset=utf-8')],
+                 'reason': 'OK',
+                 'status': 200}
+            c.get('nonexistent/path') # raises HTTPError
+            c.logout()
+            c.get('apps/local') # raises AuthenticationError
+        """
+        if headers is None:
+            headers = []
+
+        path = self.authority + self._abspath(path_segment, owner=owner,
+                                              app=app, sharing=sharing)
+        logger.debug("GET request to %s (body: %s)", path, mask_sensitive_data(query))
+        all_headers = headers + self.additional_headers + self._auth_headers
+        response = self.http.get(path, all_headers, **query)
+        return response
+
+    @_authentication
+    @_log_duration
+    def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, **query):
+        """Performs a POST operation from the REST path segment with the given
+        namespace and query.
+
+        This method is named to match the HTTP method. ``post`` makes at least
+        one round trip to the server, one additional round trip for each 303
+        status returned, and at most two additional round trips if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        If *owner*, *app*, and *sharing* are omitted, this method uses the
+        default :class:`Context` namespace. All other keyword arguments are
+        included in the URL as query parameters.
+
+        Some of Splunk's endpoints, such as ``receivers/simple`` and
+        ``receivers/stream``, require unstructured data in the POST body
+        and all metadata passed as GET-style arguments. If you provide
+        a ``body`` argument to ``post``, it will be used as the POST
+        body, and all other keyword arguments will be passed as
+        GET-style arguments in the URL.
+
+        :raises AuthenticationError: Raised when the ``Context`` object is not
+            logged in.
+        :raises HTTPError: Raised when an error occurred in a POST operation from
+            *path_segment*.
+        :param path_segment: A REST path segment.
+        :type path_segment: ``string``
+        :param owner: The owner context of the namespace (optional).
+        :type owner: ``string``
+        :param app: The app context of the namespace (optional).
+        :type app: ``string``
+        :param sharing: The sharing mode of the namespace (optional).
+        :type sharing: ``string``
+        :param headers: List of extra HTTP headers to send (optional).
+        :type headers: ``list`` of 2-tuples.
+        :param query: All other keyword arguments, which are used as query
+            parameters.
+        :param body: Parameters to be used in the post body. If specified,
+            any parameters in the query will be applied to the URL instead of
+            the body. If a dict is supplied, the key-value pairs will be form
+            encoded. If a string is supplied, the body will be passed through
+            in the request unchanged.
+        :type body: ``dict`` or ``str``
+        :return: The response from the server.
+        :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+            and ``status``
+
+        **Example**::
+
+            c = binding.connect(...)
+            c.post('saved/searches', name='boris',
+                   search='search * earliest=-1m | head 1') == \\
+                {'body': ...a response reader object...,
+                 'headers': [('content-length', '10455'),
+                             ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                             ('server', 'Splunkd'),
+                             ('connection', 'close'),
+                             ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                             ('date', 'Fri, 11 May 2012 16:46:06 GMT'),
+                             ('content-type', 'text/xml; charset=utf-8')],
+                 'reason': 'Created',
+                 'status': 201}
+            c.post('nonexistent/path') # raises HTTPError
+            c.logout()
+            # raises AuthenticationError:
+            c.post('saved/searches', name='boris',
+                   search='search * earliest=-1m | head 1')
+        """
+        if headers is None:
+            headers = []
+
+        path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing)
+
+        logger.debug("POST request to %s (body: %s)", path, mask_sensitive_data(query))
+        all_headers = headers + self.additional_headers + self._auth_headers
+        response = self.http.post(path, all_headers, **query)
+        return response
+
+    @_authentication
+    @_log_duration
+    def request(self, path_segment, method="GET", headers=None, body={},
+                owner=None, app=None, sharing=None):
+        """Issues an arbitrary HTTP request to the REST path segment.
+
+        This method is named to match ``http.client.HTTPConnection.request``.
+        This function makes a single round trip to the server.
+
+        If *owner*, *app*, and *sharing* are omitted, this method uses the
+        default :class:`Context` namespace. All other keyword arguments are
+        included in the URL as query parameters.
+
+        :raises AuthenticationError: Raised when the ``Context`` object is not
+            logged in.
+        :raises HTTPError: Raised when an error occurred in the request to
+            *path_segment*.
+        :param path_segment: A REST path segment.
+        :type path_segment: ``string``
+        :param method: The HTTP method to use (optional).
+        :type method: ``string``
+        :param headers: List of extra HTTP headers to send (optional).
+        :type headers: ``list`` of 2-tuples.
+        :param body: Content of the HTTP request (optional).
+        :type body: ``string``
+        :param owner: The owner context of the namespace (optional).
+        :type owner: ``string``
+        :param app: The app context of the namespace (optional).
+        :type app: ``string``
+        :param sharing: The sharing mode of the namespace (optional).
+        :type sharing: ``string``
+        :return: The response from the server.
+        :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
+            and ``status``
+
+        **Example**::
+
+            c = binding.connect(...)
+            c.request('saved/searches', method='GET') == \\
+                {'body': ...a response reader object...,
+                 'headers': [('content-length', '46722'),
+                             ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                             ('server', 'Splunkd'),
+                             ('connection', 'close'),
+                             ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                             ('date', 'Fri, 11 May 2012 17:24:19 GMT'),
+                             ('content-type', 'text/xml; charset=utf-8')],
+                 'reason': 'OK',
+                 'status': 200}
+            c.request('nonexistent/path', method='GET') # raises HTTPError
+            c.logout()
+            c.get('apps/local') # raises AuthenticationError
+        """
+        if headers is None:
+            headers = []
+
+        path = self.authority \
+               + self._abspath(path_segment, owner=owner,
+                               app=app, sharing=sharing)
+
+        all_headers = headers + self.additional_headers + self._auth_headers
+        logger.debug("%s request to %s (headers: %s, body: %s)",
+                     method, path, str(mask_sensitive_data(dict(all_headers))), mask_sensitive_data(body))
+        if body:
+            body = _encode(**body)
+
+            if method == "GET":
+                path = path + UrlEncoded('?' + body, skip_encode=True)
+                message = {'method': method,
+                           'headers': all_headers}
+            else:
+                message = {'method': method,
+                           'headers': all_headers,
+                           'body': body}
+        else:
+            message = {'method': method,
+                       'headers': all_headers}
+
+        response = self.http.request(path, message)
+
+        return response
+
+    def login(self):
+        """Logs into the Splunk instance referred to by the :class:`Context`
+        object.
+
+        Unless a ``Context`` is created with an explicit authentication token
+        (probably obtained by logging in from a different ``Context`` object)
+        you must call :meth:`login` before you can issue requests.
+        The authentication token obtained from the server is stored in the
+        ``token`` field of the ``Context`` object.
+
+        :raises AuthenticationError: Raised when login fails.
+        :returns: The ``Context`` object, so you can chain calls.
+
+        **Example**::
+
+            import splunklib.binding as binding
+            c = binding.Context(...).login()
+            # Then issue requests...
+        """
+
+        if self.has_cookies() and \
+                (not self.username and not self.password):
+            # If we were passed session cookie(s), but no username or
+            # password, then login is a nop, since we're automatically
+            # logged in.
+            return
+
+        if self.token is not _NoAuthenticationToken and \
+                (not self.username and not self.password):
+            # If we were passed a session token, but no username or
+            # password, then login is a nop, since we're automatically
+            # logged in.
+            return
+
+        if self.basic and (self.username and self.password):
+            # Basic auth mode requested, so this method is a nop as long
+            # as credentials were passed in.
+            return
+
+        if self.bearerToken:
+            # Bearer auth mode requested, so this method is a nop as long
+            # as authentication token was passed in.
+            return
+        # Only try to get a token and updated cookie if username & password are specified
+        try:
+            response = self.http.post(
+                self.authority + self._abspath("/services/auth/login"),
+                username=self.username,
+                password=self.password,
+                headers=self.additional_headers,
+                cookie="1")  # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header
+
+            body = response.body.read()
+            session = XML(body).findtext("./sessionKey")
+            self.token = f"Splunk {session}"
+            return self
+        except HTTPError as he:
+            if he.status == 401:
+                raise AuthenticationError("Login failed.", he)
+            else:
+                raise
+
+    def logout(self):
+        """Forgets the current session token and cookies."""
+        self.token = _NoAuthenticationToken
+        self.http._cookies = {}
+        return self
+
+    def _abspath(self, path_segment,
+                 owner=None, app=None, sharing=None):
+        """Qualifies *path_segment* into an absolute path for a URL.
+
+        If *path_segment* is already absolute, returns it unchanged.
+        If *path_segment* is relative, then qualifies it with either
+        the provided namespace arguments or the ``Context``'s default
+        namespace. Any forbidden characters in *path_segment* are URL
+        encoded. This function has no network activity.
+
+        Named to be consistent with RFC2396_.
+
+        .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt
+
+        :param path_segment: A relative or absolute URL path segment.
+        :type path_segment: ``string``
+        :param owner, app, sharing: Components of a namespace (defaults
+                                    to the ``Context``'s namespace if all
+                                    three are omitted)
+        :type owner, app, sharing: ``string``
+        :return: A ``UrlEncoded`` (a subclass of ``str``).
+        :rtype: ``string``
+
+        **Example**::
+
+            import splunklib.binding as binding
+            c = binding.connect(owner='boris', app='search', sharing='user')
+            c._abspath('/a/b/c') == '/a/b/c'
+            c._abspath('/a/b c/d') == '/a/b%20c/d'
+            c._abspath('apps/local/search') == \
+                '/servicesNS/boris/search/apps/local/search'
+            c._abspath('apps/local/search', sharing='system') == \
+                '/servicesNS/nobody/system/apps/local/search'
+            url = c.authority + c._abspath('apps/local/sharing')
+        """
+        skip_encode = isinstance(path_segment, UrlEncoded)
+        # If path_segment is absolute, escape all forbidden characters
+        # in it and return it.
+        if path_segment.startswith('/'):
+            return UrlEncoded(path_segment, skip_encode=skip_encode)
+
+        # path_segment is relative, so we need a namespace to build an
+        # absolute path.
+        if owner or app or sharing:
+            ns = namespace(owner=owner, app=app, sharing=sharing)
+        else:
+            ns = self.namespace
+
+        # If no app or owner are specified, then use the /services
+        # endpoint. Otherwise, use /servicesNS with the specified
+        # namespace. If only one of app and owner is specified, use
+        # '-' for the other.
+        if ns.app is None and ns.owner is None:
+            return UrlEncoded(f"/services/{path_segment}", skip_encode=skip_encode)
+
+        oname = "nobody" if ns.owner is None else ns.owner
+        aname = "system" if ns.app is None else ns.app
+        path = UrlEncoded(f"/servicesNS/{oname}/{aname}/{path_segment}", skip_encode=skip_encode)
+        return path
+
+
+def connect(**kwargs):
+    """This function returns an authenticated :class:`Context` object.
+
+    This function is a shorthand for calling :meth:`Context.login`.
+
+    This function makes one round trip to the server.
+
+    :param host: The host name (the default is "localhost").
+    :type host: ``string``
+    :param port: The port number (the default is 8089).
+    :type port: ``integer``
+    :param scheme: The scheme for accessing the service (the default is "https").
+ :type scheme: "https" or "http" + :param owner: The owner context of the namespace (the default is "None"). + :type owner: ``string`` + :param app: The app context of the namespace (the default is "None"). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (the default is "user"). + :type sharing: "global", "system", "app", or "user" + :param token: The current session token (optional). Session tokens can be + shared across multiple service instances. + :type token: ``string`` + :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. + This parameter is only supported for Splunk 6.2+. + :type cookie: ``string`` + :param username: The Splunk account username, which is used to + authenticate the Splunk instance. + :type username: ``string`` + :param password: The password for the Splunk account. + :type password: ``string`` + :param headers: List of extra HTTP headers to send (optional). + :type headers: ``list`` of 2-tuples. + :param autologin: When ``True``, automatically tries to log in again if the + session terminates. + :type autologin: ``Boolean`` + :return: An initialized :class:`Context` instance. + + **Example**:: + + import splunklib.binding as binding + c = binding.connect(...) + response = c.get("apps/local") + """ + c = Context(**kwargs) + c.login() + return c + + +# Note: the error response schema supports multiple messages but we only +# return the first, although we do return the body so that an exception +# handler that wants to read multiple messages can do so. +class HTTPError(Exception): + """This exception is raised for HTTP responses that return an error.""" + + def __init__(self, response, _message=None): + status = response.status + reason = response.reason + body = response.body.read() + try: + detail = XML(body).findtext("./messages/msg") + except ParseError: + detail = body + detail_formatted = "" if detail is None else f" -- {detail}" + message = f"HTTP {status} {reason}{detail_formatted}" + Exception.__init__(self, _message or message) + self.status = status + self.reason = reason + self.headers = response.headers + self.body = body + self._response = response + + +class AuthenticationError(HTTPError): + """Raised when a login request to Splunk fails. + + If your username was unknown or you provided an incorrect password + in a call to :meth:`Context.login` or :meth:`splunklib.client.Service.login`, + this exception is raised. + """ + + def __init__(self, message, cause): + # Put the body back in the response so that HTTPError's constructor can + # read it again. + cause._response.body = BytesIO(cause.body) + + HTTPError.__init__(self, cause._response, message) + + +# +# The HTTP interface used by the Splunk binding layer abstracts the underlying +# HTTP library using request & response 'messages' which are implemented as +# dictionaries with the following structure: +# +# # HTTP request message (only method required) +# request { +# method : str, +# headers? : [(str, str)*], +# body? : str, +# } +# +# # HTTP response message (all keys present) +# response { +# status : int, +# reason : str, +# headers : [(str, str)*], +# body : file, +# } +# + +# Encode the given kwargs as a query string. This wrapper will also _encode +# a list value as a sequence of assignments to the corresponding arg name, +# for example an argument such as 'foo=[1,2,3]' will be encoded as +# 'foo=1&foo=2&foo=3'. 
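+#
+# For instance (a sketch with hypothetical arguments):
+#
+#     _encode(count=1, fields=['a', 'b'])   # -> 'count=1&fields=a&fields=b'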
+def _encode(**kwargs):
+    items = []
+    for key, value in kwargs.items():
+        if isinstance(value, list):
+            items.extend([(key, item) for item in value])
+        else:
+            items.append((key, value))
+    return parse.urlencode(items)
+
+
+# Crack the given url into (scheme, host, port, path)
+def _spliturl(url):
+    parsed_url = parse.urlparse(url)
+    host = parsed_url.hostname
+    port = parsed_url.port
+    path = '?'.join((parsed_url.path, parsed_url.query)) if parsed_url.query else parsed_url.path
+    # Strip brackets if it's an IPv6 address
+    if host.startswith('[') and host.endswith(']'): host = host[1:-1]
+    if port is None: port = DEFAULT_PORT
+    return parsed_url.scheme, host, port, path
+
+
+# Given an HTTP request handler, this wrapper object provides a related
+# family of convenience methods built using that handler.
+class HttpLib:
+    """A set of convenient methods for making HTTP calls.
+
+    ``HttpLib`` provides a general :meth:`request` method, and :meth:`delete`,
+    :meth:`post`, and :meth:`get` methods for the three HTTP methods that Splunk
+    uses.
+
+    By default, ``HttpLib`` uses Python's built-in ``http.client`` library,
+    but you can replace it by passing your own handling function to the
+    constructor for ``HttpLib``.
+
+    The handling function should have the type:
+
+        ``handler(`url`, `request_dict`) -> response_dict``
+
+    where `url` is the URL to make the request to (including any query and
+    fragment sections), and `request_dict` is a dictionary with the following keys:
+
+        - method: The method for the request, typically ``GET``, ``POST``, or ``DELETE``.
+
+        - headers: A list of pairs specifying the HTTP headers (for example: ``[('key', value), ...]``).
+
+        - body: A string containing the body to send with the request (this string
+          should default to '').
+
+    and ``response_dict`` is a dictionary with the following keys:
+
+        - status: An integer containing the HTTP status code (such as 200 or 404).
+
+        - reason: The reason phrase, if any, returned by the server.
+
+        - headers: A list of pairs containing the response headers (for example, ``[('key', value), ...]``).
+
+        - body: A stream-like object supporting ``read(size=None)`` and ``close()``
+          methods to get the body of the response.
+
+    The response dictionary is returned directly by ``HttpLib``'s methods with
+    no further processing. By default, ``HttpLib`` calls the :func:`handler` function
+    to get a handler function.
+
+    If using the default handler, SSL verification can be disabled by passing verify=False.
+    """
+
+    def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None, context=None, retries=0,
+                 retryDelay=10):
+        if custom_handler is None:
+            self.handler = handler(verify=verify, key_file=key_file, cert_file=cert_file, context=context)
+        else:
+            self.handler = custom_handler
+        self._cookies = {}
+        self.retries = retries
+        self.retryDelay = retryDelay
+
+    def delete(self, url, headers=None, **kwargs):
+        """Sends a DELETE request to a URL.
+
+        :param url: The URL.
+        :type url: ``string``
+        :param headers: A list of pairs specifying the headers for the HTTP
+            request (for example, ``[('Content-Type', 'text/cthulhu'), ('Token', 'boris')]``).
+        :type headers: ``list``
+        :param kwargs: Additional keyword arguments (optional). These arguments
+            are interpreted as the query part of the URL. The order of keyword
+            arguments is not preserved in the request, but the keywords and
+            their arguments will be URL encoded.
+        :type kwargs: ``dict``
+        :returns: A dictionary describing the response (see :class:`HttpLib` for
+            its structure).
+        :rtype: ``dict``
+        """
+        if headers is None: headers = []
+        if kwargs:
+            # url is already a UrlEncoded. We have to manually declare
+            # the query to be encoded or it will get automatically URL
+            # encoded by being appended to url.
+            url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True)
+        message = {
+            'method': "DELETE",
+            'headers': headers,
+        }
+        return self.request(url, message)
+
+    def get(self, url, headers=None, **kwargs):
+        """Sends a GET request to a URL.
+
+        :param url: The URL.
+        :type url: ``string``
+        :param headers: A list of pairs specifying the headers for the HTTP
+            request (for example, ``[('Content-Type', 'text/cthulhu'), ('Token', 'boris')]``).
+        :type headers: ``list``
+        :param kwargs: Additional keyword arguments (optional). These arguments
+            are interpreted as the query part of the URL. The order of keyword
+            arguments is not preserved in the request, but the keywords and
+            their arguments will be URL encoded.
+        :type kwargs: ``dict``
+        :returns: A dictionary describing the response (see :class:`HttpLib` for
+            its structure).
+        :rtype: ``dict``
+        """
+        if headers is None: headers = []
+        if kwargs:
+            # url is already a UrlEncoded. We have to manually declare
+            # the query to be encoded or it will get automatically URL
+            # encoded by being appended to url.
+            url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True)
+        return self.request(url, {'method': "GET", 'headers': headers})
+
+    def post(self, url, headers=None, **kwargs):
+        """Sends a POST request to a URL.
+
+        :param url: The URL.
+        :type url: ``string``
+        :param headers: A list of pairs specifying the headers for the HTTP
+            request (for example, ``[('Content-Type', 'text/cthulhu'), ('Token', 'boris')]``).
+        :type headers: ``list``
+        :param kwargs: Additional keyword arguments (optional). If the argument
+            is ``body``, the value is used as the body for the request, and the
+            keywords and their arguments will be URL encoded. If there is no
+            ``body`` keyword argument, all the keyword arguments are encoded
+            into the body of the request in the format ``x-www-form-urlencoded``.
+        :type kwargs: ``dict``
+        :returns: A dictionary describing the response (see :class:`HttpLib` for
+            its structure).
+        :rtype: ``dict``
+        """
+        if headers is None: headers = []
+
+        # We handle GET-style arguments and an unstructured body. This is here
+        # to support the receivers/stream endpoint.
+        if 'body' in kwargs:
+            # We only use application/x-www-form-urlencoded if there is no other
+            # Content-Type header present. This can happen in cases where we
+            # send requests as application/json, e.g. for KV Store.
+            if len([x for x in headers if x[0].lower() == "content-type"]) == 0:
+                headers.append(("Content-Type", "application/x-www-form-urlencoded"))
+
+            body = kwargs.pop('body')
+            if isinstance(body, dict):
+                body = _encode(**body).encode('utf-8')
+            if len(kwargs) > 0:
+                url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True)
+        else:
+            body = _encode(**kwargs).encode('utf-8')
+        message = {
+            'method': "POST",
+            'headers': headers,
+            'body': body
+        }
+        return self.request(url, message)
+
+    def request(self, url, message, **kwargs):
+        """Issues an HTTP request to a URL.
+
+        :param url: The URL.
+        :type url: ``string``
+        :param message: A dictionary with the format as described in
+            :class:`HttpLib`.
+        :type message: ``dict``
+        :param kwargs: Additional keyword arguments (optional). These arguments
+            are passed unchanged to the handler.
+        :type kwargs: ``dict``
+        :returns: A dictionary describing the response (see :class:`HttpLib` for
+            its structure).
+        :rtype: ``dict``
+        """
+        while True:
+            try:
+                response = self.handler(url, message, **kwargs)
+                break
+            except Exception:
+                if self.retries <= 0:
+                    raise
+                else:
+                    time.sleep(self.retryDelay)
+                    self.retries -= 1
+        response = record(response)
+        if 400 <= response.status:
+            raise HTTPError(response)
+
+        # Update the cookie with any HTTP request
+        # Initially, assume list of 2-tuples
+        key_value_tuples = response.headers
+        # If response.headers is a dict, get the key-value pairs as 2-tuples
+        # this is the case when using urllib2
+        if isinstance(response.headers, dict):
+            key_value_tuples = list(response.headers.items())
+        for key, value in key_value_tuples:
+            if key.lower() == "set-cookie":
+                _parse_cookies(value, self._cookies)
+
+        return response
+
+
+# Converts an http.client response into a file-like object.
+class ResponseReader(io.RawIOBase):
+    """This class provides a file-like interface for ``http.client`` responses.
+
+    The ``ResponseReader`` class is intended to be a layer to unify the different
+    types of HTTP libraries used with this SDK. This class also provides a
+    preview of the stream and a few useful predicates.
+    """
+
+    # For testing, you can use a StringIO as the argument to
+    # ``ResponseReader`` instead of an ``http.client.HTTPResponse``. It
+    # will work equally well.
+    def __init__(self, response, connection=None):
+        self._response = response
+        self._connection = connection
+        self._buffer = b''
+
+    def __str__(self):
+        return str(self.read(), 'UTF-8')
+
+    @property
+    def empty(self):
+        """Indicates whether there is any more data in the response."""
+        return self.peek(1) == b""
+
+    def peek(self, size):
+        """Nondestructively retrieves a given number of characters.
+
+        The next :meth:`read` operation behaves as though this method was never
+        called.
+
+        :param size: The number of characters to retrieve.
+        :type size: ``integer``
+        """
+        c = self.read(size)
+        self._buffer = self._buffer + c
+        return c
+
+    def close(self):
+        """Closes this response."""
+        if self._connection:
+            self._connection.close()
+        self._response.close()
+
+    def read(self, size=None):
+        """Reads a given number of characters from the response.
+
+        :param size: The number of characters to read, or "None" to read the
+            entire response.
+        :type size: ``integer`` or "None"
+
+        """
+        r = self._buffer
+        self._buffer = b''
+        if size is not None:
+            size -= len(r)
+        r = r + self._response.read(size)
+        return r
+
+    def readable(self):
+        """ Indicates that the response reader is readable."""
+        return True
+
+    def readinto(self, byte_array):
+        """ Read data into a byte array, up to the size of the byte array.
+
+        :param byte_array: A byte array/memory view to pour bytes into.
+        :type byte_array: ``bytearray`` or ``memoryview``
+
+        """
+        max_size = len(byte_array)
+        data = self.read(max_size)
+        bytes_read = len(data)
+        byte_array[:bytes_read] = data
+        return bytes_read
+
+
+def handler(key_file=None, cert_file=None, timeout=None, verify=False, context=None):
+    """This function returns an instance of the default HTTP request handler using
+    the values you provide.
+
+    :param `key_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing your private key (optional).
+    :type key_file: ``string``
+    :param `cert_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing a certificate chain file (optional).
+    :type cert_file: ``string``
+    :param `timeout`: The request time-out period, in seconds (optional).
+    :type timeout: ``integer`` or "None"
+    :param `verify`: Set to False to disable SSL verification on https connections.
+    :type verify: ``Boolean``
+    :param `context`: The SSLContext that is used with the HTTPSConnection when verify=True is enabled and context is specified
+    :type context: ``SSLContext``
+    """
+
+    def connect(scheme, host, port):
+        kwargs = {}
+        if timeout is not None: kwargs['timeout'] = timeout
+        if scheme == "http":
+            return client.HTTPConnection(host, port, **kwargs)
+        if scheme == "https":
+            if key_file is not None: kwargs['key_file'] = key_file
+            if cert_file is not None: kwargs['cert_file'] = cert_file
+
+            if not verify:
+                kwargs['context'] = ssl._create_unverified_context()
+            elif context:
+                # verify is True in elif branch and context is not None
+                kwargs['context'] = context
+
+            return client.HTTPSConnection(host, port, **kwargs)
+        raise ValueError(f"unsupported scheme: {scheme}")
+
+    def request(url, message, **kwargs):
+        scheme, host, port, path = _spliturl(url)
+        body = message.get("body", "")
+        head = {
+            "Content-Length": str(len(body)),
+            "Host": host,
+            "User-Agent": f"splunk-sdk-python/{__version__}",
+            "Accept": "*/*",
+            "Connection": "Close",
+        }  # defaults
+        for key, value in message["headers"]:
+            head[key] = value
+        method = message.get("method", "GET")
+
+        connection = connect(scheme, host, port)
+        is_keepalive = False
+        try:
+            connection.request(method, path, body, head)
+            if timeout is not None:
+                connection.sock.settimeout(timeout)
+            response = connection.getresponse()
+            is_keepalive = "keep-alive" in response.getheader("connection", default="close").lower()
+        finally:
+            if not is_keepalive:
+                connection.close()
+
+        return {
+            "status": response.status,
+            "reason": response.reason,
+            "headers": response.getheaders(),
+            "body": ResponseReader(response, connection if is_keepalive else None),
+        }
+
+    return request
diff --git a/splunklib/client.py b/splunklib/client.py
index 090f9192..48861880 100644
--- a/splunklib/client.py
+++ b/splunklib/client.py
@@ -1,3908 +1,3908 @@
-# Copyright © 2011-2024 Splunk, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"): you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# The purpose of this module is to provide a friendlier domain interface to
-# various Splunk endpoints. The approach here is to leverage the binding
-# layer to capture endpoint context and provide objects and methods that
-# offer simplified access their corresponding endpoints. The design avoids
-# caching resource state. From the perspective of this module, the 'policy'
-# for caching resource state belongs in the application or a higher level
-# framework, and its the purpose of this module to provide simplified
-# access to that resource state.
-#
-# A side note, the objects below that provide helper methods for updating eg:
-# Entity state, are written so that they may be used in a fluent style.
-# - -"""The **splunklib.client** module provides a Pythonic interface to the -`Splunk REST API `_, -allowing you programmatically access Splunk's resources. - -**splunklib.client** wraps a Pythonic layer around the wire-level -binding of the **splunklib.binding** module. The core of the library is the -:class:`Service` class, which encapsulates a connection to the server, and -provides access to the various aspects of Splunk's functionality, which are -exposed via the REST API. Typically you connect to a running Splunk instance -with the :func:`connect` function:: - - import splunklib.client as client - service = client.connect(host='localhost', port=8089, - username='admin', password='...') - assert isinstance(service, client.Service) - -:class:`Service` objects have fields for the various Splunk resources (such as apps, -jobs, saved searches, inputs, and indexes). All of these fields are -:class:`Collection` objects:: - - appcollection = service.apps - my_app = appcollection.create('my_app') - my_app = appcollection['my_app'] - appcollection.delete('my_app') - -The individual elements of the collection, in this case *applications*, -are subclasses of :class:`Entity`. An ``Entity`` object has fields for its -attributes, and methods that are specific to each kind of entity. For example:: - - print(my_app['author']) # Or: print(my_app.author) - my_app.package() # Creates a compressed package of this application -""" - -import contextlib -import datetime -import json -import logging -import re -import socket -from datetime import datetime, timedelta -from time import sleep -from urllib import parse - -from splunklib import data -from splunklib.data import record -from splunklib.binding import (AuthenticationError, Context, HTTPError, UrlEncoded, - _encode, _make_cookie_header, _NoAuthenticationToken, - namespace) - -logger = logging.getLogger(__name__) - -__all__ = [ - "connect", - "NotSupportedError", - "OperationError", - "IncomparableException", - "Service", - "namespace", - "AuthenticationError" -] - -PATH_APPS = "apps/local/" -PATH_CAPABILITIES = "authorization/capabilities/" -PATH_CONF = "configs/conf-%s/" -PATH_PROPERTIES = "properties/" -PATH_DEPLOYMENT_CLIENTS = "deployment/client/" -PATH_DEPLOYMENT_TENANTS = "deployment/tenants/" -PATH_DEPLOYMENT_SERVERS = "deployment/server/" -PATH_DEPLOYMENT_SERVERCLASSES = "deployment/serverclass/" -PATH_EVENT_TYPES = "saved/eventtypes/" -PATH_FIRED_ALERTS = "alerts/fired_alerts/" -PATH_INDEXES = "data/indexes/" -PATH_INPUTS = "data/inputs/" -PATH_JOBS = "search/jobs/" -PATH_JOBS_V2 = "search/v2/jobs/" -PATH_LOGGER = "/services/server/logger/" -PATH_MESSAGES = "messages/" -PATH_MODULAR_INPUTS = "data/modular-inputs" -PATH_ROLES = "authorization/roles/" -PATH_SAVED_SEARCHES = "saved/searches/" -PATH_STANZA = "configs/conf-%s/%s" # (file, stanza) -PATH_USERS = "authentication/users/" -PATH_RECEIVERS_STREAM = "/services/receivers/stream" -PATH_RECEIVERS_SIMPLE = "/services/receivers/simple" -PATH_STORAGE_PASSWORDS = "storage/passwords" - -XNAMEF_ATOM = "{http://www.w3.org/2005/Atom}%s" -XNAME_ENTRY = XNAMEF_ATOM % "entry" -XNAME_CONTENT = XNAMEF_ATOM % "content" - -MATCH_ENTRY_CONTENT = f"{XNAME_ENTRY}/{XNAME_CONTENT}/*" - - -class IllegalOperationException(Exception): - """Thrown when an operation is not possible on the Splunk instance that a - :class:`Service` object is connected to.""" - - -class IncomparableException(Exception): - """Thrown when trying to compare objects (using ``==``, ``<``, ``>``, and - so on) of a type that doesn't support 
it.""" - - -class AmbiguousReferenceException(ValueError): - """Thrown when the name used to fetch an entity matches more than one entity.""" - - -class InvalidNameException(Exception): - """Thrown when the specified name contains characters that are not allowed - in Splunk entity names.""" - - -class NoSuchCapability(Exception): - """Thrown when the capability that has been referred to doesn't exist.""" - - -class OperationError(Exception): - """Raised for a failed operation, such as a timeout.""" - - -class NotSupportedError(Exception): - """Raised for operations that are not supported on a given object.""" - - -def _trailing(template, *targets): - """Substring of *template* following all *targets*. - - **Example**:: - - template = "this is a test of the bunnies." - _trailing(template, "is", "est", "the") == " bunnies" - - Each target is matched successively in the string, and the string - remaining after the last target is returned. If one of the targets - fails to match, a ValueError is raised. - - :param template: Template to extract a trailing string from. - :type template: ``string`` - :param targets: Strings to successively match in *template*. - :type targets: list of ``string``s - :return: Trailing string after all targets are matched. - :rtype: ``string`` - :raises ValueError: Raised when one of the targets does not match. - """ - s = template - for t in targets: - n = s.find(t) - if n == -1: - raise ValueError("Target " + t + " not found in template.") - s = s[n + len(t):] - return s - - -# Filter the given state content record according to the given arg list. -def _filter_content(content, *args): - if len(args) > 0: - return record((k, content[k]) for k in args) - return record((k, v) for k, v in content.items() - if k not in ['eai:acl', 'eai:attributes', 'type']) - - -# Construct a resource path from the given base path + resource name -def _path(base, name): - if not base.endswith('/'): base = base + '/' - return base + name - - -# Load an atom record from the body of the given response -# this will ultimately be sent to an xml ElementTree so we -# should use the xmlcharrefreplace option -def _load_atom(response, match=None): - return data.load(response.body.read() - .decode('utf-8', 'xmlcharrefreplace'), match) - - -# Load an array of atom entries from the body of the given response -def _load_atom_entries(response): - r = _load_atom(response) - if 'feed' in r: - # Need this to handle a random case in the REST API - if r.feed.get('totalResults') in [0, '0']: - return [] - entries = r.feed.get('entry', None) - if entries is None: return None - return entries if isinstance(entries, list) else [entries] - # Unlike most other endpoints, the jobs endpoint does not return - # its state wrapped in another element, but at the top level. - # For example, in XML, it returns ... instead of - # .... 
- entries = r.get('entry', None) - if entries is None: return None - return entries if isinstance(entries, list) else [entries] - - -# Load the sid from the body of the given response -def _load_sid(response, output_mode): - if output_mode == "json": - json_obj = json.loads(response.body.read()) - return json_obj.get('sid') - return _load_atom(response).response.sid - - -# Parse the given atom entry record into a generic entity state record -def _parse_atom_entry(entry): - title = entry.get('title', None) - - elink = entry.get('link', []) - elink = elink if isinstance(elink, list) else [elink] - links = record((link.rel, link.href) for link in elink) - - # Retrieve entity content values - content = entry.get('content', {}) - - # Host entry metadata - metadata = _parse_atom_metadata(content) - - # Filter some of the noise out of the content record - content = record((k, v) for k, v in content.items() - if k not in ['eai:acl', 'eai:attributes']) - - if 'type' in content: - if isinstance(content['type'], list): - content['type'] = [t for t in content['type'] if t != 'text/xml'] - # Unset type if it was only 'text/xml' - if len(content['type']) == 0: - content.pop('type', None) - # Flatten 1 element list - if len(content['type']) == 1: - content['type'] = content['type'][0] - else: - content.pop('type', None) - - return record({ - 'title': title, - 'links': links, - 'access': metadata.access, - 'fields': metadata.fields, - 'content': content, - 'updated': entry.get("updated") - }) - - -# Parse the metadata fields out of the given atom entry content record -def _parse_atom_metadata(content): - # Hoist access metadata - access = content.get('eai:acl', None) - - # Hoist content metadata (and cleanup some naming) - attributes = content.get('eai:attributes', {}) - fields = record({ - 'required': attributes.get('requiredFields', []), - 'optional': attributes.get('optionalFields', []), - 'wildcard': attributes.get('wildcardFields', [])}) - - return record({'access': access, 'fields': fields}) - - -# kwargs: scheme, host, port, app, owner, username, password -def connect(**kwargs): - """This function connects and logs in to a Splunk instance. - - This function is a shorthand for :meth:`Service.login`. - The ``connect`` function makes one round trip to the server (for logging in). - - :param host: The host name (the default is "localhost"). - :type host: ``string`` - :param port: The port number (the default is 8089). - :type port: ``integer`` - :param scheme: The scheme for accessing the service (the default is "https"). - :type scheme: "https" or "http" - :param verify: Enable (True) or disable (False) SSL verification for - https connections. (optional, the default is True) - :type verify: ``Boolean`` - :param `owner`: The owner context of the namespace (optional). - :type owner: ``string`` - :param `app`: The app context of the namespace (optional). - :type app: ``string`` - :param sharing: The sharing mode for the namespace (the default is "user"). - :type sharing: "global", "system", "app", or "user" - :param `token`: The current session token (optional). Session tokens can be - shared across multiple service instances. - :type token: ``string`` - :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. - This parameter is only supported for Splunk 6.2+. - :type cookie: ``string`` - :param autologin: When ``True``, automatically tries to log in again if the - session terminates. 
- :type autologin: ``boolean`` - :param `username`: The Splunk account username, which is used to - authenticate the Splunk instance. - :type username: ``string`` - :param `password`: The password for the Splunk account. - :type password: ``string`` - :param retires: Number of retries for each HTTP connection (optional, the default is 0). - NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER. - :type retries: ``int`` - :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s). - :type retryDelay: ``int`` (in seconds) - :param `context`: The SSLContext that can be used when setting verify=True (optional) - :type context: ``SSLContext`` - :return: An initialized :class:`Service` connection. - - **Example**:: - - import splunklib.client as client - s = client.connect(...) - a = s.apps["my_app"] - ... - """ - s = Service(**kwargs) - s.login() - return s - - -# In preparation for adding Storm support, we added an -# intermediary class between Service and Context. Storm's -# API is not going to be the same as enterprise Splunk's -# API, so we will derive both Service (for enterprise Splunk) -# and StormService for (Splunk Storm) from _BaseService, and -# put any shared behavior on it. -class _BaseService(Context): - pass - - -class Service(_BaseService): - """A Pythonic binding to Splunk instances. - - A :class:`Service` represents a binding to a Splunk instance on an - HTTP or HTTPS port. It handles the details of authentication, wire - formats, and wraps the REST API endpoints into something more - Pythonic. All of the low-level operations on the instance from - :class:`splunklib.binding.Context` are also available in case you need - to do something beyond what is provided by this class. - - After creating a ``Service`` object, you must call its :meth:`login` - method before you can issue requests to Splunk. - Alternately, use the :func:`connect` function to create an already - authenticated :class:`Service` object, or provide a session token - when creating the :class:`Service` object explicitly (the same - token may be shared by multiple :class:`Service` objects). - - :param host: The host name (the default is "localhost"). - :type host: ``string`` - :param port: The port number (the default is 8089). - :type port: ``integer`` - :param scheme: The scheme for accessing the service (the default is "https"). - :type scheme: "https" or "http" - :param verify: Enable (True) or disable (False) SSL verification for - https connections. (optional, the default is True) - :type verify: ``Boolean`` - :param `owner`: The owner context of the namespace (optional; use "-" for wildcard). - :type owner: ``string`` - :param `app`: The app context of the namespace (optional; use "-" for wildcard). - :type app: ``string`` - :param `token`: The current session token (optional). Session tokens can be - shared across multiple service instances. - :type token: ``string`` - :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. - This parameter is only supported for Splunk 6.2+. - :type cookie: ``string`` - :param `username`: The Splunk account username, which is used to - authenticate the Splunk instance. - :type username: ``string`` - :param `password`: The password, which is used to authenticate the Splunk - instance. - :type password: ``string`` - :param retires: Number of retries for each HTTP connection (optional, the default is 0). 
- NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER.
- :type retries: ``int``
- :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s).
- :type retryDelay: ``int`` (in seconds)
- :return: A :class:`Service` instance.
-
- **Example**::
-
- import splunklib.client as client
- s = client.Service(username="boris", password="natasha", ...)
- s.login()
- # Or equivalently
- s = client.connect(username="boris", password="natasha")
- # Or if you already have a session token
- s = client.Service(token="atg232342aa34324a")
- # Or if you already have a valid cookie
- s = client.Service(cookie="splunkd_8089=...")
- """
-
- def __init__(self, **kwargs):
- super().__init__(**kwargs)
- self._splunk_version = None
- self._kvstore_owner = None
- self._instance_type = None
-
- @property
- def apps(self):
- """Returns the collection of applications that are installed on this instance of Splunk.
-
- :return: A :class:`Collection` of :class:`Application` entities.
- """
- return Collection(self, PATH_APPS, item=Application)
-
- @property
- def confs(self):
- """Returns the collection of configuration files for this Splunk instance.
-
- :return: A :class:`Configurations` collection of
- :class:`ConfigurationFile` entities.
- """
- return Configurations(self)
-
- @property
- def capabilities(self):
- """Returns the list of system capabilities.
-
- :return: A ``list`` of capabilities.
- """
- response = self.get(PATH_CAPABILITIES)
- return _load_atom(response, MATCH_ENTRY_CONTENT).capabilities
-
- @property
- def event_types(self):
- """Returns the collection of event types defined in this Splunk instance.
-
- :return: A :class:`Collection` of event type entities.
- """
- return Collection(self, PATH_EVENT_TYPES)
-
- @property
- def fired_alerts(self):
- """Returns the collection of alerts that have been fired on the Splunk
- instance, grouped by saved search.
-
- :return: A :class:`Collection` of :class:`AlertGroup` entities.
- """
- return Collection(self, PATH_FIRED_ALERTS, item=AlertGroup)
-
- @property
- def indexes(self):
- """Returns the collection of indexes for this Splunk instance.
-
- :return: An :class:`Indexes` collection of :class:`Index` entities.
- """
- return Indexes(self, PATH_INDEXES, item=Index)
-
- @property
- def info(self):
- """Returns information about this instance of Splunk.
-
- :return: The system information, as key-value pairs.
- :rtype: ``dict``
- """
- response = self.get("/services/server/info")
- return _filter_content(_load_atom(response, MATCH_ENTRY_CONTENT))
-
- def input(self, path, kind=None):
- """Retrieves an input by path, and optionally kind.
-
- :return: An :class:`Input` object.
- """
- return Input(self, path, kind=kind).refresh()
-
- @property
- def inputs(self):
- """Returns the collection of inputs configured on this Splunk instance.
-
- :return: An :class:`Inputs` collection of :class:`Input` entities.
- """
- return Inputs(self)
-
- def job(self, sid):
- """Retrieves a search job by sid.
-
- :return: A :class:`Job` object.
- """
- return Job(self, sid).refresh()
-
- @property
- def jobs(self):
- """Returns the collection of current search jobs.
-
- :return: A :class:`Jobs` collection of :class:`Job` entities.
- """
- return Jobs(self)
-
- @property
- def loggers(self):
- """Returns the collection of logging level categories and their status.
-
- :return: A :class:`Loggers` collection of logging levels.
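-
- **Example** (a sketch; assumes a logged-in service)::
-
- import splunklib.client as client
- s = client.connect(...)
- for logger in s.loggers:
- print(logger.name, logger['level'])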
- """ - return Loggers(self) - - @property - def messages(self): - """Returns the collection of service messages. - - :return: A :class:`Collection` of :class:`Message` entities. - """ - return Collection(self, PATH_MESSAGES, item=Message) - - @property - def modular_input_kinds(self): - """Returns the collection of the modular input kinds on this Splunk instance. - - :return: A :class:`ReadOnlyCollection` of :class:`ModularInputKind` entities. - """ - if self.splunk_version >= (5,): - return ReadOnlyCollection(self, PATH_MODULAR_INPUTS, item=ModularInputKind) - raise IllegalOperationException("Modular inputs are not supported before Splunk version 5.") - - @property - def storage_passwords(self): - """Returns the collection of the storage passwords on this Splunk instance. - - :return: A :class:`ReadOnlyCollection` of :class:`StoragePasswords` entities. - """ - return StoragePasswords(self) - - # kwargs: enable_lookups, reload_macros, parse_only, output_mode - def parse(self, query, **kwargs): - """Parses a search query and returns a semantic map of the search. - - :param query: The search query to parse. - :type query: ``string`` - :param kwargs: Arguments to pass to the ``search/parser`` endpoint - (optional). Valid arguments are: - - * "enable_lookups" (``boolean``): If ``True``, performs reverse lookups - to expand the search expression. - - * "output_mode" (``string``): The output format (XML or JSON). - - * "parse_only" (``boolean``): If ``True``, disables the expansion of - search due to evaluation of subsearches, time term expansion, - lookups, tags, eventtypes, and sourcetype alias. - - * "reload_macros" (``boolean``): If ``True``, reloads macro - definitions from macros.conf. - - :type kwargs: ``dict`` - :return: A semantic map of the parsed search query. - """ - if not self.disable_v2_api: - return self.post("search/v2/parser", q=query, **kwargs) - return self.get("search/parser", q=query, **kwargs) - - def restart(self, timeout=None): - """Restarts this Splunk instance. - - The service is unavailable until it has successfully restarted. - - If a *timeout* value is specified, ``restart`` blocks until the service - resumes or the timeout period has been exceeded. Otherwise, ``restart`` returns - immediately. - - :param timeout: A timeout period, in seconds. - :type timeout: ``integer`` - """ - msg = {"value": "Restart requested by " + self.username + "via the Splunk SDK for Python"} - # This message will be deleted once the server actually restarts. - self.messages.create(name="restart_required", **msg) - result = self.post("/services/server/control/restart") - if timeout is None: - return result - start = datetime.now() - diff = timedelta(seconds=timeout) - while datetime.now() - start < diff: - try: - self.login() - if not self.restart_required: - return result - except Exception as e: - sleep(1) - raise Exception("Operation time out.") - - @property - def restart_required(self): - """Indicates whether splunkd is in a state that requires a restart. - - :return: A ``boolean`` that indicates whether a restart is required. - - """ - response = self.get("messages").body.read() - messages = data.load(response)['feed'] - if 'entry' not in messages: - result = False - else: - if isinstance(messages['entry'], dict): - titles = [messages['entry']['title']] - else: - titles = [x['title'] for x in messages['entry']] - result = 'restart_required' in titles - return result - - @property - def roles(self): - """Returns the collection of user roles. 
-
- :return: A :class:`Roles` collection of :class:`Role` entities.
- """
- return Roles(self)
-
- def search(self, query, **kwargs):
- """Runs a search using a search query and any optional arguments you
- provide, and returns a `Job` object representing the search.
-
- :param query: A search query.
- :type query: ``string``
- :param kwargs: Arguments for the search (optional):
-
- * "output_mode" (``string``): Specifies the output format of the
- results.
-
- * "earliest_time" (``string``): Specifies the earliest time in the
- time range to search. The time string can be a UTC time (with
- fractional seconds), a relative time specifier (to now), or a
- formatted time string.
-
- * "latest_time" (``string``): Specifies the latest time in the
- time range to search. The time string can be a UTC time (with
- fractional seconds), a relative time specifier (to now), or a
- formatted time string.
-
- * "rf" (``string``): Specifies one or more fields to add to the
- search.
-
- :type kwargs: ``dict``
- :rtype: :class:`Job`
- :returns: An object representing the created job.
- """
- return self.jobs.create(query, **kwargs)
-
- @property
- def saved_searches(self):
- """Returns the collection of saved searches.
-
- :return: A :class:`SavedSearches` collection of :class:`SavedSearch`
- entities.
- """
- return SavedSearches(self)
-
- @property
- def settings(self):
- """Returns the configuration settings for this instance of Splunk.
-
- :return: A :class:`Settings` object containing configuration settings.
- """
- return Settings(self)
-
- @property
- def splunk_version(self):
- """Returns the version of the splunkd instance this object is attached
- to.
-
- The version is returned as a tuple of the version components as
- integers (for example, `(4,3,3)` or `(5,)`).
-
- :return: A ``tuple`` of ``integers``.
- """
- if self._splunk_version is None:
- self._splunk_version = tuple(int(p) for p in self.info['version'].split('.'))
- return self._splunk_version
-
- @property
- def splunk_instance(self):
- """Returns the instance type of this Splunk deployment (for example,
- "cloud"), or an empty string if the server does not report one.
- """
- if self._instance_type is None:
- splunk_info = self.info
- if hasattr(splunk_info, 'instance_type'):
- self._instance_type = splunk_info['instance_type']
- else:
- self._instance_type = ''
- return self._instance_type
-
- @property
- def disable_v2_api(self):
- """Indicates whether the search API v2 endpoints should be avoided
- because this Splunk version does not support them.
- """
- if self.splunk_instance.lower() == 'cloud':
- return self.splunk_version < (9,0,2209)
- return self.splunk_version < (9,0,2)
-
- @property
- def kvstore_owner(self):
- """Returns the KVStore owner for this instance of Splunk.
-
- If the KVStore owner has not been set, it defaults to "nobody".
-
- :return: A string with the KVStore owner.
- """
- if self._kvstore_owner is None:
- self._kvstore_owner = "nobody"
- return self._kvstore_owner
-
- @kvstore_owner.setter
- def kvstore_owner(self, value):
- """
- The KVStore collection is refreshed when the owner value is changed.
- """
- self._kvstore_owner = value
- self.kvstore
-
- @property
- def kvstore(self):
- """Returns the collection of KV Store collections.
-
- Sets the owner in the service namespace before retrieving the KVStore collection.
-
- :return: A :class:`KVStoreCollections` collection of :class:`KVStoreCollection` entities.
- """
- self.namespace['owner'] = self.kvstore_owner
- return KVStoreCollections(self)
-
- @property
- def users(self):
- """Returns the collection of users.
-
- :return: A :class:`Users` collection of :class:`User` entities.
- """
- return Users(self)
-
-
- class Endpoint:
- """This class represents individual Splunk resources in the Splunk REST API.
-
- An ``Endpoint`` object represents a URI, such as ``/services/saved/searches``.
- This class provides the common functionality of :class:`Collection` and
- :class:`Entity` (essentially HTTP GET and POST methods).
- """
-
- def __init__(self, service, path):
- self.service = service
- self.path = path
-
- def get_api_version(self, path):
- """Return the API version of the service used in the provided path.
-
- Args:
- path (str): A fully-qualified endpoint path (for example, "/services/search/jobs").
-
- Returns:
- int: Version of the API (for example, 1)
- """
- # Default to v1 if undefined in the path
- # For example, "/services/search/jobs" is using API v1
- api_version = 1
-
- versionSearch = re.search(r'(?:servicesNS\/[^/]+\/[^/]+|services)\/[^/]+\/v(\d+)\/', path)
- if versionSearch:
- api_version = int(versionSearch.group(1))
-
- return api_version
-
- def get(self, path_segment="", owner=None, app=None, sharing=None, **query):
- """Performs a GET operation on the path segment relative to this endpoint.
-
- This method is named to match the HTTP method. This method makes at least
- one roundtrip to the server, one additional round trip for
- each 303 status returned, plus at most two additional round trips
- if the ``autologin`` field of :func:`connect` is set to ``True``.
-
- If *owner*, *app*, and *sharing* are omitted, this method takes a
- default namespace from the :class:`Service` object for this :class:`Endpoint`.
- All other keyword arguments are included in the URL as query parameters.
-
- :raises AuthenticationError: Raised when the ``Service`` is not logged in.
- :raises HTTPError: Raised when an error in the request occurs.
- :param path_segment: A path segment relative to this endpoint.
- :type path_segment: ``string``
- :param owner: The owner context of the namespace (optional).
- :type owner: ``string``
- :param app: The app context of the namespace (optional).
- :type app: ``string``
- :param sharing: The sharing mode for the namespace (optional).
- :type sharing: "global", "system", "app", or "user"
- :param query: All other keyword arguments, which are used as query
- parameters.
- :type query: ``string``
- :return: The response from the server.
- :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
- and ``status``
-
- **Example**::
-
- import splunklib.client as client
- s = client.connect(...)
- apps = s.apps
- apps.get() == \\
- {'body': ...a response reader object...,
- 'headers': [('content-length', '26208'),
- ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
- ('server', 'Splunkd'),
- ('connection', 'close'),
- ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
- ('date', 'Fri, 11 May 2012 16:30:35 GMT'),
- ('content-type', 'text/xml; charset=utf-8')],
- 'reason': 'OK',
- 'status': 200}
- apps.get('nonexistent/path') # raises HTTPError
- s.logout()
- apps.get() # raises AuthenticationError
- """
- # self.path to the Endpoint is relative in the SDK, so passing
- # owner, app, sharing, etc. along will produce the correct
- # namespace in the final request.
- if path_segment.startswith('/'):
- path = path_segment
- else:
- if not self.path.endswith('/') and path_segment != "":
- self.path = self.path + '/'
- path = self.service._abspath(self.path + path_segment, owner=owner,
- app=app, sharing=sharing)
- # ^-- This was "%s%s" % (self.path, path_segment).
- # That doesn't work, because self.path may be UrlEncoded.
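- # For illustration: get_api_version("services/search/v2/jobs/") returns 2,
- # while a path with no version segment, such as "services/search/jobs/",
- # falls back to the default of 1.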
-
- # Get the API version from the path
- api_version = self.get_api_version(path)
-
- # Search API v2+ fallback to v1:
- # - In v2+, /results_preview, /events and /results do not support search params.
- # - Fallback from v2+ to v1 if Splunk Version is < 9.
- # if api_version >= 2 and ('search' in query and path.endswith(tuple(["results_preview", "events", "results"])) or self.service.splunk_version < (9,)):
- # path = path.replace(PATH_JOBS_V2, PATH_JOBS)
-
- if api_version == 1:
- if isinstance(path, UrlEncoded):
- path = UrlEncoded(path.replace(PATH_JOBS_V2, PATH_JOBS), skip_encode=True)
- else:
- path = path.replace(PATH_JOBS_V2, PATH_JOBS)
-
- return self.service.get(path,
- owner=owner, app=app, sharing=sharing,
- **query)
-
- def post(self, path_segment="", owner=None, app=None, sharing=None, **query):
- """Performs a POST operation on the path segment relative to this endpoint.
-
- This method is named to match the HTTP method. This method makes at least
- one roundtrip to the server, one additional round trip for
- each 303 status returned, plus at most two additional round trips if
- the ``autologin`` field of :func:`connect` is set to ``True``.
-
- If *owner*, *app*, and *sharing* are omitted, this method takes a
- default namespace from the :class:`Service` object for this :class:`Endpoint`.
- All other keyword arguments are included in the URL as query parameters.
-
- :raises AuthenticationError: Raised when the ``Service`` is not logged in.
- :raises HTTPError: Raised when an error in the request occurs.
- :param path_segment: A path segment relative to this endpoint.
- :type path_segment: ``string``
- :param owner: The owner context of the namespace (optional).
- :type owner: ``string``
- :param app: The app context of the namespace (optional).
- :type app: ``string``
- :param sharing: The sharing mode of the namespace (optional).
- :type sharing: ``string``
- :param query: All other keyword arguments, which are used as query
- parameters.
- :type query: ``string``
- :return: The response from the server.
- :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
- and ``status``
-
- **Example**::
-
- import splunklib.client as client
- s = client.connect(...)
- apps = s.apps
- apps.post(name='boris') == \\
- {'body': ...a response reader object...,
- 'headers': [('content-length', '2908'),
- ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
- ('server', 'Splunkd'),
- ('connection', 'close'),
- ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
- ('date', 'Fri, 11 May 2012 18:34:50 GMT'),
- ('content-type', 'text/xml; charset=utf-8')],
- 'reason': 'Created',
- 'status': 201}
- apps.get('nonexistent/path') # raises HTTPError
- s.logout()
- apps.get() # raises AuthenticationError
- """
- if path_segment.startswith('/'):
- path = path_segment
- else:
- if not self.path.endswith('/') and path_segment != "":
- self.path = self.path + '/'
- path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing)
-
- # Get the API version from the path
- api_version = self.get_api_version(path)
-
- # Search API v2+ fallback to v1:
- # - In v2+, /results_preview, /events and /results do not support search params.
- # - Fallback from v2+ to v1 if Splunk Version is < 9.
- # if api_version >= 2 and ('search' in query and path.endswith(tuple(["results_preview", "events", "results"])) or self.service.splunk_version < (9,)):
- # path = path.replace(PATH_JOBS_V2, PATH_JOBS)
-
- if api_version == 1:
- if isinstance(path, UrlEncoded):
- path = UrlEncoded(path.replace(PATH_JOBS_V2, PATH_JOBS), skip_encode=True)
- else:
- path = path.replace(PATH_JOBS_V2, PATH_JOBS)
-
- return self.service.post(path, owner=owner, app=app, sharing=sharing, **query)
-
-
- # kwargs: path, app, owner, sharing, state
- class Entity(Endpoint):
- """This class is a base class for Splunk entities in the REST API, such as
- saved searches, jobs, indexes, and inputs.
-
- ``Entity`` provides the majority of functionality required by entities.
- Subclasses only implement the special cases for individual entities.
- For example, for saved searches, the subclass makes fields like ``action.email``,
- ``alert_type``, and ``search`` available.
-
- An ``Entity`` is addressed like a dictionary, with a few extensions,
- so the following all work, for example in saved searches::
-
- ent['action.email']
- ent['alert_type']
- ent['search']
-
- You can also access the fields as though they were the fields of a Python
- object, as in::
-
- ent.alert_type
- ent.search
-
- However, because some of the field names are not valid Python identifiers,
- the dictionary-like syntax is preferable.
-
- The state of an :class:`Entity` object is cached, so accessing a field
- does not contact the server. If you think the values on the
- server have changed, call the :meth:`Entity.refresh` method.
- """
- # Not every endpoint in the API is an Entity or a Collection. For
- # example, a saved search at saved/searches/{name} has an additional
- # method saved/searches/{name}/scheduled_times, but this isn't an
- # entity in its own right. In these cases, subclasses should
- # implement a method that uses the get and post methods inherited
- # from Endpoint, calls the _load_atom function (it's elsewhere in
- # client.py, but not a method of any object) to read the
- # information, and returns the extracted data in a Pythonesque form.
- #
- # The primary use of subclasses of Entity is to handle specially
- # named fields in the Entity. If you only need to provide a default
- # value for an optional field, subclass Entity and define a
- # dictionary ``defaults``. For instance::
- #
- # class Hypothetical(Entity):
- # defaults = {'anOptionalField': 'foo',
- # 'anotherField': 'bar'}
- #
- # If you have to do more than provide a default, such as rename or
- # actually process values, then define a new method with the
- # ``@property`` decorator.
- #
- # class Hypothetical(Entity):
- # @property
- # def foobar(self):
- # return self.content['foo'] + "-" + self.content["bar"]
-
- # Subclasses can override ``defaults``, the default values for
- # optional fields. See above.
- defaults = {}
-
- def __init__(self, service, path, **kwargs):
- Endpoint.__init__(self, service, path)
- self._state = None
- if not kwargs.get('skip_refresh', False):
- self.refresh(kwargs.get('state', None)) # "Prefresh"
-
- def __contains__(self, item):
- try:
- self[item]
- return True
- except (KeyError, AttributeError):
- return False
-
- def __eq__(self, other):
- """Raises IncomparableException.
-
- Since Entity objects are snapshots of the server's state at
- particular times, no simple definition of equality will
- suffice beyond instance equality, and instance equality leads
- to strange situations such as::
-
- import splunklib.client as client
- c = client.connect(...)
- saved_searches = c.saved_searches
- x = saved_searches['asearch']
-
- but then ``x != saved_searches['asearch']``, whether or not
- there was a change on the server. Rather than try to do
- something fancy, we simply declare that equality is undefined
- for Entities.
-
- Makes no roundtrips to the server.
- """
- raise IncomparableException(f"Equality is undefined for objects of class {self.__class__.__name__}")
-
- def __getattr__(self, key):
- # Called when an attribute was not found by the normal method. In this
- # case we try to find it in self.content and then self.defaults.
- if key in self.state.content:
- return self.state.content[key]
- if key in self.defaults:
- return self.defaults[key]
- raise AttributeError(key)
-
- def __getitem__(self, key):
- # getattr attempts to find a field on the object in the normal way,
- # then calls __getattr__ if it cannot.
- return getattr(self, key)
-
- # Load the Atom entry record from the given response - this is a method
- # because the "entry" record varies slightly by entity and this allows
- # for a subclass to override and handle any special cases.
- def _load_atom_entry(self, response):
- elem = _load_atom(response, XNAME_ENTRY)
- if isinstance(elem, list):
- apps = [ele.entry.content.get('eai:appName') for ele in elem]
- raise AmbiguousReferenceException(
- f"Fetch from server returned multiple entries for name '{elem[0].entry.title}' in apps {apps}.")
- return elem.entry
-
- # Load the entity state record from the given response
- def _load_state(self, response):
- entry = self._load_atom_entry(response)
- return _parse_atom_entry(entry)
-
- def _run_action(self, path_segment, **kwargs):
- """Run a method and return the content Record from the returned XML.
-
- A method is a relative path from an Entity that is not itself
- an Entity. _run_action assumes that the returned XML is an
- Atom feed containing one entry, and the contents of that entry
- is what should be the return value. This is right in enough cases
- to make this method useful.
- """
- response = self.get(path_segment, **kwargs)
- data = self._load_atom_entry(response)
- rec = _parse_atom_entry(data)
- return rec.content
-
- def _proper_namespace(self, owner=None, app=None, sharing=None):
- """Produce a namespace sans wildcards for use in entity requests.
-
- This method tries to fill in the fields of the namespace which are `None`
- or wildcard (`'-'`) from the entity's namespace. If that fails, it uses
- the service's namespace.
-
- :param owner: An owner name (optional).
- :param app: An app name (optional).
- :param sharing: A sharing mode (optional).
- :return: A tuple of (owner, app, sharing).
- """
- if owner is None and app is None and sharing is None: # No namespace provided
- if self._state is not None and 'access' in self._state:
- return (self._state.access.owner,
- self._state.access.app,
- self._state.access.sharing)
- return (self.service.namespace['owner'],
- self.service.namespace['app'],
- self.service.namespace['sharing'])
- return owner, app, sharing
-
- def delete(self):
- owner, app, sharing = self._proper_namespace()
- return self.service.delete(self.path, owner=owner, app=app, sharing=sharing)
-
- def get(self, path_segment="", owner=None, app=None, sharing=None, **query):
- owner, app, sharing = self._proper_namespace(owner, app, sharing)
- return super().get(path_segment, owner=owner, app=app, sharing=sharing, **query)
-
- def post(self, path_segment="", owner=None, app=None, sharing=None, **query):
- owner, app, sharing = self._proper_namespace(owner, app, sharing)
- return super().post(path_segment, owner=owner, app=app, sharing=sharing, **query)
-
- def refresh(self, state=None):
- """Refreshes the state of this entity.
-
- If *state* is provided, load it as the new state for this
- entity. Otherwise, make a roundtrip to the server (by calling
- the :meth:`read` method of ``self``) to fetch an updated state,
- plus at most two additional round trips if
- the ``autologin`` field of :func:`connect` is set to ``True``.
-
- :param state: A new state record for this entity (optional).
- :type state: ``dict``
- :raises EntityDeletedException: Raised if the entity no longer exists on
- the server.
-
- **Example**::
-
- import splunklib.client as client
- s = client.connect(...)
- search = s.apps['search']
- search.refresh()
- """
- if state is not None:
- self._state = state
- else:
- self._state = self.read(self.get())
- return self
-
- @property
- def access(self):
- """Returns the access metadata for this entity.
-
- :return: A :class:`splunklib.data.Record` object with three keys:
- ``owner``, ``app``, and ``sharing``.
- """
- return self.state.access
-
- @property
- def content(self):
- """Returns the contents of the entity.
-
- :return: A ``dict`` containing values.
- """
- return self.state.content
-
- def disable(self):
- """Disables the entity at this endpoint."""
- self.post("disable")
- return self
-
- def enable(self):
- """Enables the entity at this endpoint."""
- self.post("enable")
- return self
-
- @property
- def fields(self):
- """Returns the content metadata for this entity.
-
- :return: A :class:`splunklib.data.Record` object with three keys:
- ``required``, ``optional``, and ``wildcard``.
- """
- return self.state.fields
-
- @property
- def links(self):
- """Returns a dictionary of related resources.
-
- :return: A ``dict`` with keys and corresponding URLs.
- """
- return self.state.links
-
- @property
- def name(self):
- """Returns the entity name.
-
- :return: The entity name.
- :rtype: ``string``
- """
- return self.state.title
-
- def read(self, response):
- """Parses the state of this entity from the given server response."""
- results = self._load_state(response)
- # In lower layers of the SDK, we end up trying to URL encode
- # text to be dispatched via HTTP. However, these links are already
- # URL encoded when they arrive, and we need to mark them as such.
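- # For example, a link value such as
- # "/servicesNS/nobody/search/saved/searches/My%20Search" must keep its
- # "%20" as-is; encoding it again would corrupt it to "%2520".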
- unquoted_links = dict((k, UrlEncoded(v, skip_encode=True))
- for k, v in results['links'].items())
- results['links'] = unquoted_links
- return results
-
- def reload(self):
- """Reloads the entity."""
- self.post("_reload")
- return self
-
- def acl_update(self, **kwargs):
- """Updates the access control list (ACL) properties for this endpoint.
-
- :param kwargs: Additional entity-specific arguments (required).
-
- - "owner" (``string``): The Splunk username, such as "admin". A value of "nobody" means no specific user (required).
-
- - "sharing" (``string``): A mode that indicates how the resource is shared. The sharing mode can be "user", "app", "global", or "system" (required).
-
- :type kwargs: ``dict``
-
- **Example**::
-
- import splunklib.client as client
- service = client.connect(...)
- saved_search = service.saved_searches["name"]
- saved_search.acl_update(sharing="app", owner="nobody", app="search", **{"perms.read": "admin, nobody"})
- """
- if "body" not in kwargs:
- kwargs = {"body": kwargs}
-
- if "sharing" not in kwargs["body"]:
- raise ValueError("Required argument 'sharing' is missing.")
- if "owner" not in kwargs["body"]:
- raise ValueError("Required argument 'owner' is missing.")
-
- self.post("acl", **kwargs)
- self.refresh()
- return self
-
- @property
- def state(self):
- """Returns the entity's state record.
-
- :return: A ``dict`` containing fields and metadata for the entity.
- """
- if self._state is None: self.refresh()
- return self._state
-
- def update(self, **kwargs):
- """Updates the server with any changes you've made to the current entity
- along with any additional arguments you specify.
-
- **Note**: You cannot update the ``name`` field of an entity.
-
- Many of the fields in the REST API are not valid Python
- identifiers, which means you cannot pass them as keyword
- arguments. That is, Python will fail to parse the following::
-
- # This fails
- x.update(check-new=False, email.to='boris@utopia.net')
-
- However, you can always explicitly use a dictionary to pass
- such keys::
-
- # This works
- x.update(**{'check-new': False, 'email.to': 'boris@utopia.net'})
-
- :param kwargs: Additional entity-specific arguments (optional).
- :type kwargs: ``dict``
-
- :return: The entity this method is called on.
- :rtype: :class:`Entity`
- """
- # Peculiarity of the REST API: it creates a new Entity if we
- # pass 'name' in the dictionary, instead of the expected
- # behavior of updating this Entity. Therefore, we check for
- # 'name' in kwargs and throw an error if it is there.
- if 'name' in kwargs:
- raise IllegalOperationException('Cannot update the name of an Entity via the REST API.')
- self.post(**kwargs)
- return self
-
-
- class ReadOnlyCollection(Endpoint):
- """This class represents a read-only collection of entities in the Splunk
- instance.
- """
-
- def __init__(self, service, path, item=Entity):
- Endpoint.__init__(self, service, path)
- self.item = item # Item accessor
- self.null_count = -1
-
- def __contains__(self, name):
- """Is there at least one entry called *name* in this collection?
-
- Makes a single roundtrip to the server, plus at most two more
- if the ``autologin`` field of :func:`connect` is set to ``True``.
- """
- try:
- self[name]
- return True
- except KeyError:
- return False
- except AmbiguousReferenceException:
- return True
-
- def __getitem__(self, key):
- """Fetch an item named *key* from this collection.
-
- A name is not a unique identifier in a collection. The unique
- identifier is a name plus a namespace.
For example, there can
- be a saved search named ``'mysearch'`` with sharing ``'app'``
- in application ``'search'``, and another with sharing
- ``'user'`` with owner ``'boris'`` and application
- ``'search'``. If the ``Collection`` is attached to a
- ``Service`` that has ``'-'`` (wildcard) as user and app in its
- namespace, then both of these may be visible under the same
- name.
-
- Where there is no conflict, ``__getitem__`` will fetch the
- entity given just the name. If there is a conflict, and you
- pass just a name, it will raise a ``ValueError``. In that
- case, add the namespace as a second argument.
-
- This function makes a single roundtrip to the server, plus at
- most two additional round trips if
- the ``autologin`` field of :func:`connect` is set to ``True``.
-
- :param key: The name to fetch, or a tuple (name, namespace).
- :return: An :class:`Entity` object.
- :raises KeyError: Raised if *key* does not exist.
- :raises ValueError: Raised if no namespace is specified and *key*
- does not refer to a unique name.
-
- **Example**::
-
- s = client.connect(...)
- saved_searches = s.saved_searches
- x1 = saved_searches.create(
- 'mysearch', 'search * | head 1',
- owner='admin', app='search', sharing='app')
- x2 = saved_searches.create(
- 'mysearch', 'search * | head 1',
- owner='admin', app='search', sharing='user')
- # Raises ValueError:
- saved_searches['mysearch']
- # Fetches x1
- saved_searches[
- 'mysearch',
- client.namespace(sharing='app', app='search')]
- # Fetches x2
- saved_searches[
- 'mysearch',
- client.namespace(sharing='user', owner='admin', app='search')]
- """
- try:
- if isinstance(key, tuple) and len(key) == 2:
- # x[a,b] is translated to x.__getitem__( (a,b) ), so we
- # have to extract values out.
- key, ns = key
- key = UrlEncoded(key, encode_slash=True)
- response = self.get(key, owner=ns.owner, app=ns.app)
- else:
- key = UrlEncoded(key, encode_slash=True)
- response = self.get(key)
- entries = self._load_list(response)
- if len(entries) > 1:
- raise AmbiguousReferenceException(
- f"Found multiple entities named '{key}'; please specify a namespace.")
- if len(entries) == 0:
- raise KeyError(key)
- return entries[0]
- except HTTPError as he:
- if he.status == 404: # No entity matching key and namespace.
- raise KeyError(key)
- else:
- raise
-
- def __iter__(self, **kwargs):
- """Iterate over the entities in the collection.
-
- :param kwargs: Additional arguments.
- :type kwargs: ``dict``
- :rtype: iterator over entities.
-
- Implemented to give Collection a listish interface. This
- function always makes a roundtrip to the server, plus at most
- two additional round trips if
- the ``autologin`` field of :func:`connect` is set to ``True``.
-
- **Example**::
-
- import splunklib.client as client
- c = client.connect(...)
- saved_searches = c.saved_searches
- for entity in saved_searches:
- print(f"Saved search named {entity.name}")
- """
-
- for item in self.iter(**kwargs):
- yield item
-
- def __len__(self):
- """Enable ``len(...)`` for ``Collection`` objects.
-
- Implemented for consistency with a listish interface. No
- further failure modes beyond those possible for any method on
- an Endpoint.
-
- This function always makes a round trip to the server, plus at
- most two additional round trips if
- the ``autologin`` field of :func:`connect` is set to ``True``.
-
- **Example**::
-
- import splunklib.client as client
- c = client.connect(...)
- saved_searches = c.saved_searches
- n = len(saved_searches)
- """
- return len(self.list())
-
- def _entity_path(self, state):
- """Calculate the path to an entity to be returned.
-
- *state* should be the dictionary returned by
- :func:`_parse_atom_entry`. :func:`_entity_path` extracts the
- link to this entity from *state*, and strips all the namespace
- prefixes from it to leave only the relative path of the entity
- itself, sans namespace.
-
- :rtype: ``string``
- :return: an absolute path
- """
- # This has been factored out so that it can be easily
- # overloaded by Configurations, which has to switch its
- # entities' endpoints from its own properties/ to configs/.
- raw_path = parse.unquote(state.links.alternate)
- if 'servicesNS/' in raw_path:
- return _trailing(raw_path, 'servicesNS/', '/', '/')
- if 'services/' in raw_path:
- return _trailing(raw_path, 'services/')
- return raw_path
-
- def _load_list(self, response):
- """Converts *response* to a list of entities.
-
- *response* is assumed to be a :class:`Record` containing an
- HTTP response, of the form::
-
- {'status': 200,
- 'headers': [('content-length', '232642'),
- ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
- ('server', 'Splunkd'),
- ('connection', 'close'),
- ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
- ('date', 'Tue, 29 May 2012 15:27:08 GMT'),
- ('content-type', 'text/xml; charset=utf-8')],
- 'reason': 'OK',
- 'body': ...a stream implementing .read()...}
-
- The ``'body'`` key refers to a stream containing an Atom feed,
- that is, an XML document with a top-level element ``<feed>``,
- and within that element one or more ``<entry>`` elements.
- """
- # Some subclasses of Collection have to override this because
- # splunkd returns something that doesn't match
- # <feed><entry></entry></feed>.
- entries = _load_atom_entries(response)
- if entries is None: return []
- entities = []
- for entry in entries:
- state = _parse_atom_entry(entry)
- entity = self.item(
- self.service,
- self._entity_path(state),
- state=state)
- entities.append(entity)
-
- return entities
-
- def itemmeta(self):
- """Returns metadata for members of the collection.
-
- Makes a single roundtrip to the server, plus two more at most if
- the ``autologin`` field of :func:`connect` is set to ``True``.
-
- :return: A :class:`splunklib.data.Record` object containing the metadata.
-
- **Example**::
-
- import splunklib.client as client
- import pprint
- s = client.connect(...)
- pprint.pprint(s.apps.itemmeta())
- {'access': {'app': 'search',
- 'can_change_perms': '1',
- 'can_list': '1',
- 'can_share_app': '1',
- 'can_share_global': '1',
- 'can_share_user': '1',
- 'can_write': '1',
- 'modifiable': '1',
- 'owner': 'admin',
- 'perms': {'read': ['*'], 'write': ['admin']},
- 'removable': '0',
- 'sharing': 'user'},
- 'fields': {'optional': ['author',
- 'configured',
- 'description',
- 'label',
- 'manageable',
- 'template',
- 'visible'],
- 'required': ['name'], 'wildcard': []}}
- """
- response = self.get("_new")
- content = _load_atom(response, MATCH_ENTRY_CONTENT)
- return _parse_atom_metadata(content)
-
- def iter(self, offset=0, count=None, pagesize=None, **kwargs):
- """Iterates over the collection.
-
- This method is equivalent to the :meth:`list` method, but
- it returns an iterator and can load a certain number of entities at a
- time from the server.
-
- :param offset: The index of the first entity to return (optional).
- :type offset: ``integer``
- :param count: The maximum number of entities to return (optional).
- :type count: ``integer``
- :param pagesize: The number of entities to load (optional).
- :type pagesize: ``integer``
- :param kwargs: Additional arguments (optional):
-
- - "search" (``string``): The search query to filter responses.
-
- - "sort_dir" (``string``): The direction to sort returned items:
- "asc" or "desc".
-
- - "sort_key" (``string``): The field to use for sorting (optional).
-
- - "sort_mode" (``string``): The collating sequence for sorting
- returned items: "auto", "alpha", "alpha_case", or "num".
-
- :type kwargs: ``dict``
-
- **Example**::
-
- import splunklib.client as client
- s = client.connect(...)
- for saved_search in s.saved_searches.iter(pagesize=10):
- # Loads 10 saved searches at a time from the
- # server.
- ...
- """
- assert pagesize is None or pagesize > 0
- if count is None:
- count = self.null_count
- fetched = 0
- while count == self.null_count or fetched < count:
- response = self.get(count=pagesize or count, offset=offset, **kwargs)
- items = self._load_list(response)
- N = len(items)
- fetched += N
- for item in items:
- yield item
- if pagesize is None or N < pagesize:
- break
- offset += N
- # pagesize may be None, so format it with %s rather than %d.
- logger.debug("pagesize=%s, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs)
-
- # kwargs: count, offset, search, sort_dir, sort_key, sort_mode
- def list(self, count=None, **kwargs):
- """Retrieves a list of entities in this collection.
-
- The entire collection is loaded at once and is returned as a list. This
- function makes a single roundtrip to the server, plus at most two more if
- the ``autologin`` field of :func:`connect` is set to ``True``.
- There is no caching--every call makes at least one round trip.
-
- :param count: The maximum number of entities to return (optional).
- :type count: ``integer``
- :param kwargs: Additional arguments (optional):
-
- - "offset" (``integer``): The offset of the first item to return.
-
- - "search" (``string``): The search query to filter responses.
-
- - "sort_dir" (``string``): The direction to sort returned items:
- "asc" or "desc".
-
- - "sort_key" (``string``): The field to use for sorting (optional).
-
- - "sort_mode" (``string``): The collating sequence for sorting
- returned items: "auto", "alpha", "alpha_case", or "num".
-
- :type kwargs: ``dict``
- :return: A ``list`` of entities.
- """
- # response = self.get(count=count, **kwargs)
- # return self._load_list(response)
- return list(self.iter(count=count, **kwargs))
-
-
- class Collection(ReadOnlyCollection):
- """A collection of entities.
-
- Splunk provides a number of different collections of distinct
- entity types: applications, saved searches, fired alerts, and a
- number of others. Each particular type is available separately
- from the Splunk instance, and the entities of that type are
- returned in a :class:`Collection`.
-
- The interface for :class:`Collection` does not quite match either
- ``list`` or ``dict`` in Python, because there are enough semantic
- mismatches with either to make its behavior surprising. A unique
- element in a :class:`Collection` is defined by a string giving its
- name plus namespace (although the namespace is optional if the name is
- unique).
-
- **Example**::
-
- import splunklib.client as client
- service = client.connect(...)
- mycollection = service.saved_searches
- mysearch = mycollection['my_search', client.namespace(owner='boris', app='natasha', sharing='user')]
- # Or if there is only one search visible named 'my_search'
- mysearch = mycollection['my_search']
-
- Similarly, ``name`` in ``mycollection`` works as you might expect (though
- you cannot currently pass a namespace to the ``in`` operator), as does
- ``len(mycollection)``.
-
- However, as an aggregate, :class:`Collection` behaves more like a
- list. If you iterate over a :class:`Collection`, you get an
- iterator over the entities, not the names and namespaces.
-
- **Example**::
-
- for entity in mycollection:
- assert isinstance(entity, client.Entity)
-
- Use the :meth:`create` and :meth:`delete` methods to create and delete
- entities in this collection. To view the access control list and other
- metadata of the collection, use the :meth:`ReadOnlyCollection.itemmeta` method.
-
- :class:`Collection` does no caching. Each call makes at least one
- round trip to the server to fetch data.
- """
-
- def create(self, name, **params):
- """Creates a new entity in this collection.
-
- This function makes either one or two roundtrips to the
- server, depending on the type of entities in this
- collection, plus at most two more if
- the ``autologin`` field of :func:`connect` is set to ``True``.
-
- :param name: The name of the entity to create.
- :type name: ``string``
- :param namespace: A namespace, as created by the :func:`splunklib.binding.namespace`
- function (optional). You can also set ``owner``, ``app``, and
- ``sharing`` in ``params``.
- :type namespace: A :class:`splunklib.data.Record` object with keys ``owner``, ``app``,
- and ``sharing``.
- :param params: Additional entity-specific arguments (optional).
- :type params: ``dict``
- :return: The new entity.
- :rtype: A subclass of :class:`Entity`, chosen by the collection's ``item`` attribute.
-
- **Example**::
-
- import splunklib.client as client
- s = client.connect(...)
- applications = s.apps
- new_app = applications.create("my_fake_app")
- """
- if not isinstance(name, str):
- raise InvalidNameException(f"{name} is not a valid name for an entity.")
- if 'namespace' in params:
- namespace = params.pop('namespace')
- params['owner'] = namespace.owner
- params['app'] = namespace.app
- params['sharing'] = namespace.sharing
- response = self.post(name=name, **params)
- atom = _load_atom(response, XNAME_ENTRY)
- if atom is None:
- # This endpoint doesn't return the content of the new
- # item. We have to go fetch it ourselves.
- return self[name]
- entry = atom.entry
- state = _parse_atom_entry(entry)
- entity = self.item(
- self.service,
- self._entity_path(state),
- state=state)
- return entity
-
- def delete(self, name, **params):
- """Deletes a specified entity from the collection.
-
- :param name: The name of the entity to delete.
- :type name: ``string``
- :return: The collection.
- :rtype: ``self``
-
- This method is implemented for consistency with the REST API's DELETE
- method.
-
- If there is no *name* entity on the server, a ``KeyError`` is
- raised. This function always makes a roundtrip to the server.
-
- **Example**::
-
- import splunklib.client as client
- c = client.connect(...)
- saved_searches = c.saved_searches
- saved_searches.create('my_saved_search',
- 'search * | head 1')
- assert 'my_saved_search' in saved_searches
- saved_searches.delete('my_saved_search')
- assert 'my_saved_search' not in saved_searches
- """
- name = UrlEncoded(name, encode_slash=True)
- if 'namespace' in params:
- namespace = params.pop('namespace')
- params['owner'] = namespace.owner
- params['app'] = namespace.app
- params['sharing'] = namespace.sharing
- try:
- self.service.delete(_path(self.path, name), **params)
- except HTTPError as he:
- # An HTTPError with status code 404 means that the entity
- # has already been deleted, and we reraise it as a
- # KeyError.
- if he.status == 404:
- raise KeyError(f"No such entity {name}")
- else:
- raise
- return self
-
- def get(self, name="", owner=None, app=None, sharing=None, **query):
- """Performs a GET request to the server on the collection.
-
- If *owner*, *app*, and *sharing* are omitted, this method takes a
- default namespace from the :class:`Service` object for this :class:`Endpoint`.
- All other keyword arguments are included in the URL as query parameters.
-
- :raises AuthenticationError: Raised when the ``Service`` is not logged in.
- :raises HTTPError: Raised when an error in the request occurs.
- :param name: The name of the entity to retrieve, relative to this
- collection (optional).
- :type name: ``string``
- :param owner: The owner context of the namespace (optional).
- :type owner: ``string``
- :param app: The app context of the namespace (optional).
- :type app: ``string``
- :param sharing: The sharing mode for the namespace (optional).
- :type sharing: "global", "system", "app", or "user"
- :param query: All other keyword arguments, which are used as query
- parameters.
- :type query: ``string``
- :return: The response from the server.
- :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
- and ``status``
-
- **Example**::
-
- import splunklib.client as client
- s = client.connect(...)
- saved_searches = s.saved_searches
- saved_searches.get("my/saved/search") == \\
- {'body': ...a response reader object...,
- 'headers': [('content-length', '26208'),
- ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
- ('server', 'Splunkd'),
- ('connection', 'close'),
- ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
- ('date', 'Fri, 11 May 2012 16:30:35 GMT'),
- ('content-type', 'text/xml; charset=utf-8')],
- 'reason': 'OK',
- 'status': 200}
- saved_searches.get('nonexistent/search') # raises HTTPError
- s.logout()
- saved_searches.get() # raises AuthenticationError
-
- """
- name = UrlEncoded(name, encode_slash=True)
- return super().get(name, owner, app, sharing, **query)
-
-
- class ConfigurationFile(Collection):
- """This class contains all of the stanzas from one configuration file.
- """
-
- # __init__'s arguments must match those of an Entity, not a
- # Collection, since it is created as an element of a
- # Configurations, which is a Collection subclass.
- def __init__(self, service, path, **kwargs):
- Collection.__init__(self, service, path, item=Stanza)
- self.name = kwargs['state']['title']
-
-
- class Configurations(Collection):
- """This class provides access to the configuration files from this Splunk
- instance. Retrieve this collection using :meth:`Service.confs`.
-
- Splunk's configuration is divided into files, and each file into
- stanzas. This collection is unusual in that the values in it are
- themselves collections of :class:`ConfigurationFile` objects.
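-
- **Example** (a sketch; assumes the standard "props" configuration file exists)::
-
- import splunklib.client as client
- s = client.connect(...)
- props = s.confs['props']
- for stanza in props:
- print(stanza.name)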
- """ - - def __init__(self, service): - Collection.__init__(self, service, PATH_PROPERTIES, item=ConfigurationFile) - if self.service.namespace.owner == '-' or self.service.namespace.app == '-': - raise ValueError("Configurations cannot have wildcards in namespace.") - - def __getitem__(self, key): - # The superclass implementation is designed for collections that contain - # entities. This collection (Configurations) contains collections - # (ConfigurationFile). - # - # The configurations endpoint returns multiple entities when we ask for a single file. - # This screws up the default implementation of __getitem__ from Collection, which thinks - # that multiple entities means a name collision, so we have to override it here. - try: - self.get(key) - return ConfigurationFile(self.service, PATH_CONF % key, state={'title': key}) - except HTTPError as he: - if he.status == 404: # No entity matching key - raise KeyError(key) - else: - raise - - def __contains__(self, key): - # configs/conf-{name} never returns a 404. We have to post to properties/{name} - # in order to find out if a configuration exists. - try: - self.get(key) - return True - except HTTPError as he: - if he.status == 404: # No entity matching key - return False - raise - - def create(self, name): - """ Creates a configuration file named *name*. - - If there is already a configuration file with that name, - the existing file is returned. - - :param name: The name of the configuration file. - :type name: ``string`` - - :return: The :class:`ConfigurationFile` object. - """ - # This has to be overridden to handle the plumbing of creating - # a ConfigurationFile (which is a Collection) instead of some - # Entity. - if not isinstance(name, str): - raise ValueError(f"Invalid name: {repr(name)}") - response = self.post(__conf=name) - if response.status == 303: - return self[name] - if response.status == 201: - return ConfigurationFile(self.service, PATH_CONF % name, item=Stanza, state={'title': name}) - raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") - - def delete(self, key): - """Raises `IllegalOperationException`.""" - raise IllegalOperationException("Cannot delete configuration files from the REST API.") - - def _entity_path(self, state): - # Overridden to make all the ConfigurationFile objects - # returned refer to the configs/ path instead of the - # properties/ path used by Configrations. - return PATH_CONF % state['title'] - - -class Stanza(Entity): - """This class contains a single configuration stanza.""" - - def submit(self, stanza): - """Adds keys to the current configuration stanza as a - dictionary of key-value pairs. - - :param stanza: A dictionary of key-value pairs for the stanza. - :type stanza: ``dict`` - :return: The :class:`Stanza` object. - """ - body = _encode(**stanza) - self.service.post(self.path, body=body) - return self - - def __len__(self): - # The stanza endpoint returns all the keys at the same level in the XML as the eai information - # and 'disabled', so to get an accurate length, we have to filter those out and have just - # the stanza keys. - return len([x for x in self._state.content.keys() - if not x.startswith('eai') and x != 'disabled']) - - -class StoragePassword(Entity): - """This class contains a storage password. 
- """ - - def __init__(self, service, path, **kwargs): - state = kwargs.get('state', None) - kwargs['skip_refresh'] = kwargs.get('skip_refresh', state is not None) - super().__init__(service, path, **kwargs) - self._state = state - - @property - def clear_password(self): - return self.content.get('clear_password') - - @property - def encrypted_password(self): - return self.content.get('encr_password') - - @property - def realm(self): - return self.content.get('realm') - - @property - def username(self): - return self.content.get('username') - - -class StoragePasswords(Collection): - """This class provides access to the storage passwords from this Splunk - instance. Retrieve this collection using :meth:`Service.storage_passwords`. - """ - - def __init__(self, service): - if service.namespace.owner == '-' or service.namespace.app == '-': - raise ValueError("StoragePasswords cannot have wildcards in namespace.") - super().__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) - - def create(self, password, username, realm=None): - """ Creates a storage password. - - A `StoragePassword` can be identified by , or by : if the - optional realm parameter is also provided. - - :param password: The password for the credentials - this is the only part of the credentials that will be stored securely. - :type name: ``string`` - :param username: The username for the credentials. - :type name: ``string`` - :param realm: The credential realm. (optional) - :type name: ``string`` - - :return: The :class:`StoragePassword` object created. - """ - if not isinstance(username, str): - raise ValueError(f"Invalid name: {repr(username)}") - - if realm is None: - response = self.post(password=password, name=username) - else: - response = self.post(password=password, realm=realm, name=username) - - if response.status != 201: - raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") - - entries = _load_atom_entries(response) - state = _parse_atom_entry(entries[0]) - storage_password = StoragePassword(self.service, self._entity_path(state), state=state, skip_refresh=True) - - return storage_password - - def delete(self, username, realm=None): - """Delete a storage password by username and/or realm. - - The identifier can be passed in through the username parameter as - or :, but the preferred way is by - passing in the username and realm parameters. - - :param username: The username for the credentials, or : if the realm parameter is omitted. - :type name: ``string`` - :param realm: The credential realm. (optional) - :type name: ``string`` - :return: The `StoragePassword` collection. - :rtype: ``self`` - """ - if realm is None: - # This case makes the username optional, so - # the full name can be passed in as realm. - # Assume it's already encoded. - name = username - else: - # Encode each component separately - name = UrlEncoded(realm, encode_slash=True) + ":" + UrlEncoded(username, encode_slash=True) - - # Append the : expected at the end of the name - if name[-1] != ":": - name = name + ":" - return Collection.delete(self, name) - - -class AlertGroup(Entity): - """This class represents a group of fired alerts for a saved search. Access - it using the :meth:`alerts` property.""" - - def __init__(self, service, path, **kwargs): - Entity.__init__(self, service, path, **kwargs) - - def __len__(self): - return self.count - - @property - def alerts(self): - """Returns a collection of triggered alerts. - - :return: A :class:`Collection` of triggered alerts. 
- """ - return Collection(self.service, self.path) - - @property - def count(self): - """Returns the count of triggered alerts. - - :return: The triggered alert count. - :rtype: ``integer`` - """ - return int(self.content.get('triggered_alert_count', 0)) - - -class Indexes(Collection): - """This class contains the collection of indexes in this Splunk instance. - Retrieve this collection using :meth:`Service.indexes`. - """ - - def get_default(self): - """ Returns the name of the default index. - - :return: The name of the default index. - - """ - index = self['_audit'] - return index['defaultDatabase'] - - def delete(self, name): - """ Deletes a given index. - - **Note**: This method is only supported in Splunk 5.0 and later. - - :param name: The name of the index to delete. - :type name: ``string`` - """ - if self.service.splunk_version >= (5,): - Collection.delete(self, name) - else: - raise IllegalOperationException("Deleting indexes via the REST API is " - "not supported before Splunk version 5.") - - -class Index(Entity): - """This class represents an index and provides different operations, such as - cleaning the index, writing to the index, and so forth.""" - - def __init__(self, service, path, **kwargs): - Entity.__init__(self, service, path, **kwargs) - - def attach(self, host=None, source=None, sourcetype=None): - """Opens a stream (a writable socket) for writing events to the index. - - :param host: The host value for events written to the stream. - :type host: ``string`` - :param source: The source value for events written to the stream. - :type source: ``string`` - :param sourcetype: The sourcetype value for events written to the - stream. - :type sourcetype: ``string`` - - :return: A writable socket. - """ - args = {'index': self.name} - if host is not None: args['host'] = host - if source is not None: args['source'] = source - if sourcetype is not None: args['sourcetype'] = sourcetype - path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + parse.urlencode(args), skip_encode=True) - - cookie_header = self.service.token if self.service.token is _NoAuthenticationToken else self.service.token.replace("Splunk ", "") - cookie_or_auth_header = f"Authorization: Splunk {cookie_header}\r\n" - - # If we have cookie(s), use them instead of "Authorization: ..." - if self.service.has_cookies(): - cookie_header = _make_cookie_header(self.service.get_cookies().items()) - cookie_or_auth_header = f"Cookie: {cookie_header}\r\n" - - # Since we need to stream to the index connection, we have to keep - # the connection open and use the Splunk extension headers to note - # the input mode - sock = self.service.connect() - headers = [f"POST {str(self.service._abspath(path))} HTTP/1.1\r\n".encode('utf-8'), - f"Host: {self.service.host}:{int(self.service.port)}\r\n".encode('utf-8'), - b"Accept-Encoding: identity\r\n", - cookie_or_auth_header.encode('utf-8'), - b"X-Splunk-Input-Mode: Streaming\r\n", - b"\r\n"] - - for h in headers: - sock.write(h) - return sock - - @contextlib.contextmanager - def attached_socket(self, *args, **kwargs): - """Opens a raw socket in a ``with`` block to write data to Splunk. - - The arguments are identical to those for :meth:`attach`. The socket is - automatically closed at the end of the ``with`` block, even if an - exception is raised in the block. - - :param host: The host value for events written to the stream. - :type host: ``string`` - :param source: The source value for events written to the stream. 
- :type source: ``string`` - :param sourcetype: The sourcetype value for events written to the - stream. - :type sourcetype: ``string`` - - :returns: Nothing. - - **Example**:: - - import splunklib.client as client - s = client.connect(...) - index = s.indexes['some_index'] - with index.attached_socket(sourcetype='test') as sock: - sock.send('Test event\\r\\n') - - """ - try: - sock = self.attach(*args, **kwargs) - yield sock - finally: - sock.shutdown(socket.SHUT_RDWR) - sock.close() - - def clean(self, timeout=60): - """Deletes the contents of the index. - - This method blocks until the index is empty, because it needs to restore - values at the end of the operation. - - :param timeout: The time-out period for the operation, in seconds (the - default is 60). - :type timeout: ``integer`` - - :return: The :class:`Index`. - """ - self.refresh() - - tds = self['maxTotalDataSizeMB'] - ftp = self['frozenTimePeriodInSecs'] - was_disabled_initially = self.disabled - try: - if not was_disabled_initially and self.service.splunk_version < (5,): - # Need to disable the index first on Splunk 4.x, - # but it doesn't work to disable it on 5.0. - self.disable() - self.update(maxTotalDataSizeMB=1, frozenTimePeriodInSecs=1) - self.roll_hot_buckets() - - # Wait until event count goes to 0. - start = datetime.now() - diff = timedelta(seconds=timeout) - while self.content.totalEventCount != '0' and datetime.now() < start + diff: - sleep(1) - self.refresh() - - if self.content.totalEventCount != '0': - raise OperationError( - f"Cleaning index {self.name} took longer than {timeout} seconds; timing out.") - finally: - # Restore original values - self.update(maxTotalDataSizeMB=tds, frozenTimePeriodInSecs=ftp) - if not was_disabled_initially and self.service.splunk_version < (5,): - # Re-enable the index if it was originally enabled and we messed with it. - self.enable() - - return self - - def roll_hot_buckets(self): - """Performs rolling hot buckets for this index. - - :return: The :class:`Index`. - """ - self.post("roll-hot-buckets") - return self - - def submit(self, event, host=None, source=None, sourcetype=None): - """Submits a single event to the index using ``HTTP POST``. - - :param event: The event to submit. - :type event: ``string`` - :param `host`: The host value of the event. - :type host: ``string`` - :param `source`: The source value of the event. - :type source: ``string`` - :param `sourcetype`: The sourcetype value of the event. - :type sourcetype: ``string`` - - :return: The :class:`Index`. - """ - args = {'index': self.name} - if host is not None: args['host'] = host - if source is not None: args['source'] = source - if sourcetype is not None: args['sourcetype'] = sourcetype - - self.service.post(PATH_RECEIVERS_SIMPLE, body=event, **args) - return self - - # kwargs: host, host_regex, host_segment, rename-source, sourcetype - def upload(self, filename, **kwargs): - """Uploads a file for immediate indexing. - - **Note**: The file must be locally accessible from the server. - - :param filename: The name of the file to upload. The file can be a - plain, compressed, or archived file. - :type filename: ``string`` - :param kwargs: Additional arguments (optional). For more about the - available parameters, see `Index parameters `_ on Splunk Developer Portal. - :type kwargs: ``dict`` - - :return: The :class:`Index`. 
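A hedged sketch of the three ingestion paths above (streaming attach, single-event submit, and server-side file upload), assuming the connected ``service`` from the earlier sketch; the index name, events, and file path are assumptions::

    index = service.indexes['main']

    # One event over HTTP POST.
    index.submit('hello from the SDK', sourcetype='demo')

    # Many events over the raw streaming socket; the with-block closes it.
    with index.attached_socket(sourcetype='demo') as sock:
        sock.send(b'event one\r\nevent two\r\n')

    # A file that must be readable from the *server* machine.
    index.upload('/var/log/myapp/app.log')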
- """ - kwargs['index'] = self.name - path = 'data/inputs/oneshot' - self.service.post(path, name=filename, **kwargs) - return self - - -class Input(Entity): - """This class represents a Splunk input. This class is the base for all - typed input classes and is also used when the client does not recognize an - input kind. - """ - - def __init__(self, service, path, kind=None, **kwargs): - # kind can be omitted (in which case it is inferred from the path) - # Otherwise, valid values are the paths from data/inputs ("udp", - # "monitor", "tcp/raw"), or two special cases: "tcp" (which is "tcp/raw") - # and "splunktcp" (which is "tcp/cooked"). - Entity.__init__(self, service, path, **kwargs) - if kind is None: - path_segments = path.split('/') - i = path_segments.index('inputs') + 1 - if path_segments[i] == 'tcp': - self.kind = path_segments[i] + '/' + path_segments[i + 1] - else: - self.kind = path_segments[i] - else: - self.kind = kind - - # Handle old input kind names. - if self.kind == 'tcp': - self.kind = 'tcp/raw' - if self.kind == 'splunktcp': - self.kind = 'tcp/cooked' - - def update(self, **kwargs): - """Updates the server with any changes you've made to the current input - along with any additional arguments you specify. - - :param kwargs: Additional arguments (optional). For more about the - available parameters, see `Input parameters `_ on Splunk Developer Portal. - :type kwargs: ``dict`` - - :return: The input this method was called on. - :rtype: class:`Input` - """ - # UDP and TCP inputs require special handling due to their restrictToHost - # field. For all other inputs kinds, we can dispatch to the superclass method. - if self.kind not in ['tcp', 'splunktcp', 'tcp/raw', 'tcp/cooked', 'udp']: - return super().update(**kwargs) - else: - # The behavior of restrictToHost is inconsistent across input kinds and versions of Splunk. - # In Splunk 4.x, the name of the entity is only the port, independent of the value of - # restrictToHost. In Splunk 5.0 this changed so the name will be of the form :. - # In 5.0 and 5.0.1, if you don't supply the restrictToHost value on every update, it will - # remove the host restriction from the input. As of 5.0.2 you simply can't change restrictToHost - # on an existing input. - - # The logic to handle all these cases: - # - Throw an exception if the user tries to set restrictToHost on an existing input - # for *any* version of Splunk. - # - Set the existing restrictToHost value on the update args internally so we don't - # cause it to change in Splunk 5.0 and 5.0.1. - to_update = kwargs.copy() - - if 'restrictToHost' in kwargs: - raise IllegalOperationException("Cannot set restrictToHost on an existing input with the SDK.") - if 'restrictToHost' in self._state.content and self.kind != 'udp': - to_update['restrictToHost'] = self._state.content['restrictToHost'] - - # Do the actual update operation. - return super().update(**to_update) - - -# Inputs is a "kinded" collection, which is a heterogenous collection where -# each item is tagged with a kind, that provides a single merged view of all -# input kinds. -class Inputs(Collection): - """This class represents a collection of inputs. The collection is - heterogeneous and each member of the collection contains a *kind* property - that indicates the specific type of input. 
-    Retrieve this collection using :meth:`Service.inputs`."""
-
-    def __init__(self, service, kindmap=None):
-        Collection.__init__(self, service, PATH_INPUTS, item=Input)
-
-    def __getitem__(self, key):
-        # The key needed to retrieve the input needs its parentheses to be URL encoded
-        # based on the REST API for input
-        #
-        if isinstance(key, tuple) and len(key) == 2:
-            # Fetch a single kind
-            key, kind = key
-            key = UrlEncoded(key, encode_slash=True)
-            try:
-                response = self.get(self.kindpath(kind) + "/" + key)
-                entries = self._load_list(response)
-                if len(entries) > 1:
-                    raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.")
-                if len(entries) == 0:
-                    raise KeyError((key, kind))
-                return entries[0]
-            except HTTPError as he:
-                if he.status == 404:  # No entity matching kind and key
-                    raise KeyError((key, kind))
-                else:
-                    raise
-        else:
-            # Iterate over all the kinds looking for matches.
-            kind = None
-            candidate = None
-            key = UrlEncoded(key, encode_slash=True)
-            for kind in self.kinds:
-                try:
-                    response = self.get(kind + "/" + key)
-                    entries = self._load_list(response)
-                    if len(entries) > 1:
-                        raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.")
-                    if len(entries) == 0:
-                        pass
-                    else:
-                        if candidate is not None:  # Already found at least one candidate
-                            raise AmbiguousReferenceException(
-                                f"Found multiple inputs named {key}, please specify a kind")
-                        candidate = entries[0]
-                except HTTPError as he:
-                    if he.status == 404:
-                        pass  # Just carry on to the next kind.
-                    else:
-                        raise
-            if candidate is None:
-                raise KeyError(key)  # Never found a match.
-            return candidate
-
-    def __contains__(self, key):
-        if isinstance(key, tuple) and len(key) == 2:
-            # If we specify a kind, this will shortcut properly
-            try:
-                self.__getitem__(key)
-                return True
-            except KeyError:
-                return False
-        else:
-            # Without a kind, we want to minimize the number of round trips to the server, so we
-            # reimplement some of the behavior of __getitem__ in order to be able to stop searching
-            # on the first hit.
-            for kind in self.kinds:
-                try:
-                    response = self.get(self.kindpath(kind) + "/" + key)
-                    entries = self._load_list(response)
-                    if len(entries) > 0:
-                        return True
-                except HTTPError as he:
-                    if he.status == 404:
-                        pass  # Just carry on to the next kind.
-                    else:
-                        raise
-            return False
-
-    def create(self, name, kind, **kwargs):
-        """Creates an input of a specific kind in this collection, with any
-        arguments you specify.
-
-        :param `name`: The input name.
-        :type name: ``string``
-        :param `kind`: The kind of input:
-
-            - "ad": Active Directory
-
-            - "monitor": Files and directories
-
-            - "registry": Windows Registry
-
-            - "script": Scripts
-
-            - "splunktcp": TCP, processed
-
-            - "tcp": TCP, unprocessed
-
-            - "udp": UDP
-
-            - "win-event-log-collections": Windows event log
-
-            - "win-perfmon": Performance monitoring
-
-            - "win-wmi-collections": WMI
-
-        :type kind: ``string``
-        :param `kwargs`: Additional arguments (optional). For more about the
-            available parameters, see `Input parameters `_ on Splunk Developer Portal.
-
-        :type kwargs: ``dict``
-
-        :return: The new :class:`Input`.
-        """
-        kindpath = self.kindpath(kind)
-        self.post(kindpath, name=name, **kwargs)
-
-        # If we created an input with restrictToHost set, then
-        # its path will be <restrictToHost>:<name>, not just <name>,
-        # and we have to adjust accordingly.
-
-        # Url encodes the name of the entity.
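The two lookup styles implemented in ``__getitem__`` above look like this in practice; the input names are assumptions::

    # Unambiguous: a (name, kind) tuple hits a single endpoint.
    monitor = service.inputs['/var/log/messages', 'monitor']

    # Name only: every kind is searched. More than one match raises
    # AmbiguousReferenceException; no match raises KeyError.
    try:
        some_input = service.inputs['9001']
    except client.AmbiguousReferenceException:
        some_input = service.inputs['9001', 'tcp']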
- name = UrlEncoded(name, encode_slash=True) - path = _path( - self.path + kindpath, - f"{kwargs['restrictToHost']}:{name}" if 'restrictToHost' in kwargs else name - ) - return Input(self.service, path, kind) - - def delete(self, name, kind=None): - """Removes an input from the collection. - - :param `kind`: The kind of input: - - - "ad": Active Directory - - - "monitor": Files and directories - - - "registry": Windows Registry - - - "script": Scripts - - - "splunktcp": TCP, processed - - - "tcp": TCP, unprocessed - - - "udp": UDP - - - "win-event-log-collections": Windows event log - - - "win-perfmon": Performance monitoring - - - "win-wmi-collections": WMI - - :type kind: ``string`` - :param name: The name of the input to remove. - :type name: ``string`` - - :return: The :class:`Inputs` collection. - """ - if kind is None: - self.service.delete(self[name].path) - else: - self.service.delete(self[name, kind].path) - return self - - def itemmeta(self, kind): - """Returns metadata for the members of a given kind. - - :param `kind`: The kind of input: - - - "ad": Active Directory - - - "monitor": Files and directories - - - "registry": Windows Registry - - - "script": Scripts - - - "splunktcp": TCP, processed - - - "tcp": TCP, unprocessed - - - "udp": UDP - - - "win-event-log-collections": Windows event log - - - "win-perfmon": Performance monitoring - - - "win-wmi-collections": WMI - - :type kind: ``string`` - - :return: The metadata. - :rtype: class:``splunklib.data.Record`` - """ - response = self.get(f"{self._kindmap[kind]}/_new") - content = _load_atom(response, MATCH_ENTRY_CONTENT) - return _parse_atom_metadata(content) - - def _get_kind_list(self, subpath=None): - if subpath is None: - subpath = [] - - kinds = [] - response = self.get('/'.join(subpath)) - content = _load_atom_entries(response) - for entry in content: - this_subpath = subpath + [entry.title] - # The "all" endpoint doesn't work yet. - # The "tcp/ssl" endpoint is not a real input collection. - if entry.title == 'all' or this_subpath == ['tcp', 'ssl']: - continue - if 'create' in [x.rel for x in entry.link]: - path = '/'.join(subpath + [entry.title]) - kinds.append(path) - else: - subkinds = self._get_kind_list(subpath + [entry.title]) - kinds.extend(subkinds) - return kinds - - @property - def kinds(self): - """Returns the input kinds on this Splunk instance. - - :return: The list of input kinds. - :rtype: ``list`` - """ - return self._get_kind_list() - - def kindpath(self, kind): - """Returns a path to the resources for a given input kind. - - :param `kind`: The kind of input: - - - "ad": Active Directory - - - "monitor": Files and directories - - - "registry": Windows Registry - - - "script": Scripts - - - "splunktcp": TCP, processed - - - "tcp": TCP, unprocessed - - - "udp": UDP - - - "win-event-log-collections": Windows event log - - - "win-perfmon": Performance monitoring - - - "win-wmi-collections": WMI - - :type kind: ``string`` - - :return: The relative endpoint path. - :rtype: ``string`` - """ - if kind == 'tcp': - return UrlEncoded('tcp/raw', skip_encode=True) - if kind == 'splunktcp': - return UrlEncoded('tcp/cooked', skip_encode=True) - return UrlEncoded(kind, skip_encode=True) - - def list(self, *kinds, **kwargs): - """Returns a list of inputs that are in the :class:`Inputs` collection. - You can also filter by one or more input kinds. - - This function iterates over all possible inputs, regardless of any arguments you - specify. 
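A quick sketch of the kind-path translation above; only the legacy names "tcp" and "splunktcp" are rewritten, everything else passes through unchanged::

    inputs = service.inputs
    assert inputs.kindpath('tcp') == 'tcp/raw'
    assert inputs.kindpath('splunktcp') == 'tcp/cooked'
    assert inputs.kindpath('monitor') == 'monitor'

    # kinds walks data/inputs and returns the creatable endpoints.
    print(inputs.kinds)  # e.g. ['monitor', 'script', 'tcp/raw', ...]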
Because the :class:`Inputs` collection is the union of all the inputs of each - kind, this method implements parameters such as "count", "search", and so - on at the Python level once all the data has been fetched. The exception - is when you specify a single input kind, and then this method makes a single request - with the usual semantics for parameters. - - :param kinds: The input kinds to return (optional). - - - "ad": Active Directory - - - "monitor": Files and directories - - - "registry": Windows Registry - - - "script": Scripts - - - "splunktcp": TCP, processed - - - "tcp": TCP, unprocessed - - - "udp": UDP - - - "win-event-log-collections": Windows event log - - - "win-perfmon": Performance monitoring - - - "win-wmi-collections": WMI - - :type kinds: ``string`` - :param kwargs: Additional arguments (optional): - - - "count" (``integer``): The maximum number of items to return. - - - "offset" (``integer``): The offset of the first item to return. - - - "search" (``string``): The search query to filter responses. - - - "sort_dir" (``string``): The direction to sort returned items: - "asc" or "desc". - - - "sort_key" (``string``): The field to use for sorting (optional). - - - "sort_mode" (``string``): The collating sequence for sorting - returned items: "auto", "alpha", "alpha_case", or "num". - - :type kwargs: ``dict`` - - :return: A list of input kinds. - :rtype: ``list`` - """ - if len(kinds) == 0: - kinds = self.kinds - if len(kinds) == 1: - kind = kinds[0] - logger.debug("Inputs.list taking short circuit branch for single kind.") - path = self.kindpath(kind) - logger.debug("Path for inputs: %s", path) - try: - path = UrlEncoded(path, skip_encode=True) - response = self.get(path, **kwargs) - except HTTPError as he: - if he.status == 404: # No inputs of this kind - return [] - entities = [] - entries = _load_atom_entries(response) - if entries is None: - return [] # No inputs in a collection comes back with no feed or entry in the XML - for entry in entries: - state = _parse_atom_entry(entry) - # Unquote the URL, since all URL encoded in the SDK - # should be of type UrlEncoded, and all str should not - # be URL encoded. - path = parse.unquote(state.links.alternate) - entity = Input(self.service, path, kind, state=state) - entities.append(entity) - return entities - - search = kwargs.get('search', '*') - - entities = [] - for kind in kinds: - response = None - try: - kind = UrlEncoded(kind, skip_encode=True) - response = self.get(self.kindpath(kind), search=search) - except HTTPError as e: - if e.status == 404: - continue # No inputs of this kind - else: - raise - - entries = _load_atom_entries(response) - if entries is None: continue # No inputs to process - for entry in entries: - state = _parse_atom_entry(entry) - # Unquote the URL, since all URL encoded in the SDK - # should be of type UrlEncoded, and all str should not - # be URL encoded. 
- path = parse.unquote(state.links.alternate) - entity = Input(self.service, path, kind, state=state) - entities.append(entity) - if 'offset' in kwargs: - entities = entities[kwargs['offset']:] - if 'count' in kwargs: - entities = entities[:kwargs['count']] - if kwargs.get('sort_mode', None) == 'alpha': - sort_field = kwargs.get('sort_field', 'name') - if sort_field == 'name': - f = lambda x: x.name.lower() - else: - f = lambda x: x[sort_field].lower() - entities = sorted(entities, key=f) - if kwargs.get('sort_mode', None) == 'alpha_case': - sort_field = kwargs.get('sort_field', 'name') - if sort_field == 'name': - f = lambda x: x.name - else: - f = lambda x: x[sort_field] - entities = sorted(entities, key=f) - if kwargs.get('sort_dir', 'asc') == 'desc': - entities = list(reversed(entities)) - return entities - - def __iter__(self, **kwargs): - for item in self.iter(**kwargs): - yield item - - def iter(self, **kwargs): - """ Iterates over the collection of inputs. - - :param kwargs: Additional arguments (optional): - - - "count" (``integer``): The maximum number of items to return. - - - "offset" (``integer``): The offset of the first item to return. - - - "search" (``string``): The search query to filter responses. - - - "sort_dir" (``string``): The direction to sort returned items: - "asc" or "desc". - - - "sort_key" (``string``): The field to use for sorting (optional). - - - "sort_mode" (``string``): The collating sequence for sorting - returned items: "auto", "alpha", "alpha_case", or "num". - - :type kwargs: ``dict`` - """ - for item in self.list(**kwargs): - yield item - - def oneshot(self, path, **kwargs): - """ Creates a oneshot data input, which is an upload of a single file - for one-time indexing. - - :param path: The path and filename. - :type path: ``string`` - :param kwargs: Additional arguments (optional). For more about the - available parameters, see `Input parameters `_ on Splunk Developer Portal. - :type kwargs: ``dict`` - """ - self.post('oneshot', name=path, **kwargs) - - -class Job(Entity): - """This class represents a search job.""" - - def __init__(self, service, sid, **kwargs): - # Default to v2 in Splunk Version 9+ - path = "{path}{sid}" - # Formatting path based on the Splunk Version - if service.disable_v2_api: - path = path.format(path=PATH_JOBS, sid=sid) - else: - path = path.format(path=PATH_JOBS_V2, sid=sid) - - Entity.__init__(self, service, path, skip_refresh=True, **kwargs) - self.sid = sid - - # The Job entry record is returned at the root of the response - def _load_atom_entry(self, response): - return _load_atom(response).entry - - def cancel(self): - """Stops the current search and deletes the results cache. - - :return: The :class:`Job`. - """ - try: - self.post("control", action="cancel") - except HTTPError as he: - if he.status == 404: - # The job has already been cancelled, so - # cancelling it twice is a nop. - pass - else: - raise - return self - - def disable_preview(self): - """Disables preview for this job. - - :return: The :class:`Job`. - """ - self.post("control", action="disablepreview") - return self - - def enable_preview(self): - """Enables preview for this job. - - **Note**: Enabling preview might slow search considerably. - - :return: The :class:`Job`. - """ - self.post("control", action="enablepreview") - return self - - def events(self, **kwargs): - """Returns a streaming handle to this job's events. - - :param kwargs: Additional parameters (optional). 
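Since the multi-kind branch above applies "count", "offset", and sorting only after fetching everything, the call shape stays uniform either way; a sketch, with an illustrative file path for the oneshot variant::

    # Single kind: one request, server-side parameters.
    monitors = service.inputs.list('monitor', count=10)

    # Multiple kinds: fetched in full, then sliced and sorted client-side.
    few = service.inputs.list('monitor', 'tcp', count=20, sort_dir='desc')

    # One-time indexing of a server-readable file.
    service.inputs.oneshot('/var/log/myapp/app.log', index='main')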
For a list of valid - parameters, see `GET search/jobs/{search_id}/events - `_ - in the REST API documentation. - :type kwargs: ``dict`` - - :return: The ``InputStream`` IO handle to this job's events. - """ - kwargs['segmentation'] = kwargs.get('segmentation', 'none') - - # Search API v1(GET) and v2(POST) - if self.service.disable_v2_api: - return self.get("events", **kwargs).body - return self.post("events", **kwargs).body - - def finalize(self): - """Stops the job and provides intermediate results for retrieval. - - :return: The :class:`Job`. - """ - self.post("control", action="finalize") - return self - - def is_done(self): - """Indicates whether this job finished running. - - :return: ``True`` if the job is done, ``False`` if not. - :rtype: ``boolean`` - """ - if not self.is_ready(): - return False - done = (self._state.content['isDone'] == '1') - return done - - def is_ready(self): - """Indicates whether this job is ready for querying. - - :return: ``True`` if the job is ready, ``False`` if not. - :rtype: ``boolean`` - - """ - response = self.get() - if response.status == 204: - return False - self._state = self.read(response) - ready = self._state.content['dispatchState'] not in ['QUEUED', 'PARSING'] - return ready - - @property - def name(self): - """Returns the name of the search job, which is the search ID (SID). - - :return: The search ID. - :rtype: ``string`` - """ - return self.sid - - def pause(self): - """Suspends the current search. - - :return: The :class:`Job`. - """ - self.post("control", action="pause") - return self - - def results(self, **query_params): - """Returns a streaming handle to this job's search results. To get a nice, Pythonic iterator, pass the handle - to :class:`splunklib.results.JSONResultsReader` along with the query param "output_mode='json'", as in:: - - import splunklib.client as client - import splunklib.results as results - from time import sleep - service = client.connect(...) - job = service.jobs.create("search * | head 5") - while not job.is_done(): - sleep(.2) - rr = results.JSONResultsReader(job.results(output_mode='json')) - for result in rr: - if isinstance(result, results.Message): - # Diagnostic messages may be returned in the results - print(f'{result.type}: {result.message}') - elif isinstance(result, dict): - # Normal events are returned as dicts - print(result) - assert rr.is_preview == False - - Results are not available until the job has finished. If called on - an unfinished job, the result is an empty event set. - - This method makes a single roundtrip - to the server, plus at most two additional round trips if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param query_params: Additional parameters (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/results - `_. - :type query_params: ``dict`` - - :return: The ``InputStream`` IO handle to this job's results. - """ - query_params['segmentation'] = query_params.get('segmentation', 'none') - - # Search API v1(GET) and v2(POST) - if self.service.disable_v2_api: - return self.get("results", **query_params).body - return self.post("results", **query_params).body - - def preview(self, **query_params): - """Returns a streaming handle to this job's preview search results. 
- - Unlike :class:`splunklib.results.JSONResultsReader`along with the query param "output_mode='json'", - which requires a job to be finished to return any results, the ``preview`` method returns any results that - have been generated so far, whether the job is running or not. The returned search results are the raw data - from the server. Pass the handle returned to :class:`splunklib.results.JSONResultsReader` to get a nice, - Pythonic iterator over objects, as in:: - - import splunklib.client as client - import splunklib.results as results - service = client.connect(...) - job = service.jobs.create("search * | head 5") - rr = results.JSONResultsReader(job.preview(output_mode='json')) - for result in rr: - if isinstance(result, results.Message): - # Diagnostic messages may be returned in the results - print(f'{result.type}: {result.message}') - elif isinstance(result, dict): - # Normal events are returned as dicts - print(result) - if rr.is_preview: - print("Preview of a running search job.") - else: - print("Job is finished. Results are final.") - - This method makes one roundtrip to the server, plus at most - two more if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param query_params: Additional parameters (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/results_preview - `_ - in the REST API documentation. - :type query_params: ``dict`` - - :return: The ``InputStream`` IO handle to this job's preview results. - """ - query_params['segmentation'] = query_params.get('segmentation', 'none') - - # Search API v1(GET) and v2(POST) - if self.service.disable_v2_api: - return self.get("results_preview", **query_params).body - return self.post("results_preview", **query_params).body - - def searchlog(self, **kwargs): - """Returns a streaming handle to this job's search log. - - :param `kwargs`: Additional parameters (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/search.log - `_ - in the REST API documentation. - :type kwargs: ``dict`` - - :return: The ``InputStream`` IO handle to this job's search log. - """ - return self.get("search.log", **kwargs).body - - def set_priority(self, value): - """Sets this job's search priority in the range of 0-10. - - Higher numbers indicate higher priority. Unless splunkd is - running as *root*, you can only decrease the priority of a running job. - - :param `value`: The search priority. - :type value: ``integer`` - - :return: The :class:`Job`. - """ - self.post('control', action="setpriority", priority=value) - return self - - def summary(self, **kwargs): - """Returns a streaming handle to this job's summary. - - :param `kwargs`: Additional parameters (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/summary - `_ - in the REST API documentation. - :type kwargs: ``dict`` - - :return: The ``InputStream`` IO handle to this job's summary. - """ - return self.get("summary", **kwargs).body - - def timeline(self, **kwargs): - """Returns a streaming handle to this job's timeline results. - - :param `kwargs`: Additional timeline arguments (optional). For a list of valid - parameters, see `GET search/jobs/{search_id}/timeline - `_ - in the REST API documentation. - :type kwargs: ``dict`` - - :return: The ``InputStream`` IO handle to this job's timeline. - """ - return self.get("timeline", **kwargs).body - - def touch(self): - """Extends the expiration time of the search to the current time (now) plus - the time-to-live (ttl) value. 
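A sketch of the job-control and streaming accessors above; each accessor returns a raw ``InputStream`` handle, and the blocking dispatch mode is an assumption used here for brevity::

    job = service.jobs.create('search index=_internal | head 5',
                              exec_mode='blocking')

    log_text = job.searchlog().read().decode('utf-8')
    summary_xml = job.summary().read()   # raw XML; parse as needed
    job.set_priority(5)                  # 0-10; only lowering is guaranteed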
- - :return: The :class:`Job`. - """ - self.post("control", action="touch") - return self - - def set_ttl(self, value): - """Set the job's time-to-live (ttl) value, which is the time before the - search job expires and is still available. - - :param `value`: The ttl value, in seconds. - :type value: ``integer`` - - :return: The :class:`Job`. - """ - self.post("control", action="setttl", ttl=value) - return self - - def unpause(self): - """Resumes the current search, if paused. - - :return: The :class:`Job`. - """ - self.post("control", action="unpause") - return self - - -class Jobs(Collection): - """This class represents a collection of search jobs. Retrieve this - collection using :meth:`Service.jobs`.""" - - def __init__(self, service): - # Splunk 9 introduces the v2 endpoint - if not service.disable_v2_api: - path = PATH_JOBS_V2 - else: - path = PATH_JOBS - Collection.__init__(self, service, path, item=Job) - # The count value to say list all the contents of this - # Collection is 0, not -1 as it is on most. - self.null_count = 0 - - def _load_list(self, response): - # Overridden because Job takes a sid instead of a path. - entries = _load_atom_entries(response) - if entries is None: return [] - entities = [] - for entry in entries: - state = _parse_atom_entry(entry) - entity = self.item( - self.service, - entry['content']['sid'], - state=state) - entities.append(entity) - return entities - - def create(self, query, **kwargs): - """ Creates a search using a search query and any additional parameters - you provide. - - :param query: The search query. - :type query: ``string`` - :param kwargs: Additiona parameters (optional). For a list of available - parameters, see `Search job parameters - `_ - on Splunk Developer Portal. - :type kwargs: ``dict`` - - :return: The :class:`Job`. - """ - if kwargs.get("exec_mode", None) == "oneshot": - raise TypeError("Cannot specify exec_mode=oneshot; use the oneshot method instead.") - response = self.post(search=query, **kwargs) - sid = _load_sid(response, kwargs.get("output_mode", None)) - return Job(self.service, sid) - - def export(self, query, **params): - """Runs a search and immediately starts streaming preview events. This method returns a streaming handle to - this job's events as an XML document from the server. To parse this stream into usable Python objects, - pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param - "output_mode='json'":: - - import splunklib.client as client - import splunklib.results as results - service = client.connect(...) - rr = results.JSONResultsReader(service.jobs.export("search * | head 5",output_mode='json')) - for result in rr: - if isinstance(result, results.Message): - # Diagnostic messages may be returned in the results - print(f'{result.type}: {result.message}') - elif isinstance(result, dict): - # Normal events are returned as dicts - print(result) - assert rr.is_preview == False - - Running an export search is more efficient as it streams the results - directly to you, rather than having to write them out to disk and make - them available later. As soon as results are ready, you will receive - them. - - The ``export`` method makes a single roundtrip to the server (as opposed - to two for :meth:`create` followed by :meth:`preview`), plus at most two - more if the ``autologin`` field of :func:`connect` is set to ``True``. - - :raises `ValueError`: Raised for invalid queries. - :param query: The search query. 
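Keeping a job alive with the TTL controls above composes as follows (the 600-second value is an assumption)::

    job = service.jobs.create('search index=_internal | head 5')
    job.set_ttl(600)   # expire 600 seconds after dispatch
    job.touch()        # reset the clock to now + ttl

    # cancel() may be called twice: the 404 for an already-cancelled
    # job is deliberately swallowed.
    job.cancel()
    job.cancel()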
- :type query: ``string`` - :param params: Additional arguments (optional). For a list of valid - parameters, see `GET search/jobs/export - `_ - in the REST API documentation. - :type params: ``dict`` - - :return: The ``InputStream`` IO handle to raw XML returned from the server. - """ - if "exec_mode" in params: - raise TypeError("Cannot specify an exec_mode to export.") - params['segmentation'] = params.get('segmentation', 'none') - return self.post(path_segment="export", - search=query, - **params).body - - def itemmeta(self): - """There is no metadata available for class:``Jobs``. - - Any call to this method raises a class:``NotSupportedError``. - - :raises: class:``NotSupportedError`` - """ - raise NotSupportedError() - - def oneshot(self, query, **params): - """Run a oneshot search and returns a streaming handle to the results. - - The ``InputStream`` object streams fragments from the server. To parse this stream into usable Python - objects, pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param - "output_mode='json'" :: - - import splunklib.client as client - import splunklib.results as results - service = client.connect(...) - rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5",output_mode='json')) - for result in rr: - if isinstance(result, results.Message): - # Diagnostic messages may be returned in the results - print(f'{result.type}: {result.message}') - elif isinstance(result, dict): - # Normal events are returned as dicts - print(result) - assert rr.is_preview == False - - The ``oneshot`` method makes a single roundtrip to the server (as opposed - to two for :meth:`create` followed by :meth:`results`), plus at most two more - if the ``autologin`` field of :func:`connect` is set to ``True``. - - :raises ValueError: Raised for invalid queries. - - :param query: The search query. - :type query: ``string`` - :param params: Additional arguments (optional): - - - "output_mode": Specifies the output format of the results (XML, - JSON, or CSV). - - - "earliest_time": Specifies the earliest time in the time range to - search. The time string can be a UTC time (with fractional seconds), - a relative time specifier (to now), or a formatted time string. - - - "latest_time": Specifies the latest time in the time range to - search. The time string can be a UTC time (with fractional seconds), - a relative time specifier (to now), or a formatted time string. - - - "rf": Specifies one or more fields to add to the search. - - :type params: ``dict`` - - :return: The ``InputStream`` IO handle to raw XML returned from the server. - """ - if "exec_mode" in params: - raise TypeError("Cannot specify an exec_mode to oneshot.") - params['segmentation'] = params.get('segmentation', 'none') - return self.post(search=query, - exec_mode="oneshot", - **params).body - - -class Loggers(Collection): - """This class represents a collection of service logging categories. - Retrieve this collection using :meth:`Service.loggers`.""" - - def __init__(self, service): - Collection.__init__(self, service, PATH_LOGGER) - - def itemmeta(self): - """There is no metadata available for class:``Loggers``. - - Any call to this method raises a class:``NotSupportedError``. - - :raises: class:``NotSupportedError`` - """ - raise NotSupportedError() - - -class Message(Entity): - def __init__(self, service, path, **kwargs): - Entity.__init__(self, service, path, **kwargs) - - @property - def value(self): - """Returns the message value. - - :return: The message value. 
- :rtype: ``string`` - """ - return self[self.name] - - -class ModularInputKind(Entity): - """This class contains the different types of modular inputs. Retrieve this - collection using :meth:`Service.modular_input_kinds`. - """ - - def __contains__(self, name): - args = self.state.content['endpoints']['args'] - if name in args: - return True - return Entity.__contains__(self, name) - - def __getitem__(self, name): - args = self.state.content['endpoint']['args'] - if name in args: - return args['item'] - return Entity.__getitem__(self, name) - - @property - def arguments(self): - """A dictionary of all the arguments supported by this modular input kind. - - The keys in the dictionary are the names of the arguments. The values are - another dictionary giving the metadata about that argument. The possible - keys in that dictionary are ``"title"``, ``"description"``, ``"required_on_create``", - ``"required_on_edit"``, ``"data_type"``. Each value is a string. It should be one - of ``"true"`` or ``"false"`` for ``"required_on_create"`` and ``"required_on_edit"``, - and one of ``"boolean"``, ``"string"``, or ``"number``" for ``"data_type"``. - - :return: A dictionary describing the arguments this modular input kind takes. - :rtype: ``dict`` - """ - return self.state.content['endpoint']['args'] - - def update(self, **kwargs): - """Raises an error. Modular input kinds are read only.""" - raise IllegalOperationException("Modular input kinds cannot be updated via the REST API.") - - -class SavedSearch(Entity): - """This class represents a saved search.""" - - def __init__(self, service, path, **kwargs): - Entity.__init__(self, service, path, **kwargs) - - def acknowledge(self): - """Acknowledges the suppression of alerts from this saved search and - resumes alerting. - - :return: The :class:`SavedSearch`. - """ - self.post("acknowledge") - return self - - @property - def alert_count(self): - """Returns the number of alerts fired by this saved search. - - :return: The number of alerts fired by this saved search. - :rtype: ``integer`` - """ - return int(self._state.content.get('triggered_alert_count', 0)) - - def dispatch(self, **kwargs): - """Runs the saved search and returns the resulting search job. - - :param `kwargs`: Additional dispatch arguments (optional). For details, - see the `POST saved/searches/{name}/dispatch - `_ - endpoint in the REST API documentation. - :type kwargs: ``dict`` - :return: The :class:`Job`. - """ - response = self.post("dispatch", **kwargs) - sid = _load_sid(response, kwargs.get("output_mode", None)) - return Job(self.service, sid) - - @property - def fired_alerts(self): - """Returns the collection of fired alerts (a fired alert group) - corresponding to this saved search's alerts. - - :raises IllegalOperationException: Raised when the search is not scheduled. - - :return: A collection of fired alerts. - :rtype: :class:`AlertGroup` - """ - if self['is_scheduled'] == '0': - raise IllegalOperationException('Unscheduled saved searches have no alerts.') - c = Collection( - self.service, - self.service._abspath(PATH_FIRED_ALERTS + self.name, - owner=self._state.access.owner, - app=self._state.access.app, - sharing=self._state.access.sharing), - item=AlertGroup) - return c - - def history(self, **kwargs): - """Returns a list of search jobs corresponding to this saved search. - - :param `kwargs`: Additional arguments (optional). - :type kwargs: ``dict`` - - :return: A list of :class:`Job` objects. 
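A hedged sketch of driving the saved-search machinery above; the search name is an assumption::

    from time import sleep

    ss = service.saved_searches['Errors in the last hour']
    job = ss.dispatch()
    while not job.is_done():
        sleep(0.5)

    # fired_alerts raises IllegalOperationException for unscheduled searches.
    if ss['is_scheduled'] == '1':
        for group in ss.fired_alerts:
            print(group.name, group.count)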
- """ - response = self.get("history", **kwargs) - entries = _load_atom_entries(response) - if entries is None: return [] - jobs = [] - for entry in entries: - job = Job(self.service, entry.title) - jobs.append(job) - return jobs - - def update(self, search=None, **kwargs): - """Updates the server with any changes you've made to the current saved - search along with any additional arguments you specify. - - :param `search`: The search query (optional). - :type search: ``string`` - :param `kwargs`: Additional arguments (optional). For a list of available - parameters, see `Saved search parameters - `_ - on Splunk Developer Portal. - :type kwargs: ``dict`` - - :return: The :class:`SavedSearch`. - """ - # Updates to a saved search *require* that the search string be - # passed, so we pass the current search string if a value wasn't - # provided by the caller. - if search is None: search = self.content.search - Entity.update(self, search=search, **kwargs) - return self - - def scheduled_times(self, earliest_time='now', latest_time='+1h'): - """Returns the times when this search is scheduled to run. - - By default this method returns the times in the next hour. For different - time ranges, set *earliest_time* and *latest_time*. For example, - for all times in the last day use "earliest_time=-1d" and - "latest_time=now". - - :param earliest_time: The earliest time. - :type earliest_time: ``string`` - :param latest_time: The latest time. - :type latest_time: ``string`` - - :return: The list of search times. - """ - response = self.get("scheduled_times", - earliest_time=earliest_time, - latest_time=latest_time) - data = self._load_atom_entry(response) - rec = _parse_atom_entry(data) - times = [datetime.fromtimestamp(int(t)) - for t in rec.content.scheduled_times] - return times - - def suppress(self, expiration): - """Skips any scheduled runs of this search in the next *expiration* - number of seconds. - - :param expiration: The expiration period, in seconds. - :type expiration: ``integer`` - - :return: The :class:`SavedSearch`. - """ - self.post("suppress", expiration=expiration) - return self - - @property - def suppressed(self): - """Returns the number of seconds that this search is blocked from running - (possibly 0). - - :return: The number of seconds. - :rtype: ``integer`` - """ - r = self._run_action("suppress") - if r.suppressed == "1": - return int(r.expiration) - return 0 - - def unsuppress(self): - """Cancels suppression and makes this search run as scheduled. - - :return: The :class:`SavedSearch`. - """ - self.post("suppress", expiration="0") - return self - - -class SavedSearches(Collection): - """This class represents a collection of saved searches. Retrieve this - collection using :meth:`Service.saved_searches`.""" - - def __init__(self, service): - Collection.__init__( - self, service, PATH_SAVED_SEARCHES, item=SavedSearch) - - def create(self, name, search, **kwargs): - """ Creates a saved search. - - :param name: The name for the saved search. - :type name: ``string`` - :param search: The search query. - :type search: ``string`` - :param kwargs: Additional arguments (optional). For a list of available - parameters, see `Saved search parameters - `_ - on Splunk Developer Portal. - :type kwargs: ``dict`` - :return: The :class:`SavedSearches` collection. - """ - return Collection.create(self, name, search=search, **kwargs) - - -class Settings(Entity): - """This class represents configuration settings for a Splunk service. 
- Retrieve this collection using :meth:`Service.settings`.""" - - def __init__(self, service, **kwargs): - Entity.__init__(self, service, "/services/server/settings", **kwargs) - - # Updates on the settings endpoint are POSTed to server/settings/settings. - def update(self, **kwargs): - """Updates the settings on the server using the arguments you provide. - - :param kwargs: Additional arguments. For a list of valid arguments, see - `POST server/settings/{name} - `_ - in the REST API documentation. - :type kwargs: ``dict`` - :return: The :class:`Settings` collection. - """ - self.service.post("/services/server/settings/settings", **kwargs) - return self - - -class User(Entity): - """This class represents a Splunk user. - """ - - @property - def role_entities(self): - """Returns a list of roles assigned to this user. - - :return: The list of roles. - :rtype: ``list`` - """ - all_role_names = [r.name for r in self.service.roles.list()] - return [self.service.roles[name] for name in self.content.roles if name in all_role_names] - - -# Splunk automatically lowercases new user names so we need to match that -# behavior here to ensure that the subsequent member lookup works correctly. -class Users(Collection): - """This class represents the collection of Splunk users for this instance of - Splunk. Retrieve this collection using :meth:`Service.users`. - """ - - def __init__(self, service): - Collection.__init__(self, service, PATH_USERS, item=User) - - def __getitem__(self, key): - return Collection.__getitem__(self, key.lower()) - - def __contains__(self, name): - return Collection.__contains__(self, name.lower()) - - def create(self, username, password, roles, **params): - """Creates a new user. - - This function makes two roundtrips to the server, plus at most - two more if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param username: The username. - :type username: ``string`` - :param password: The password. - :type password: ``string`` - :param roles: A single role or list of roles for the user. - :type roles: ``string`` or ``list`` - :param params: Additional arguments (optional). For a list of available - parameters, see `User authentication parameters - `_ - on Splunk Developer Portal. - :type params: ``dict`` - - :return: The new user. - :rtype: :class:`User` - - **Example**:: - - import splunklib.client as client - c = client.connect(...) - users = c.users - boris = users.create("boris", "securepassword", roles="user") - hilda = users.create("hilda", "anotherpassword", roles=["user","power"]) - """ - if not isinstance(username, str): - raise ValueError(f"Invalid username: {str(username)}") - username = username.lower() - self.post(name=username, password=password, roles=roles, **params) - # splunkd doesn't return the user in the POST response body, - # so we have to make a second round trip to fetch it. - response = self.get(username) - entry = _load_atom(response, XNAME_ENTRY).entry - state = _parse_atom_entry(entry) - entity = self.item( - self.service, - parse.unquote(state.links.alternate), - state=state) - return entity - - def delete(self, name): - """ Deletes the user and returns the resulting collection of users. - - :param name: The name of the user to delete. - :type name: ``string`` - - :return: - :rtype: :class:`Users` - """ - return Collection.delete(self, name.lower()) - - -class Role(Entity): - """This class represents a user role. - """ - - def grant(self, *capabilities_to_grant): - """Grants additional capabilities to this role. 
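Because updates to server settings POST to the fixed ``server/settings/settings`` path, the override above hides that wrinkle; usage is ordinary, and the timeout value here is an assumption::

    settings = service.settings
    settings.update(sessionTimeout='2h')
    settings.refresh()
    print(settings['sessionTimeout'])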
- - :param capabilities_to_grant: Zero or more capabilities to grant this - role. For a list of capabilities, see - `Capabilities `_ - on Splunk Developer Portal. - :type capabilities_to_grant: ``string`` or ``list`` - :return: The :class:`Role`. - - **Example**:: - - service = client.connect(...) - role = service.roles['somerole'] - role.grant('change_own_password', 'search') - """ - possible_capabilities = self.service.capabilities - for capability in capabilities_to_grant: - if capability not in possible_capabilities: - raise NoSuchCapability(capability) - new_capabilities = self['capabilities'] + list(capabilities_to_grant) - self.post(capabilities=new_capabilities) - return self - - def revoke(self, *capabilities_to_revoke): - """Revokes zero or more capabilities from this role. - - :param capabilities_to_revoke: Zero or more capabilities to grant this - role. For a list of capabilities, see - `Capabilities `_ - on Splunk Developer Portal. - :type capabilities_to_revoke: ``string`` or ``list`` - - :return: The :class:`Role`. - - **Example**:: - - service = client.connect(...) - role = service.roles['somerole'] - role.revoke('change_own_password', 'search') - """ - possible_capabilities = self.service.capabilities - for capability in capabilities_to_revoke: - if capability not in possible_capabilities: - raise NoSuchCapability(capability) - old_capabilities = self['capabilities'] - new_capabilities = [] - for c in old_capabilities: - if c not in capabilities_to_revoke: - new_capabilities.append(c) - if not new_capabilities: - new_capabilities = '' # Empty lists don't get passed in the body, so we have to force an empty argument. - self.post(capabilities=new_capabilities) - return self - - -class Roles(Collection): - """This class represents the collection of roles in the Splunk instance. - Retrieve this collection using :meth:`Service.roles`.""" - - def __init__(self, service): - Collection.__init__(self, service, PATH_ROLES, item=Role) - - def __getitem__(self, key): - return Collection.__getitem__(self, key.lower()) - - def __contains__(self, name): - return Collection.__contains__(self, name.lower()) - - def create(self, name, **params): - """Creates a new role. - - This function makes two roundtrips to the server, plus at most - two more if - the ``autologin`` field of :func:`connect` is set to ``True``. - - :param name: Name for the role. - :type name: ``string`` - :param params: Additional arguments (optional). For a list of available - parameters, see `Roles parameters - `_ - on Splunk Developer Portal. - :type params: ``dict`` - - :return: The new role. - :rtype: :class:`Role` - - **Example**:: - - import splunklib.client as client - c = client.connect(...) - roles = c.roles - paltry = roles.create("paltry", imported_roles="user", defaultApp="search") - """ - if not isinstance(name, str): - raise ValueError(f"Invalid role name: {str(name)}") - name = name.lower() - self.post(name=name, **params) - # splunkd doesn't return the user in the POST response body, - # so we have to make a second round trip to fetch it. - response = self.get(name) - entry = _load_atom(response, XNAME_ENTRY).entry - state = _parse_atom_entry(entry) - entity = self.item( - self.service, - parse.unquote(state.links.alternate), - state=state) - return entity - - def delete(self, name): - """ Deletes the role and returns the resulting collection of roles. - - :param name: The name of the role to delete. 
- :type name: ``string`` - - :rtype: The :class:`Roles` - """ - return Collection.delete(self, name.lower()) - - -class Application(Entity): - """Represents a locally-installed Splunk app.""" - - @property - def setupInfo(self): - """Returns the setup information for the app. - - :return: The setup information. - """ - return self.content.get('eai:setup', None) - - def package(self): - """ Creates a compressed package of the app for archiving.""" - return self._run_action("package") - - def updateInfo(self): - """Returns any update information that is available for the app.""" - return self._run_action("update") - - -class KVStoreCollections(Collection): - def __init__(self, service): - Collection.__init__(self, service, 'storage/collections/config', item=KVStoreCollection) - - def __getitem__(self, item): - res = Collection.__getitem__(self, item) - for k, v in res.content.items(): - if "accelerated_fields" in k: - res.content[k] = json.loads(v) - return res - - def create(self, name, accelerated_fields={}, fields={}, **kwargs): - """Creates a KV Store Collection. - - :param name: name of collection to create - :type name: ``string`` - :param accelerated_fields: dictionary of accelerated_fields definitions - :type accelerated_fields: ``dict`` - :param fields: dictionary of field definitions - :type fields: ``dict`` - :param kwargs: a dictionary of additional parameters specifying indexes and field definitions - :type kwargs: ``dict`` - - :return: Result of POST request - """ - for k, v in accelerated_fields.items(): - if isinstance(v, dict): - v = json.dumps(v) - kwargs['accelerated_fields.' + k] = v - for k, v in fields.items(): - kwargs['field.' + k] = v - return self.post(name=name, **kwargs) - - -class KVStoreCollection(Entity): - @property - def data(self): - """Returns data object for this Collection. - - :rtype: :class:`KVStoreCollectionData` - """ - return KVStoreCollectionData(self) - - def update_accelerated_field(self, name, value): - """Changes the definition of a KV Store accelerated_field. - - :param name: name of accelerated_fields to change - :type name: ``string`` - :param value: new accelerated_fields definition - :type value: ``dict`` - - :return: Result of POST request - """ - kwargs = {} - kwargs['accelerated_fields.' + name] = json.dumps(value) if isinstance(value, dict) else value - return self.post(**kwargs) - - def update_field(self, name, value): - """Changes the definition of a KV Store field. - - :param name: name of field to change - :type name: ``string`` - :param value: new field definition - :type value: ``string`` - - :return: Result of POST request - """ - kwargs = {} - kwargs['field.' + name] = value - return self.post(**kwargs) - - -class KVStoreCollectionData: - """This class represents the data endpoint for a KVStoreCollection. 
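A minimal sketch of KV Store collection creation above; note how dict-valued ``accelerated_fields`` entries are JSON-encoded into flat POST arguments. The collection and field names are assumptions::

    kv = service.kvstore
    kv.create('sessions',
              accelerated_fields={'by_user': {'user': 1}},
              fields={'user': 'string', 'ttl': 'number'})

    coll = kv['sessions']          # accelerated_fields decode back to dicts
    coll.update_field('ttl', 'number')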
- - Retrieve using :meth:`KVStoreCollection.data` - """ - JSON_HEADER = [('Content-Type', 'application/json')] - - def __init__(self, collection): - self.service = collection.service - self.collection = collection - self.owner, self.app, self.sharing = collection._proper_namespace() - self.path = 'storage/collections/data/' + UrlEncoded(self.collection.name, encode_slash=True) + '/' - - def _get(self, url, **kwargs): - return self.service.get(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) - - def _post(self, url, **kwargs): - return self.service.post(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) - - def _delete(self, url, **kwargs): - return self.service.delete(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) - - def query(self, **query): - """ - Gets the results of query, with optional parameters sort, limit, skip, and fields. - - :param query: Optional parameters. Valid options are sort, limit, skip, and fields - :type query: ``dict`` - - :return: Array of documents retrieved by query. - :rtype: ``array`` - """ - - for key, value in query.items(): - if isinstance(query[key], dict): - query[key] = json.dumps(value) - - return json.loads(self._get('', **query).body.read().decode('utf-8')) - - def query_by_id(self, id): - """ - Returns object with _id = id. - - :param id: Value for ID. If not a string will be coerced to string. - :type id: ``string`` - - :return: Document with id - :rtype: ``dict`` - """ - return json.loads(self._get(UrlEncoded(str(id), encode_slash=True)).body.read().decode('utf-8')) - - def insert(self, data): - """ - Inserts item into this collection. An _id field will be generated if not assigned in the data. - - :param data: Document to insert - :type data: ``string`` - - :return: _id of inserted object - :rtype: ``dict`` - """ - if isinstance(data, dict): - data = json.dumps(data) - return json.loads( - self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) - - def delete(self, query=None): - """ - Deletes all data in collection if query is absent. Otherwise, deletes all data matched by query. - - :param query: Query to select documents to delete - :type query: ``string`` - - :return: Result of DELETE request - """ - return self._delete('', **({'query': query}) if query else {}) - - def delete_by_id(self, id): - """ - Deletes document that has _id = id. - - :param id: id of document to delete - :type id: ``string`` - - :return: Result of DELETE request - """ - return self._delete(UrlEncoded(str(id), encode_slash=True)) - - def update(self, id, data): - """ - Replaces document with _id = id with data. - - :param id: _id of document to update - :type id: ``string`` - :param data: the new document to insert - :type data: ``string`` - - :return: id of replaced document - :rtype: ``dict`` - """ - if isinstance(data, dict): - data = json.dumps(data) - return json.loads(self._post(UrlEncoded(str(id), encode_slash=True), headers=KVStoreCollectionData.JSON_HEADER, - body=data).body.read().decode('utf-8')) - - def batch_find(self, *dbqueries): - """ - Returns array of results from queries dbqueries. 
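And a sketch of the data endpoint above; dict arguments are JSON-encoded before POSTing, and the field names are assumptions::

    import json

    data = service.kvstore['sessions'].data

    doc_id = data.insert({'user': 'hilda', 'ttl': 300})['_id']
    print(data.query_by_id(doc_id))

    hits = data.query(query=json.dumps({'user': 'hilda'}), limit=10)
    data.update(doc_id, {'user': 'hilda', 'ttl': 600})
    data.delete_by_id(doc_id)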
-
-        :param dbqueries: Array of individual queries as dictionaries
-        :type dbqueries: ``array`` of ``dict``
-
-        :return: Results of each query
-        :rtype: ``array`` of ``array``
-        """
-        if len(dbqueries) < 1:
-            raise Exception('Must have at least one query.')
-
-        data = json.dumps(dbqueries)
-
-        return json.loads(
-            self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8'))
-
-    def batch_save(self, *documents):
-        """
-        Inserts or updates every document specified in documents.
-
-        :param documents: Array of documents to save as dictionaries
-        :type documents: ``array`` of ``dict``
-
-        :return: Results of update operation as overall stats
-        :rtype: ``dict``
-        """
-        if len(documents) < 1:
-            raise Exception('Must have at least one document.')
-
-        data = json.dumps(documents)
-
-        return json.loads(
-            self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8'))
+# Copyright © 2011-2024 Splunk, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"): you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# The purpose of this module is to provide a friendlier domain interface to
+# various Splunk endpoints. The approach here is to leverage the binding
+# layer to capture endpoint context and provide objects and methods that
+# offer simplified access to their corresponding endpoints. The design avoids
+# caching resource state. From the perspective of this module, the 'policy'
+# for caching resource state belongs in the application or a higher level
+# framework, and it's the purpose of this module to provide simplified
+# access to that resource state.
+#
+# A side note: the objects below that provide helper methods for updating,
+# e.g. Entity state, are written so that they may be used in a fluent style.
+#
+
+"""The **splunklib.client** module provides a Pythonic interface to the
+`Splunk REST API `_,
+allowing you to programmatically access Splunk's resources.
+
+**splunklib.client** wraps a Pythonic layer around the wire-level
+binding of the **splunklib.binding** module. The core of the library is the
+:class:`Service` class, which encapsulates a connection to the server, and
+provides access to the various aspects of Splunk's functionality, which are
+exposed via the REST API. Typically you connect to a running Splunk instance
+with the :func:`connect` function::
+
+    import splunklib.client as client
+    service = client.connect(host='localhost', port=8089,
+                             username='admin', password='...')
+    assert isinstance(service, client.Service)
+
+:class:`Service` objects have fields for the various Splunk resources (such as apps,
+jobs, saved searches, inputs, and indexes). All of these fields are
+:class:`Collection` objects::
+
+    appcollection = service.apps
+    my_app = appcollection.create('my_app')
+    my_app = appcollection['my_app']
+    appcollection.delete('my_app')
+
+The individual elements of the collection, in this case *applications*,
+are subclasses of :class:`Entity`.
+attributes, and methods that are specific to each kind of entity. For example::
+
+    print(my_app['author'])  # Or: print(my_app.author)
+    my_app.package()  # Creates a compressed package of this application
+"""
+
+import contextlib
+import json
+import logging
+import re
+import socket
+from datetime import datetime, timedelta
+from time import sleep
+from urllib import parse
+
+from splunklib import data
+from splunklib.data import record
+from splunklib.binding import (AuthenticationError, Context, HTTPError, UrlEncoded,
+                               _encode, _make_cookie_header, _NoAuthenticationToken,
+                               namespace)
+
+logger = logging.getLogger(__name__)
+
+__all__ = [
+    "connect",
+    "NotSupportedError",
+    "OperationError",
+    "IncomparableException",
+    "Service",
+    "namespace",
+    "AuthenticationError"
+]
+
+PATH_APPS = "apps/local/"
+PATH_CAPABILITIES = "authorization/capabilities/"
+PATH_CONF = "configs/conf-%s/"
+PATH_PROPERTIES = "properties/"
+PATH_DEPLOYMENT_CLIENTS = "deployment/client/"
+PATH_DEPLOYMENT_TENANTS = "deployment/tenants/"
+PATH_DEPLOYMENT_SERVERS = "deployment/server/"
+PATH_DEPLOYMENT_SERVERCLASSES = "deployment/serverclass/"
+PATH_EVENT_TYPES = "saved/eventtypes/"
+PATH_FIRED_ALERTS = "alerts/fired_alerts/"
+PATH_INDEXES = "data/indexes/"
+PATH_INPUTS = "data/inputs/"
+PATH_JOBS = "search/jobs/"
+PATH_JOBS_V2 = "search/v2/jobs/"
+PATH_LOGGER = "/services/server/logger/"
+PATH_MESSAGES = "messages/"
+PATH_MODULAR_INPUTS = "data/modular-inputs"
+PATH_ROLES = "authorization/roles/"
+PATH_SAVED_SEARCHES = "saved/searches/"
+PATH_STANZA = "configs/conf-%s/%s"  # (file, stanza)
+PATH_USERS = "authentication/users/"
+PATH_RECEIVERS_STREAM = "/services/receivers/stream"
+PATH_RECEIVERS_SIMPLE = "/services/receivers/simple"
+PATH_STORAGE_PASSWORDS = "storage/passwords"
+
+XNAMEF_ATOM = "{http://www.w3.org/2005/Atom}%s"
+XNAME_ENTRY = XNAMEF_ATOM % "entry"
+XNAME_CONTENT = XNAMEF_ATOM % "content"
+
+MATCH_ENTRY_CONTENT = f"{XNAME_ENTRY}/{XNAME_CONTENT}/*"
+
+
+class IllegalOperationException(Exception):
+    """Thrown when an operation is not possible on the Splunk instance that a
+    :class:`Service` object is connected to."""
+
+
+class IncomparableException(Exception):
+    """Thrown when trying to compare objects (using ``==``, ``<``, ``>``, and
+    so on) of a type that doesn't support it."""
+
+
+class AmbiguousReferenceException(ValueError):
+    """Thrown when the name used to fetch an entity matches more than one entity."""
+
+
+class InvalidNameException(Exception):
+    """Thrown when the specified name contains characters that are not allowed
+    in Splunk entity names."""
+
+
+class NoSuchCapability(Exception):
+    """Thrown when the capability that has been referred to doesn't exist."""
+
+
+class OperationError(Exception):
+    """Raised for a failed operation, such as a timeout."""
+
+
+class NotSupportedError(Exception):
+    """Raised for operations that are not supported on a given object."""
+
+
+def _trailing(template, *targets):
+    """Substring of *template* following all *targets*.
+
+    **Example**::
+
+        template = "this is a test of the bunnies."
+        _trailing(template, "is", "est", "the") == " bunnies."
+
+    Each target is matched successively in the string, and the string
+    remaining after the last target is returned. If one of the targets
+    fails to match, a ValueError is raised.
+
+    :param template: Template to extract a trailing string from.
+    :type template: ``string``
+    :param targets: Strings to successively match in *template*.
+    :type targets: list of ``string``s
+    :return: Trailing string after all targets are matched.
+    :rtype: ``string``
+    :raises ValueError: Raised when one of the targets does not match.
+    """
+    s = template
+    for t in targets:
+        n = s.find(t)
+        if n == -1:
+            raise ValueError("Target " + t + " not found in template.")
+        s = s[n + len(t):]
+    return s
+
+
+# Filter the given state content record according to the given arg list.
+def _filter_content(content, *args):
+    if len(args) > 0:
+        return record((k, content[k]) for k in args)
+    return record((k, v) for k, v in content.items()
+                  if k not in ['eai:acl', 'eai:attributes', 'type'])
+
+
+# Construct a resource path from the given base path + resource name
+def _path(base, name):
+    if not base.endswith('/'): base = base + '/'
+    return base + name
+
+
+# Load an atom record from the body of the given response
+# this will ultimately be sent to an xml ElementTree so we
+# should use the xmlcharrefreplace option
+def _load_atom(response, match=None):
+    return data.load(response.body.read()
+                     .decode('utf-8', 'xmlcharrefreplace'), match)
+
+
+# Load an array of atom entries from the body of the given response
+def _load_atom_entries(response):
+    r = _load_atom(response)
+    if 'feed' in r:
+        # Need this to handle a random case in the REST API
+        if r.feed.get('totalResults') in [0, '0']:
+            return []
+        entries = r.feed.get('entry', None)
+        if entries is None: return None
+        return entries if isinstance(entries, list) else [entries]
+    # Unlike most other endpoints, the jobs endpoint does not return
+    # its state wrapped in another element, but at the top level.
+    # For example, in XML, it returns <entry>...</entry> instead of
+    # <feed><entry>...</entry></feed>.
+    entries = r.get('entry', None)
+    if entries is None: return None
+    return entries if isinstance(entries, list) else [entries]
+
+
+# Load the sid from the body of the given response
+def _load_sid(response, output_mode):
+    if output_mode == "json":
+        json_obj = json.loads(response.body.read())
+        return json_obj.get('sid')
+    return _load_atom(response).response.sid
+
+
+# Parse the given atom entry record into a generic entity state record
+def _parse_atom_entry(entry):
+    title = entry.get('title', None)
+
+    elink = entry.get('link', [])
+    elink = elink if isinstance(elink, list) else [elink]
+    links = record((link.rel, link.href) for link in elink)
+
+    # Retrieve entity content values
+    content = entry.get('content', {})
+
+    # Host entry metadata
+    metadata = _parse_atom_metadata(content)
+
+    # Filter some of the noise out of the content record
+    content = record((k, v) for k, v in content.items()
+                     if k not in ['eai:acl', 'eai:attributes'])
+
+    if 'type' in content:
+        if isinstance(content['type'], list):
+            content['type'] = [t for t in content['type'] if t != 'text/xml']
+            # Unset type if it was only 'text/xml'
+            if len(content['type']) == 0:
+                content.pop('type', None)
+            # Flatten 1 element list
+            if len(content['type']) == 1:
+                content['type'] = content['type'][0]
+        else:
+            content.pop('type', None)
+
+    return record({
+        'title': title,
+        'links': links,
+        'access': metadata.access,
+        'fields': metadata.fields,
+        'content': content,
+        'updated': entry.get("updated")
+    })
+
+
+# Parse the metadata fields out of the given atom entry content record
+def _parse_atom_metadata(content):
+    # Hoist access metadata
+    access = content.get('eai:acl', None)
+
+    # Hoist content metadata (and cleanup some naming)
+    attributes = content.get('eai:attributes', {})
+    fields = record({
+        'required': attributes.get('requiredFields', []),
+        'optional': attributes.get('optionalFields', []),
+        'wildcard': attributes.get('wildcardFields', [])})
+
+    return record({'access': access, 'fields': fields})
+
+
+# kwargs: scheme, host, port, app, owner, username, password
+def connect(**kwargs):
+    """This function connects and logs in to a Splunk instance.
+
+    This function is a shorthand for :meth:`Service.login`.
+    The ``connect`` function makes one round trip to the server (for logging in).
+
+    :param host: The host name (the default is "localhost").
+    :type host: ``string``
+    :param port: The port number (the default is 8089).
+    :type port: ``integer``
+    :param scheme: The scheme for accessing the service (the default is "https").
+    :type scheme: "https" or "http"
+    :param verify: Enable (True) or disable (False) SSL verification for
+        https connections. (optional, the default is True)
+    :type verify: ``Boolean``
+    :param `owner`: The owner context of the namespace (optional).
+    :type owner: ``string``
+    :param `app`: The app context of the namespace (optional).
+    :type app: ``string``
+    :param sharing: The sharing mode for the namespace (the default is "user").
+    :type sharing: "global", "system", "app", or "user"
+    :param `token`: The current session token (optional). Session tokens can be
+        shared across multiple service instances.
+    :type token: ``string``
+    :param cookie: A session cookie. When provided, you don't need to call :meth:`login`.
+        This parameter is only supported for Splunk 6.2+.
+    :type cookie: ``string``
+    :param autologin: When ``True``, automatically tries to log in again if the
+        session terminates.
+    :type autologin: ``boolean``
+    :param `username`: The Splunk account username, which is used to
+        authenticate the Splunk instance.
+    :type username: ``string``
+    :param `password`: The password for the Splunk account.
+    :type password: ``string``
+    :param retries: Number of retries for each HTTP connection (optional, the default is 0).
+        NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER.
+    :type retries: ``int``
+    :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s).
+    :type retryDelay: ``int`` (in seconds)
+    :param `context`: The SSLContext that can be used when setting verify=True (optional)
+    :type context: ``SSLContext``
+    :return: An initialized :class:`Service` connection.
+
+    **Example**::
+
+        import splunklib.client as client
+        s = client.connect(...)
+        a = s.apps["my_app"]
+        ...
+    """
+    s = Service(**kwargs)
+    s.login()
+    return s
+
+
+# In preparation for adding Storm support, we added an
+# intermediary class between Service and Context. Storm's
+# API is not going to be the same as enterprise Splunk's
+# API, so we will derive both Service (for enterprise Splunk)
+# and StormService (for Splunk Storm) from _BaseService, and
+# put any shared behavior on it.
+class _BaseService(Context):
+    pass
+
+
+class Service(_BaseService):
+    """A Pythonic binding to Splunk instances.
+
+    A :class:`Service` represents a binding to a Splunk instance on an
+    HTTP or HTTPS port. It handles the details of authentication, wire
+    formats, and wraps the REST API endpoints into something more
+    Pythonic. All of the low-level operations on the instance from
+    :class:`splunklib.binding.Context` are also available in case you need
+    to do something beyond what is provided by this class.
+
+    After creating a ``Service`` object, you must call its :meth:`login`
+    method before you can issue requests to Splunk.
+    Alternately, use the :func:`connect` function to create an already
+    authenticated :class:`Service` object, or provide a session token
+    when creating the :class:`Service` object explicitly (the same
+    token may be shared by multiple :class:`Service` objects).
+
+    :param host: The host name (the default is "localhost").
+    :type host: ``string``
+    :param port: The port number (the default is 8089).
+    :type port: ``integer``
+    :param scheme: The scheme for accessing the service (the default is "https").
+    :type scheme: "https" or "http"
+    :param verify: Enable (True) or disable (False) SSL verification for
+        https connections. (optional, the default is True)
+    :type verify: ``Boolean``
+    :param `owner`: The owner context of the namespace (optional; use "-" for wildcard).
+    :type owner: ``string``
+    :param `app`: The app context of the namespace (optional; use "-" for wildcard).
+    :type app: ``string``
+    :param `token`: The current session token (optional). Session tokens can be
+        shared across multiple service instances.
+    :type token: ``string``
+    :param cookie: A session cookie. When provided, you don't need to call :meth:`login`.
+        This parameter is only supported for Splunk 6.2+.
+    :type cookie: ``string``
+    :param `username`: The Splunk account username, which is used to
+        authenticate the Splunk instance.
+    :type username: ``string``
+    :param `password`: The password, which is used to authenticate the Splunk
+        instance.
+    :type password: ``string``
+    :param retries: Number of retries for each HTTP connection (optional, the default is 0).
+        NOTE THAT THIS MAY INCREASE THE NUMBER OF ROUND TRIP CONNECTIONS TO THE SPLUNK SERVER.
+    :type retries: ``int``
+    :param retryDelay: How long to wait between connection attempts if `retries` > 0 (optional, defaults to 10s).
+    :type retryDelay: ``int`` (in seconds)
+    :return: A :class:`Service` instance.
+
+    **Example**::
+
+        import splunklib.client as client
+        s = client.Service(username="boris", password="natasha", ...)
+        s.login()
+        # Or equivalently
+        s = client.connect(username="boris", password="natasha")
+        # Or if you already have a session token
+        s = client.Service(token="atg232342aa34324a")
+        # Or if you already have a valid cookie
+        s = client.Service(cookie="splunkd_8089=...")
+    """
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self._splunk_version = None
+        self._kvstore_owner = None
+        self._instance_type = None
+
+    @property
+    def apps(self):
+        """Returns the collection of applications that are installed on this instance of Splunk.
+
+        :return: A :class:`Collection` of :class:`Application` entities.
+        """
+        return Collection(self, PATH_APPS, item=Application)
+
+    @property
+    def confs(self):
+        """Returns the collection of configuration files for this Splunk instance.
+
+        :return: A :class:`Configurations` collection of
+            :class:`ConfigurationFile` entities.
+        """
+        return Configurations(self)
+
+    @property
+    def capabilities(self):
+        """Returns the list of system capabilities.
+
+        :return: A ``list`` of capabilities.
+        """
+        response = self.get(PATH_CAPABILITIES)
+        return _load_atom(response, MATCH_ENTRY_CONTENT).capabilities
+
+    @property
+    def event_types(self):
+        """Returns the collection of event types defined in this Splunk instance.
+
+        :return: A :class:`Collection` of event type entities.
+        """
+        return Collection(self, PATH_EVENT_TYPES)
+
+    @property
+    def fired_alerts(self):
+        """Returns the collection of alerts that have been fired on the Splunk
+        instance, grouped by saved search.
+ + :return: A :class:`Collection` of :class:`AlertGroup` entities. + """ + return Collection(self, PATH_FIRED_ALERTS, item=AlertGroup) + + @property + def indexes(self): + """Returns the collection of indexes for this Splunk instance. + + :return: An :class:`Indexes` collection of :class:`Index` entities. + """ + return Indexes(self, PATH_INDEXES, item=Index) + + @property + def info(self): + """Returns the information about this instance of Splunk. + + :return: The system information, as key-value pairs. + :rtype: ``dict`` + """ + response = self.get("/services/server/info") + return _filter_content(_load_atom(response, MATCH_ENTRY_CONTENT)) + + def input(self, path, kind=None): + """Retrieves an input by path, and optionally kind. + + :return: A :class:`Input` object. + """ + return Input(self, path, kind=kind).refresh() + + @property + def inputs(self): + """Returns the collection of inputs configured on this Splunk instance. + + :return: An :class:`Inputs` collection of :class:`Input` entities. + """ + return Inputs(self) + + def job(self, sid): + """Retrieves a search job by sid. + + :return: A :class:`Job` object. + """ + return Job(self, sid).refresh() + + @property + def jobs(self): + """Returns the collection of current search jobs. + + :return: A :class:`Jobs` collection of :class:`Job` entities. + """ + return Jobs(self) + + @property + def loggers(self): + """Returns the collection of logging level categories and their status. + + :return: A :class:`Loggers` collection of logging levels. + """ + return Loggers(self) + + @property + def messages(self): + """Returns the collection of service messages. + + :return: A :class:`Collection` of :class:`Message` entities. + """ + return Collection(self, PATH_MESSAGES, item=Message) + + @property + def modular_input_kinds(self): + """Returns the collection of the modular input kinds on this Splunk instance. + + :return: A :class:`ReadOnlyCollection` of :class:`ModularInputKind` entities. + """ + if self.splunk_version >= (5,): + return ReadOnlyCollection(self, PATH_MODULAR_INPUTS, item=ModularInputKind) + raise IllegalOperationException("Modular inputs are not supported before Splunk version 5.") + + @property + def storage_passwords(self): + """Returns the collection of the storage passwords on this Splunk instance. + + :return: A :class:`ReadOnlyCollection` of :class:`StoragePasswords` entities. + """ + return StoragePasswords(self) + + # kwargs: enable_lookups, reload_macros, parse_only, output_mode + def parse(self, query, **kwargs): + """Parses a search query and returns a semantic map of the search. + + :param query: The search query to parse. + :type query: ``string`` + :param kwargs: Arguments to pass to the ``search/parser`` endpoint + (optional). Valid arguments are: + + * "enable_lookups" (``boolean``): If ``True``, performs reverse lookups + to expand the search expression. + + * "output_mode" (``string``): The output format (XML or JSON). + + * "parse_only" (``boolean``): If ``True``, disables the expansion of + search due to evaluation of subsearches, time term expansion, + lookups, tags, eventtypes, and sourcetype alias. + + * "reload_macros" (``boolean``): If ``True``, reloads macro + definitions from macros.conf. + + :type kwargs: ``dict`` + :return: A semantic map of the parsed search query. + """ + if not self.disable_v2_api: + return self.post("search/v2/parser", q=query, **kwargs) + return self.get("search/parser", q=query, **kwargs) + + def restart(self, timeout=None): + """Restarts this Splunk instance. 
+
+        The service is unavailable until it has successfully restarted.
+
+        If a *timeout* value is specified, ``restart`` blocks until the service
+        resumes or the timeout period has been exceeded. Otherwise, ``restart`` returns
+        immediately.
+
+        :param timeout: A timeout period, in seconds.
+        :type timeout: ``integer``
+        """
+        msg = {"value": "Restart requested by " + self.username + " via the Splunk SDK for Python"}
+        # This message will be deleted once the server actually restarts.
+        self.messages.create(name="restart_required", **msg)
+        result = self.post("/services/server/control/restart")
+        if timeout is None:
+            return result
+        start = datetime.now()
+        diff = timedelta(seconds=timeout)
+        while datetime.now() - start < diff:
+            try:
+                self.login()
+                if not self.restart_required:
+                    return result
+            except Exception:
+                sleep(1)
+        raise Exception("Operation timed out.")
+
+    @property
+    def restart_required(self):
+        """Indicates whether splunkd is in a state that requires a restart.
+
+        :return: A ``boolean`` that indicates whether a restart is required.
+
+        """
+        response = self.get("messages").body.read()
+        messages = data.load(response)['feed']
+        if 'entry' not in messages:
+            result = False
+        else:
+            if isinstance(messages['entry'], dict):
+                titles = [messages['entry']['title']]
+            else:
+                titles = [x['title'] for x in messages['entry']]
+            result = 'restart_required' in titles
+        return result
+
+    @property
+    def roles(self):
+        """Returns the collection of user roles.
+
+        :return: A :class:`Roles` collection of :class:`Role` entities.
+        """
+        return Roles(self)
+
+    def search(self, query, **kwargs):
+        """Runs a search using a search query and any optional arguments you
+        provide, and returns a `Job` object representing the search.
+
+        :param query: A search query.
+        :type query: ``string``
+        :param kwargs: Arguments for the search (optional):
+
+            * "output_mode" (``string``): Specifies the output format of the
+              results.
+
+            * "earliest_time" (``string``): Specifies the earliest time in the
+              time range to search. The time string can be a UTC time (with
+              fractional seconds), a relative time specifier (to now), or a
+              formatted time string.
+
+            * "latest_time" (``string``): Specifies the latest time in the time
+              range to search. The time string can be a UTC time (with
+              fractional seconds), a relative time specifier (to now), or a
+              formatted time string.
+
+            * "rf" (``string``): Specifies one or more fields to add to the
+              search.
+
+        :type kwargs: ``dict``
+        :rtype: :class:`Job`
+        :returns: An object representing the created job.
+        """
+        return self.jobs.create(query, **kwargs)
+
+    @property
+    def saved_searches(self):
+        """Returns the collection of saved searches.
+
+        :return: A :class:`SavedSearches` collection of :class:`SavedSearch`
+            entities.
+        """
+        return SavedSearches(self)
+
+    @property
+    def settings(self):
+        """Returns the configuration settings for this instance of Splunk.
+
+        :return: A :class:`Settings` object containing configuration settings.
+        """
+        return Settings(self)
+
+    @property
+    def splunk_version(self):
+        """Returns the version of the splunkd instance this object is attached
+        to.
+
+        The version is returned as a tuple of the version components as
+        integers (for example, `(4,3,3)` or `(5,)`).
+
+        :return: A ``tuple`` of ``integers``.
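+
+        **Example** (a minimal sketch; the actual value depends on the
+        server you are connected to)::
+
+            service = client.connect(...)
+            if service.splunk_version >= (9,):
+                pass  # version-gated logic goes here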
+        """
+        if self._splunk_version is None:
+            self._splunk_version = tuple(int(p) for p in self.info['version'].split('.'))
+        return self._splunk_version
+
+    @property
+    def splunk_instance(self):
+        if self._instance_type is None:
+            splunk_info = self.info
+            if hasattr(splunk_info, 'instance_type'):
+                self._instance_type = splunk_info['instance_type']
+            else:
+                self._instance_type = ''
+        return self._instance_type
+
+    @property
+    def disable_v2_api(self):
+        if self.splunk_instance.lower() == 'cloud':
+            return self.splunk_version < (9, 0, 2209)
+        return self.splunk_version < (9, 0, 2)
+
+    @property
+    def kvstore_owner(self):
+        """Returns the KVStore owner for this instance of Splunk.
+
+        By default, if the KVStore owner is not set, it returns "nobody".
+        :return: A string with the KVStore owner.
+        """
+        if self._kvstore_owner is None:
+            self._kvstore_owner = "nobody"
+        return self._kvstore_owner
+
+    @kvstore_owner.setter
+    def kvstore_owner(self, value):
+        """
+        The KVStore collection is refreshed when the owner value is changed.
+        """
+        self._kvstore_owner = value
+        self.kvstore
+
+    @property
+    def kvstore(self):
+        """Returns the collection of KV Store collections.
+
+        Sets the owner for the namespace before retrieving the KVStore collection.
+
+        :return: A :class:`KVStoreCollections` collection of :class:`KVStoreCollection` entities.
+        """
+        self.namespace['owner'] = self.kvstore_owner
+        return KVStoreCollections(self)
+
+    @property
+    def users(self):
+        """Returns the collection of users.
+
+        :return: A :class:`Users` collection of :class:`User` entities.
+        """
+        return Users(self)
+
+
+class Endpoint:
+    """This class represents individual Splunk resources in the Splunk REST API.
+
+    An ``Endpoint`` object represents a URI, such as ``/services/saved/searches``.
+    This class provides the common functionality of :class:`Collection` and
+    :class:`Entity` (essentially HTTP GET and POST methods).
+    """
+
+    def __init__(self, service, path):
+        self.service = service
+        self.path = path
+
+    def get_api_version(self, path):
+        """Return the API version of the service used in the provided path.
+
+        Args:
+            path (str): A fully-qualified endpoint path (for example, "/services/search/jobs").
+
+        Returns:
+            int: Version of the API (for example, 1)
+        """
+        # Default to v1 if undefined in the path
+        # For example, "/services/search/jobs" is using API v1
+        api_version = 1
+
+        version_search = re.search(r'(?:servicesNS\/[^/]+\/[^/]+|services)\/[^/]+\/v(\d+)\/', path)
+        if version_search:
+            api_version = int(version_search.group(1))
+
+        return api_version
+
+    def get(self, path_segment="", owner=None, app=None, sharing=None, **query):
+        """Performs a GET operation on the path segment relative to this endpoint.
+
+        This method is named to match the HTTP method. This method makes at least
+        one roundtrip to the server, one additional round trip for
+        each 303 status returned, plus at most two additional round trips if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        If *owner*, *app*, and *sharing* are omitted, this method takes a
+        default namespace from the :class:`Service` object for this :class:`Endpoint`.
+        All other keyword arguments are included in the URL as query parameters.
+
+        :raises AuthenticationError: Raised when the ``Service`` is not logged in.
+        :raises HTTPError: Raised when an error in the request occurs.
+        :param path_segment: A path segment relative to this endpoint.
+        :type path_segment: ``string``
+        :param owner: The owner context of the namespace (optional).
+ :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (optional). + :type sharing: "global", "system", "app", or "user" + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + import splunklib.client + s = client.service(...) + apps = s.apps + apps.get() == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '26208'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 16:30:35 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'OK', + 'status': 200} + apps.get('nonexistant/path') # raises HTTPError + s.logout() + apps.get() # raises AuthenticationError + """ + # self.path to the Endpoint is relative in the SDK, so passing + # owner, app, sharing, etc. along will produce the correct + # namespace in the final request. + if path_segment.startswith('/'): + path = path_segment + else: + if not self.path.endswith('/') and path_segment != "": + self.path = self.path + '/' + path = self.service._abspath(self.path + path_segment, owner=owner, + app=app, sharing=sharing) + # ^-- This was "%s%s" % (self.path, path_segment). + # That doesn't work, because self.path may be UrlEncoded. + + # Get the API version from the path + api_version = self.get_api_version(path) + + # Search API v2+ fallback to v1: + # - In v2+, /results_preview, /events and /results do not support search params. + # - Fallback from v2+ to v1 if Splunk Version is < 9. + # if api_version >= 2 and ('search' in query and path.endswith(tuple(["results_preview", "events", "results"])) or self.service.splunk_version < (9,)): + # path = path.replace(PATH_JOBS_V2, PATH_JOBS) + + if api_version == 1: + if isinstance(path, UrlEncoded): + path = UrlEncoded(path.replace(PATH_JOBS_V2, PATH_JOBS), skip_encode=True) + else: + path = path.replace(PATH_JOBS_V2, PATH_JOBS) + + return self.service.get(path, + owner=owner, app=app, sharing=sharing, + **query) + + def post(self, path_segment="", owner=None, app=None, sharing=None, **query): + """Performs a POST operation on the path segment relative to this endpoint. + + This method is named to match the HTTP method. This method makes at least + one roundtrip to the server, one additional round trip for + each 303 status returned, plus at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + If *owner*, *app*, and *sharing* are omitted, this method takes a + default namespace from the :class:`Service` object for this :class:`Endpoint`. + All other keyword arguments are included in the URL as query parameters. + + :raises AuthenticationError: Raised when the ``Service`` is not logged in. + :raises HTTPError: Raised when an error in the request occurs. + :param path_segment: A path segment relative to this endpoint. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode of the namespace (optional). 
+ :type sharing: ``string`` + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + import splunklib.client + s = client.service(...) + apps = s.apps + apps.post(name='boris') == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '2908'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 18:34:50 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'Created', + 'status': 201} + apps.get('nonexistant/path') # raises HTTPError + s.logout() + apps.get() # raises AuthenticationError + """ + if path_segment.startswith('/'): + path = path_segment + else: + if not self.path.endswith('/') and path_segment != "": + self.path = self.path + '/' + path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) + + # Get the API version from the path + api_version = self.get_api_version(path) + + # Search API v2+ fallback to v1: + # - In v2+, /results_preview, /events and /results do not support search params. + # - Fallback from v2+ to v1 if Splunk Version is < 9. + # if api_version >= 2 and ('search' in query and path.endswith(tuple(["results_preview", "events", "results"])) or self.service.splunk_version < (9,)): + # path = path.replace(PATH_JOBS_V2, PATH_JOBS) + + if api_version == 1: + if isinstance(path, UrlEncoded): + path = UrlEncoded(path.replace(PATH_JOBS_V2, PATH_JOBS), skip_encode=True) + else: + path = path.replace(PATH_JOBS_V2, PATH_JOBS) + + return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) + + +# kwargs: path, app, owner, sharing, state +class Entity(Endpoint): + """This class is a base class for Splunk entities in the REST API, such as + saved searches, jobs, indexes, and inputs. + + ``Entity`` provides the majority of functionality required by entities. + Subclasses only implement the special cases for individual entities. + For example for saved searches, the subclass makes fields like ``action.email``, + ``alert_type``, and ``search`` available. + + An ``Entity`` is addressed like a dictionary, with a few extensions, + so the following all work, for example in saved searches:: + + ent['action.email'] + ent['alert_type'] + ent['search'] + + You can also access the fields as though they were the fields of a Python + object, as in:: + + ent.alert_type + ent.search + + However, because some of the field names are not valid Python identifiers, + the dictionary-like syntax is preferable. + + The state of an :class:`Entity` object is cached, so accessing a field + does not contact the server. If you think the values on the + server have changed, call the :meth:`Entity.refresh` method. + """ + # Not every endpoint in the API is an Entity or a Collection. For + # example, a saved search at saved/searches/{name} has an additional + # method saved/searches/{name}/scheduled_times, but this isn't an + # entity in its own right. In these cases, subclasses should + # implement a method that uses the get and post methods inherited + # from Endpoint, calls the _load_atom function (it's elsewhere in + # client.py, but not a method of any object) to read the + # information, and returns the extracted data in a Pythonesque form. 
+    #
+    # The primary use of subclasses of Entity is to handle specially
+    # named fields in the Entity. If you only need to provide a default
+    # value for an optional field, subclass Entity and define a
+    # dictionary ``defaults``. For instance::
+    #
+    #     class Hypothetical(Entity):
+    #         defaults = {'anOptionalField': 'foo',
+    #                     'anotherField': 'bar'}
+    #
+    # If you have to do more than provide a default, such as rename or
+    # actually process values, then define a new method with the
+    # ``@property`` decorator.
+    #
+    #     class Hypothetical(Entity):
+    #         @property
+    #         def foobar(self):
+    #             return self.content['foo'] + "-" + self.content["bar"]
+
+    # Subclasses can override ``defaults``, the default values for
+    # optional fields. See above.
+    defaults = {}
+
+    def __init__(self, service, path, **kwargs):
+        Endpoint.__init__(self, service, path)
+        self._state = None
+        if not kwargs.get('skip_refresh', False):
+            self.refresh(kwargs.get('state', None))  # "Prefresh"
+
+    def __contains__(self, item):
+        try:
+            self[item]
+            return True
+        except (KeyError, AttributeError):
+            return False
+
+    def __eq__(self, other):
+        """Raises IncomparableException.
+
+        Since Entity objects are snapshots of times on the server, no
+        simple definition of equality will suffice beyond instance
+        equality, and instance equality leads to strange situations
+        such as::
+
+            import splunklib.client as client
+            c = client.connect(...)
+            saved_searches = c.saved_searches
+            x = saved_searches['asearch']
+
+        but then ``x != saved_searches['asearch']``, whether or not
+        there was a change on the server. Rather than try to do
+        something fancy, we simply declare that equality is undefined
+        for Entities.
+
+        Makes no roundtrips to the server.
+        """
+        raise IncomparableException(f"Equality is undefined for objects of class {self.__class__.__name__}")
+
+    def __getattr__(self, key):
+        # Called when an attribute was not found by the normal method. In this
+        # case we try to find it in self.content and then self.defaults.
+        if key in self.state.content:
+            return self.state.content[key]
+        if key in self.defaults:
+            return self.defaults[key]
+        raise AttributeError(key)
+
+    def __getitem__(self, key):
+        # getattr attempts to find a field on the object in the normal way,
+        # then calls __getattr__ if it cannot.
+        return getattr(self, key)
+
+    # Load the Atom entry record from the given response - this is a method
+    # because the "entry" record varies slightly by entity and this allows
+    # for a subclass to override and handle any special cases.
+    def _load_atom_entry(self, response):
+        elem = _load_atom(response, XNAME_ENTRY)
+        if isinstance(elem, list):
+            apps = [ele.entry.content.get('eai:appName') for ele in elem]
+
+            raise AmbiguousReferenceException(
+                f"Fetch from server returned multiple entries for name '{elem[0].entry.title}' in apps {apps}.")
+        return elem.entry
+
+    # Load the entity state record from the given response
+    def _load_state(self, response):
+        entry = self._load_atom_entry(response)
+        return _parse_atom_entry(entry)
+
+    def _run_action(self, path_segment, **kwargs):
+        """Run a method and return the content Record from the returned XML.
+
+        A method is a relative path from an Entity that is not itself
+        an Entity. _run_action assumes that the returned XML is an
+        Atom feed containing one Entry, and the contents of Entry is
+        what should be the return value. This is right in enough cases
+        to make this method useful.
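+
+        **Example** (illustrative only; ``'some_action'`` stands in for a
+        real sub-path such as a saved search's ``scheduled_times``)::
+
+            content = entity._run_action('some_action')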
+ """ + response = self.get(path_segment, **kwargs) + data = self._load_atom_entry(response) + rec = _parse_atom_entry(data) + return rec.content + + def _proper_namespace(self, owner=None, app=None, sharing=None): + """Produce a namespace sans wildcards for use in entity requests. + + This method tries to fill in the fields of the namespace which are `None` + or wildcard (`'-'`) from the entity's namespace. If that fails, it uses + the service's namespace. + + :param owner: + :param app: + :param sharing: + :return: + """ + if owner is None and app is None and sharing is None: # No namespace provided + if self._state is not None and 'access' in self._state: + return (self._state.access.owner, + self._state.access.app, + self._state.access.sharing) + return (self.service.namespace['owner'], + self.service.namespace['app'], + self.service.namespace['sharing']) + return owner, app, sharing + + def delete(self): + owner, app, sharing = self._proper_namespace() + return self.service.delete(self.path, owner=owner, app=app, sharing=sharing) + + def get(self, path_segment="", owner=None, app=None, sharing=None, **query): + owner, app, sharing = self._proper_namespace(owner, app, sharing) + return super().get(path_segment, owner=owner, app=app, sharing=sharing, **query) + + def post(self, path_segment="", owner=None, app=None, sharing=None, **query): + owner, app, sharing = self._proper_namespace(owner, app, sharing) + return super().post(path_segment, owner=owner, app=app, sharing=sharing, **query) + + def refresh(self, state=None): + """Refreshes the state of this entity. + + If *state* is provided, load it as the new state for this + entity. Otherwise, make a roundtrip to the server (by calling + the :meth:`read` method of ``self``) to fetch an updated state, + plus at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param state: Entity-specific arguments (optional). + :type state: ``dict`` + :raises EntityDeletedException: Raised if the entity no longer exists on + the server. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + search = s.apps['search'] + search.refresh() + """ + if state is not None: + self._state = state + else: + self._state = self.read(self.get()) + return self + + @property + def access(self): + """Returns the access metadata for this entity. + + :return: A :class:`splunklib.data.Record` object with three keys: + ``owner``, ``app``, and ``sharing``. + """ + return self.state.access + + @property + def content(self): + """Returns the contents of the entity. + + :return: A ``dict`` containing values. + """ + return self.state.content + + def disable(self): + """Disables the entity at this endpoint.""" + self.post("disable") + return self + + def enable(self): + """Enables the entity at this endpoint.""" + self.post("enable") + return self + + @property + def fields(self): + """Returns the content metadata for this entity. + + :return: A :class:`splunklib.data.Record` object with three keys: + ``required``, ``optional``, and ``wildcard``. + """ + return self.state.fields + + @property + def links(self): + """Returns a dictionary of related resources. + + :return: A ``dict`` with keys and corresponding URLs. + """ + return self.state.links + + @property + def name(self): + """Returns the entity name. + + :return: The entity name. + :rtype: ``string`` + """ + return self.state.title + + def read(self, response): + """ Reads the current state of the entity from the server. 
""" + results = self._load_state(response) + # In lower layers of the SDK, we end up trying to URL encode + # text to be dispatched via HTTP. However, these links are already + # URL encoded when they arrive, and we need to mark them as such. + unquoted_links = dict((k, UrlEncoded(v, skip_encode=True)) + for k, v in results['links'].items()) + results['links'] = unquoted_links + return results + + def reload(self): + """Reloads the entity.""" + self.post("_reload") + return self + + def acl_update(self, **kwargs): + """To update Access Control List (ACL) properties for an endpoint. + + :param kwargs: Additional entity-specific arguments (required). + + - "owner" (``string``): The Splunk username, such as "admin". A value of "nobody" means no specific user (required). + + - "sharing" (``string``): A mode that indicates how the resource is shared. The sharing mode can be "user", "app", "global", or "system" (required). + + :type kwargs: ``dict`` + + **Example**:: + + import splunklib.client as client + service = client.connect(...) + saved_search = service.saved_searches["name"] + saved_search.acl_update(sharing="app", owner="nobody", app="search", **{"perms.read": "admin, nobody"}) + """ + if "body" not in kwargs: + kwargs = {"body": kwargs} + + if "sharing" not in kwargs["body"]: + raise ValueError("Required argument 'sharing' is missing.") + if "owner" not in kwargs["body"]: + raise ValueError("Required argument 'owner' is missing.") + + self.post("acl", **kwargs) + self.refresh() + return self + + @property + def state(self): + """Returns the entity's state record. + + :return: A ``dict`` containing fields and metadata for the entity. + """ + if self._state is None: self.refresh() + return self._state + + def update(self, **kwargs): + """Updates the server with any changes you've made to the current entity + along with any additional arguments you specify. + + **Note**: You cannot update the ``name`` field of an entity. + + Many of the fields in the REST API are not valid Python + identifiers, which means you cannot pass them as keyword + arguments. That is, Python will fail to parse the following:: + + # This fails + x.update(check-new=False, email.to='boris@utopia.net') + + However, you can always explicitly use a dictionary to pass + such keys:: + + # This works + x.update(**{'check-new': False, 'email.to': 'boris@utopia.net'}) + + :param kwargs: Additional entity-specific arguments (optional). + :type kwargs: ``dict`` + + :return: The entity this method is called on. + :rtype: class:`Entity` + """ + # The peculiarity in question: the REST API creates a new + # Entity if we pass name in the dictionary, instead of the + # expected behavior of updating this Entity. Therefore, we + # check for 'name' in kwargs and throw an error if it is + # there. + if 'name' in kwargs: + raise IllegalOperationException('Cannot update the name of an Entity via the REST API.') + self.post(**kwargs) + return self + + +class ReadOnlyCollection(Endpoint): + """This class represents a read-only collection of entities in the Splunk + instance. + """ + + def __init__(self, service, path, item=Entity): + Endpoint.__init__(self, service, path) + self.item = item # Item accessor + self.null_count = -1 + + def __contains__(self, name): + """Is there at least one entry called *name* in this collection? + + Makes a single roundtrip to the server, plus at most two more + if + the ``autologin`` field of :func:`connect` is set to ``True``. 
+ """ + try: + self[name] + return True + except KeyError: + return False + except AmbiguousReferenceException: + return True + + def __getitem__(self, key): + """Fetch an item named *key* from this collection. + + A name is not a unique identifier in a collection. The unique + identifier is a name plus a namespace. For example, there can + be a saved search named ``'mysearch'`` with sharing ``'app'`` + in application ``'search'``, and another with sharing + ``'user'`` with owner ``'boris'`` and application + ``'search'``. If the ``Collection`` is attached to a + ``Service`` that has ``'-'`` (wildcard) as user and app in its + namespace, then both of these may be visible under the same + name. + + Where there is no conflict, ``__getitem__`` will fetch the + entity given just the name. If there is a conflict, and you + pass just a name, it will raise a ``ValueError``. In that + case, add the namespace as a second argument. + + This function makes a single roundtrip to the server, plus at + most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param key: The name to fetch, or a tuple (name, namespace). + :return: An :class:`Entity` object. + :raises KeyError: Raised if *key* does not exist. + :raises ValueError: Raised if no namespace is specified and *key* + does not refer to a unique name. + + **Example**:: + + s = client.connect(...) + saved_searches = s.saved_searches + x1 = saved_searches.create( + 'mysearch', 'search * | head 1', + owner='admin', app='search', sharing='app') + x2 = saved_searches.create( + 'mysearch', 'search * | head 1', + owner='admin', app='search', sharing='user') + # Raises ValueError: + saved_searches['mysearch'] + # Fetches x1 + saved_searches[ + 'mysearch', + client.namespace(sharing='app', app='search')] + # Fetches x2 + saved_searches[ + 'mysearch', + client.namespace(sharing='user', owner='boris', app='search')] + """ + try: + if isinstance(key, tuple) and len(key) == 2: + # x[a,b] is translated to x.__getitem__( (a,b) ), so we + # have to extract values out. + key, ns = key + key = UrlEncoded(key, encode_slash=True) + response = self.get(key, owner=ns.owner, app=ns.app) + else: + key = UrlEncoded(key, encode_slash=True) + response = self.get(key) + entries = self._load_list(response) + if len(entries) > 1: + raise AmbiguousReferenceException( + f"Found multiple entities named '{key}'; please specify a namespace.") + if len(entries) == 0: + raise KeyError(key) + return entries[0] + except HTTPError as he: + if he.status == 404: # No entity matching key and namespace. + raise KeyError(key) + else: + raise + + def __iter__(self, **kwargs): + """Iterate over the entities in the collection. + + :param kwargs: Additional arguments. + :type kwargs: ``dict`` + :rtype: iterator over entities. + + Implemented to give Collection a listish interface. This + function always makes a roundtrip to the server, plus at most + two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + **Example**:: + + import splunklib.client as client + c = client.connect(...) + saved_searches = c.saved_searches + for entity in saved_searches: + print(f"Saved search named {entity.name}") + """ + + for item in self.iter(**kwargs): + yield item + + def __len__(self): + """Enable ``len(...)`` for ``Collection`` objects. + + Implemented for consistency with a listish interface. No + further failure modes beyond those possible for any method on + an Endpoint. 
+
+        This function always makes a round trip to the server, plus at
+        most two additional round trips if the ``autologin`` field of
+        :func:`connect` is set to ``True``.
+
+        **Example**::
+
+            import splunklib.client as client
+            c = client.connect(...)
+            saved_searches = c.saved_searches
+            n = len(saved_searches)
+        """
+        return len(self.list())
+
+    def _entity_path(self, state):
+        """Calculate the path to an entity to be returned.
+
+        *state* should be the dictionary returned by
+        :func:`_parse_atom_entry`. :func:`_entity_path` extracts the
+        link to this entity from *state*, and strips all the namespace
+        prefixes from it to leave only the relative path of the entity
+        itself, sans namespace.
+
+        :rtype: ``string``
+        :return: an absolute path
+        """
+        # This has been factored out so that it can be easily
+        # overloaded by Configurations, which has to switch its
+        # entities' endpoints from its own properties/ to configs/.
+        raw_path = parse.unquote(state.links.alternate)
+        if 'servicesNS/' in raw_path:
+            return _trailing(raw_path, 'servicesNS/', '/', '/')
+        if 'services/' in raw_path:
+            return _trailing(raw_path, 'services/')
+        return raw_path
+
+    def _load_list(self, response):
+        """Converts *response* to a list of entities.
+
+        *response* is assumed to be a :class:`Record` containing an
+        HTTP response, of the form::
+
+            {'status': 200,
+             'headers': [('content-length', '232642'),
+                         ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'),
+                         ('server', 'Splunkd'),
+                         ('connection', 'close'),
+                         ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'),
+                         ('date', 'Tue, 29 May 2012 15:27:08 GMT'),
+                         ('content-type', 'text/xml; charset=utf-8')],
+             'reason': 'OK',
+             'body': ...a stream implementing .read()...}
+
+        The ``'body'`` key refers to a stream containing an Atom feed,
+        that is, an XML document with a toplevel element ``<feed>``,
+        and within that element one or more ``<entry>`` elements.
+        """
+        # Some subclasses of Collection have to override this because
+        # splunkd returns something that doesn't match
+        # <feed><entry></entry></feed>.
+        entries = _load_atom_entries(response)
+        if entries is None: return []
+        entities = []
+        for entry in entries:
+            state = _parse_atom_entry(entry)
+            entity = self.item(
+                self.service,
+                self._entity_path(state),
+                state=state)
+            entities.append(entity)
+
+        return entities
+
+    def itemmeta(self):
+        """Returns metadata for members of the collection.
+
+        Makes a single roundtrip to the server, plus two more at most if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :return: A :class:`splunklib.data.Record` object containing the metadata.
+
+        **Example**::
+
+            import splunklib.client as client
+            import pprint
+            s = client.connect(...)
+            pprint.pprint(s.apps.itemmeta())
+            {'access': {'app': 'search',
+                        'can_change_perms': '1',
+                        'can_list': '1',
+                        'can_share_app': '1',
+                        'can_share_global': '1',
+                        'can_share_user': '1',
+                        'can_write': '1',
+                        'modifiable': '1',
+                        'owner': 'admin',
+                        'perms': {'read': ['*'], 'write': ['admin']},
+                        'removable': '0',
+                        'sharing': 'user'},
+             'fields': {'optional': ['author',
+                                     'configured',
+                                     'description',
+                                     'label',
+                                     'manageable',
+                                     'template',
+                                     'visible'],
+                        'required': ['name'], 'wildcard': []}}
+        """
+        response = self.get("_new")
+        content = _load_atom(response, MATCH_ENTRY_CONTENT)
+        return _parse_atom_metadata(content)
+
+    def iter(self, offset=0, count=None, pagesize=None, **kwargs):
+        """Iterates over the collection.
+ + This method is equivalent to the :meth:`list` method, but + it returns an iterator and can load a certain number of entities at a + time from the server. + + :param offset: The index of the first entity to return (optional). + :type offset: ``integer`` + :param count: The maximum number of entities to return (optional). + :type count: ``integer`` + :param pagesize: The number of entities to load (optional). + :type pagesize: ``integer`` + :param kwargs: Additional arguments (optional): + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + for saved_search in s.saved_searches.iter(pagesize=10): + # Loads 10 saved searches at a time from the + # server. + ... + """ + assert pagesize is None or pagesize > 0 + if count is None: + count = self.null_count + fetched = 0 + while count == self.null_count or fetched < count: + response = self.get(count=pagesize or count, offset=offset, **kwargs) + items = self._load_list(response) + N = len(items) + fetched += N + for item in items: + yield item + if pagesize is None or N < pagesize: + break + offset += N + logger.debug("pagesize=%d, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs) + + # kwargs: count, offset, search, sort_dir, sort_key, sort_mode + def list(self, count=None, **kwargs): + """Retrieves a list of entities in this collection. + + The entire collection is loaded at once and is returned as a list. This + function makes a single roundtrip to the server, plus at most two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + There is no caching--every call makes at least one round trip. + + :param count: The maximum number of entities to return (optional). + :type count: ``integer`` + :param kwargs: Additional arguments (optional): + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + :return: A ``list`` of entities. + """ + # response = self.get(count=count, **kwargs) + # return self._load_list(response) + return list(self.iter(count=count, **kwargs)) + + +class Collection(ReadOnlyCollection): + """A collection of entities. + + Splunk provides a number of different collections of distinct + entity types: applications, saved searches, fired alerts, and a + number of others. Each particular type is available separately + from the Splunk instance, and the entities of that type are + returned in a :class:`Collection`. + + The interface for :class:`Collection` does not quite match either + ``list`` or ``dict`` in Python, because there are enough semantic + mismatches with either to make its behavior surprising. A unique + element in a :class:`Collection` is defined by a string giving its + name plus namespace (although the namespace is optional if the name is + unique). 
+ + **Example**:: + + import splunklib.client as client + service = client.connect(...) + mycollection = service.saved_searches + mysearch = mycollection['my_search', client.namespace(owner='boris', app='natasha', sharing='user')] + # Or if there is only one search visible named 'my_search' + mysearch = mycollection['my_search'] + + Similarly, ``name`` in ``mycollection`` works as you might expect (though + you cannot currently pass a namespace to the ``in`` operator), as does + ``len(mycollection)``. + + However, as an aggregate, :class:`Collection` behaves more like a + list. If you iterate over a :class:`Collection`, you get an + iterator over the entities, not the names and namespaces. + + **Example**:: + + for entity in mycollection: + assert isinstance(entity, client.Entity) + + Use the :meth:`create` and :meth:`delete` methods to create and delete + entities in this collection. To view the access control list and other + metadata of the collection, use the :meth:`ReadOnlyCollection.itemmeta` method. + + :class:`Collection` does no caching. Each call makes at least one + round trip to the server to fetch data. + """ + + def create(self, name, **params): + """Creates a new entity in this collection. + + This function makes either one or two roundtrips to the + server, depending on the type of entities in this + collection, plus at most two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param name: The name of the entity to create. + :type name: ``string`` + :param namespace: A namespace, as created by the :func:`splunklib.binding.namespace` + function (optional). You can also set ``owner``, ``app``, and + ``sharing`` in ``params``. + :type namespace: A :class:`splunklib.data.Record` object with keys ``owner``, ``app``, + and ``sharing``. + :param params: Additional entity-specific arguments (optional). + :type params: ``dict`` + :return: The new entity. + :rtype: A subclass of :class:`Entity`, chosen by :meth:`Collection.self.item`. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + applications = s.apps + new_app = applications.create("my_fake_app") + """ + if not isinstance(name, str): + raise InvalidNameException(f"{name} is not a valid name for an entity.") + if 'namespace' in params: + namespace = params.pop('namespace') + params['owner'] = namespace.owner + params['app'] = namespace.app + params['sharing'] = namespace.sharing + response = self.post(name=name, **params) + atom = _load_atom(response, XNAME_ENTRY) + if atom is None: + # This endpoint doesn't return the content of the new + # item. We have to go fetch it ourselves. + return self[name] + entry = atom.entry + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + self._entity_path(state), + state=state) + return entity + + def delete(self, name, **params): + """Deletes a specified entity from the collection. + + :param name: The name of the entity to delete. + :type name: ``string`` + :return: The collection. + :rtype: ``self`` + + This method is implemented for consistency with the REST API's DELETE + method. + + If there is no *name* entity on the server, a ``KeyError`` is + thrown. This function always makes a roundtrip to the server. + + **Example**:: + + import splunklib.client as client + c = client.connect(...) 
+ saved_searches = c.saved_searches + saved_searches.create('my_saved_search', + 'search * | head 1') + assert 'my_saved_search' in saved_searches + saved_searches.delete('my_saved_search') + assert 'my_saved_search' not in saved_searches + """ + name = UrlEncoded(name, encode_slash=True) + if 'namespace' in params: + namespace = params.pop('namespace') + params['owner'] = namespace.owner + params['app'] = namespace.app + params['sharing'] = namespace.sharing + try: + self.service.delete(_path(self.path, name), **params) + except HTTPError as he: + # An HTTPError with status code 404 means that the entity + # has already been deleted, and we reraise it as a + # KeyError. + if he.status == 404: + raise KeyError(f"No such entity {name}") + else: + raise + return self + + def get(self, name="", owner=None, app=None, sharing=None, **query): + """Performs a GET request to the server on the collection. + + If *owner*, *app*, and *sharing* are omitted, this method takes a + default namespace from the :class:`Service` object for this :class:`Endpoint`. + All other keyword arguments are included in the URL as query parameters. + + :raises AuthenticationError: Raised when the ``Service`` is not logged in. + :raises HTTPError: Raised when an error in the request occurs. + :param path_segment: A path segment relative to this endpoint. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (optional). + :type sharing: "global", "system", "app", or "user" + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + import splunklib.client + s = client.service(...) + saved_searches = s.saved_searches + saved_searches.get("my/saved/search") == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '26208'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 16:30:35 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'OK', + 'status': 200} + saved_searches.get('nonexistant/search') # raises HTTPError + s.logout() + saved_searches.get() # raises AuthenticationError + + """ + name = UrlEncoded(name, encode_slash=True) + return super().get(name, owner, app, sharing, **query) + + +class ConfigurationFile(Collection): + """This class contains all of the stanzas from one configuration file. + """ + + # __init__'s arguments must match those of an Entity, not a + # Collection, since it is being created as the elements of a + # Configurations, which is a Collection subclass. + def __init__(self, service, path, **kwargs): + Collection.__init__(self, service, path, item=Stanza) + self.name = kwargs['state']['title'] + + +class Configurations(Collection): + """This class provides access to the configuration files from this Splunk + instance. Retrieve this collection using :meth:`Service.confs`. + + Splunk's configuration is divided into files, and each file into + stanzas. This collection is unusual in that the values in it are + themselves collections of :class:`ConfigurationFile` objects. 
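+
+    **Example** (a minimal sketch; assumes the standard ``props.conf``
+    file exists on the server)::
+
+        import splunklib.client as client
+        service = client.connect(...)
+        props = service.confs['props']  # a ConfigurationFile
+        for stanza in props:  # each stanza is an Entity
+            print(stanza.name)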
+ """ + + def __init__(self, service): + Collection.__init__(self, service, PATH_PROPERTIES, item=ConfigurationFile) + if self.service.namespace.owner == '-' or self.service.namespace.app == '-': + raise ValueError("Configurations cannot have wildcards in namespace.") + + def __getitem__(self, key): + # The superclass implementation is designed for collections that contain + # entities. This collection (Configurations) contains collections + # (ConfigurationFile). + # + # The configurations endpoint returns multiple entities when we ask for a single file. + # This screws up the default implementation of __getitem__ from Collection, which thinks + # that multiple entities means a name collision, so we have to override it here. + try: + self.get(key) + return ConfigurationFile(self.service, PATH_CONF % key, state={'title': key}) + except HTTPError as he: + if he.status == 404: # No entity matching key + raise KeyError(key) + else: + raise + + def __contains__(self, key): + # configs/conf-{name} never returns a 404. We have to post to properties/{name} + # in order to find out if a configuration exists. + try: + self.get(key) + return True + except HTTPError as he: + if he.status == 404: # No entity matching key + return False + raise + + def create(self, name): + """ Creates a configuration file named *name*. + + If there is already a configuration file with that name, + the existing file is returned. + + :param name: The name of the configuration file. + :type name: ``string`` + + :return: The :class:`ConfigurationFile` object. + """ + # This has to be overridden to handle the plumbing of creating + # a ConfigurationFile (which is a Collection) instead of some + # Entity. + if not isinstance(name, str): + raise ValueError(f"Invalid name: {repr(name)}") + response = self.post(__conf=name) + if response.status == 303: + return self[name] + if response.status == 201: + return ConfigurationFile(self.service, PATH_CONF % name, item=Stanza, state={'title': name}) + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") + + def delete(self, key): + """Raises `IllegalOperationException`.""" + raise IllegalOperationException("Cannot delete configuration files from the REST API.") + + def _entity_path(self, state): + # Overridden to make all the ConfigurationFile objects + # returned refer to the configs/ path instead of the + # properties/ path used by Configrations. + return PATH_CONF % state['title'] + + +class Stanza(Entity): + """This class contains a single configuration stanza.""" + + def submit(self, stanza): + """Adds keys to the current configuration stanza as a + dictionary of key-value pairs. + + :param stanza: A dictionary of key-value pairs for the stanza. + :type stanza: ``dict`` + :return: The :class:`Stanza` object. + """ + body = _encode(**stanza) + self.service.post(self.path, body=body) + return self + + def __len__(self): + # The stanza endpoint returns all the keys at the same level in the XML as the eai information + # and 'disabled', so to get an accurate length, we have to filter those out and have just + # the stanza keys. + return len([x for x in self._state.content.keys() + if not x.startswith('eai') and x != 'disabled']) + + +class StoragePassword(Entity): + """This class contains a storage password. 
+ """ + + def __init__(self, service, path, **kwargs): + state = kwargs.get('state', None) + kwargs['skip_refresh'] = kwargs.get('skip_refresh', state is not None) + super().__init__(service, path, **kwargs) + self._state = state + + @property + def clear_password(self): + return self.content.get('clear_password') + + @property + def encrypted_password(self): + return self.content.get('encr_password') + + @property + def realm(self): + return self.content.get('realm') + + @property + def username(self): + return self.content.get('username') + + +class StoragePasswords(Collection): + """This class provides access to the storage passwords from this Splunk + instance. Retrieve this collection using :meth:`Service.storage_passwords`. + """ + + def __init__(self, service): + if service.namespace.owner == '-' or service.namespace.app == '-': + raise ValueError("StoragePasswords cannot have wildcards in namespace.") + super().__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) + + def create(self, password, username, realm=None): + """ Creates a storage password. + + A `StoragePassword` can be identified by , or by : if the + optional realm parameter is also provided. + + :param password: The password for the credentials - this is the only part of the credentials that will be stored securely. + :type name: ``string`` + :param username: The username for the credentials. + :type name: ``string`` + :param realm: The credential realm. (optional) + :type name: ``string`` + + :return: The :class:`StoragePassword` object created. + """ + if not isinstance(username, str): + raise ValueError(f"Invalid name: {repr(username)}") + + if realm is None: + response = self.post(password=password, name=username) + else: + response = self.post(password=password, realm=realm, name=username) + + if response.status != 201: + raise ValueError(f"Unexpected status code {response.status} returned from creating a stanza") + + entries = _load_atom_entries(response) + state = _parse_atom_entry(entries[0]) + storage_password = StoragePassword(self.service, self._entity_path(state), state=state, skip_refresh=True) + + return storage_password + + def delete(self, username, realm=None): + """Delete a storage password by username and/or realm. + + The identifier can be passed in through the username parameter as + or :, but the preferred way is by + passing in the username and realm parameters. + + :param username: The username for the credentials, or : if the realm parameter is omitted. + :type name: ``string`` + :param realm: The credential realm. (optional) + :type name: ``string`` + :return: The `StoragePassword` collection. + :rtype: ``self`` + """ + if realm is None: + # This case makes the username optional, so + # the full name can be passed in as realm. + # Assume it's already encoded. + name = username + else: + # Encode each component separately + name = UrlEncoded(realm, encode_slash=True) + ":" + UrlEncoded(username, encode_slash=True) + + # Append the : expected at the end of the name + if name[-1] != ":": + name = name + ":" + return Collection.delete(self, name) + + +class AlertGroup(Entity): + """This class represents a group of fired alerts for a saved search. Access + it using the :meth:`alerts` property.""" + + def __init__(self, service, path, **kwargs): + Entity.__init__(self, service, path, **kwargs) + + def __len__(self): + return self.count + + @property + def alerts(self): + """Returns a collection of triggered alerts. + + :return: A :class:`Collection` of triggered alerts. 
+ """ + return Collection(self.service, self.path) + + @property + def count(self): + """Returns the count of triggered alerts. + + :return: The triggered alert count. + :rtype: ``integer`` + """ + return int(self.content.get('triggered_alert_count', 0)) + + +class Indexes(Collection): + """This class contains the collection of indexes in this Splunk instance. + Retrieve this collection using :meth:`Service.indexes`. + """ + + def get_default(self): + """ Returns the name of the default index. + + :return: The name of the default index. + + """ + index = self['_audit'] + return index['defaultDatabase'] + + def delete(self, name): + """ Deletes a given index. + + **Note**: This method is only supported in Splunk 5.0 and later. + + :param name: The name of the index to delete. + :type name: ``string`` + """ + if self.service.splunk_version >= (5,): + Collection.delete(self, name) + else: + raise IllegalOperationException("Deleting indexes via the REST API is " + "not supported before Splunk version 5.") + + +class Index(Entity): + """This class represents an index and provides different operations, such as + cleaning the index, writing to the index, and so forth.""" + + def __init__(self, service, path, **kwargs): + Entity.__init__(self, service, path, **kwargs) + + def attach(self, host=None, source=None, sourcetype=None): + """Opens a stream (a writable socket) for writing events to the index. + + :param host: The host value for events written to the stream. + :type host: ``string`` + :param source: The source value for events written to the stream. + :type source: ``string`` + :param sourcetype: The sourcetype value for events written to the + stream. + :type sourcetype: ``string`` + + :return: A writable socket. + """ + args = {'index': self.name} + if host is not None: args['host'] = host + if source is not None: args['source'] = source + if sourcetype is not None: args['sourcetype'] = sourcetype + path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + parse.urlencode(args), skip_encode=True) + + cookie_header = self.service.token if self.service.token is _NoAuthenticationToken else self.service.token.replace("Splunk ", "") + cookie_or_auth_header = f"Authorization: Splunk {cookie_header}\r\n" + + # If we have cookie(s), use them instead of "Authorization: ..." + if self.service.has_cookies(): + cookie_header = _make_cookie_header(self.service.get_cookies().items()) + cookie_or_auth_header = f"Cookie: {cookie_header}\r\n" + + # Since we need to stream to the index connection, we have to keep + # the connection open and use the Splunk extension headers to note + # the input mode + sock = self.service.connect() + headers = [f"POST {str(self.service._abspath(path))} HTTP/1.1\r\n".encode('utf-8'), + f"Host: {self.service.host}:{int(self.service.port)}\r\n".encode('utf-8'), + b"Accept-Encoding: identity\r\n", + cookie_or_auth_header.encode('utf-8'), + b"X-Splunk-Input-Mode: Streaming\r\n", + b"\r\n"] + + for h in headers: + sock.write(h) + return sock + + @contextlib.contextmanager + def attached_socket(self, *args, **kwargs): + """Opens a raw socket in a ``with`` block to write data to Splunk. + + The arguments are identical to those for :meth:`attach`. The socket is + automatically closed at the end of the ``with`` block, even if an + exception is raised in the block. + + :param host: The host value for events written to the stream. + :type host: ``string`` + :param source: The source value for events written to the stream. 
+ :type source: ``string`` + :param sourcetype: The sourcetype value for events written to the + stream. + :type sourcetype: ``string`` + + :returns: Nothing. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + index = s.indexes['some_index'] + with index.attached_socket(sourcetype='test') as sock: + sock.send('Test event\\r\\n') + + """ + try: + sock = self.attach(*args, **kwargs) + yield sock + finally: + sock.shutdown(socket.SHUT_RDWR) + sock.close() + + def clean(self, timeout=60): + """Deletes the contents of the index. + + This method blocks until the index is empty, because it needs to restore + values at the end of the operation. + + :param timeout: The time-out period for the operation, in seconds (the + default is 60). + :type timeout: ``integer`` + + :return: The :class:`Index`. + """ + self.refresh() + + tds = self['maxTotalDataSizeMB'] + ftp = self['frozenTimePeriodInSecs'] + was_disabled_initially = self.disabled + try: + if not was_disabled_initially and self.service.splunk_version < (5,): + # Need to disable the index first on Splunk 4.x, + # but it doesn't work to disable it on 5.0. + self.disable() + self.update(maxTotalDataSizeMB=1, frozenTimePeriodInSecs=1) + self.roll_hot_buckets() + + # Wait until event count goes to 0. + start = datetime.now() + diff = timedelta(seconds=timeout) + while self.content.totalEventCount != '0' and datetime.now() < start + diff: + sleep(1) + self.refresh() + + if self.content.totalEventCount != '0': + raise OperationError( + f"Cleaning index {self.name} took longer than {timeout} seconds; timing out.") + finally: + # Restore original values + self.update(maxTotalDataSizeMB=tds, frozenTimePeriodInSecs=ftp) + if not was_disabled_initially and self.service.splunk_version < (5,): + # Re-enable the index if it was originally enabled and we messed with it. + self.enable() + + return self + + def roll_hot_buckets(self): + """Performs rolling hot buckets for this index. + + :return: The :class:`Index`. + """ + self.post("roll-hot-buckets") + return self + + def submit(self, event, host=None, source=None, sourcetype=None): + """Submits a single event to the index using ``HTTP POST``. + + :param event: The event to submit. + :type event: ``string`` + :param `host`: The host value of the event. + :type host: ``string`` + :param `source`: The source value of the event. + :type source: ``string`` + :param `sourcetype`: The sourcetype value of the event. + :type sourcetype: ``string`` + + :return: The :class:`Index`. + """ + args = {'index': self.name} + if host is not None: args['host'] = host + if source is not None: args['source'] = source + if sourcetype is not None: args['sourcetype'] = sourcetype + + self.service.post(PATH_RECEIVERS_SIMPLE, body=event, **args) + return self + + # kwargs: host, host_regex, host_segment, rename-source, sourcetype + def upload(self, filename, **kwargs): + """Uploads a file for immediate indexing. + + **Note**: The file must be locally accessible from the server. + + :param filename: The name of the file to upload. The file can be a + plain, compressed, or archived file. + :type filename: ``string`` + :param kwargs: Additional arguments (optional). For more about the + available parameters, see `Index parameters `_ on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The :class:`Index`. 
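+
+        **Example** (a minimal sketch; the file path and sourcetype are
+        illustrative, and the file must already exist on the Splunk server)::
+
+            import splunklib.client as client
+            service = client.connect(...)
+            index = service.indexes['main']
+            index.upload('/var/log/sample_events.log', sourcetype='my_sourcetype')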
+ """ + kwargs['index'] = self.name + path = 'data/inputs/oneshot' + self.service.post(path, name=filename, **kwargs) + return self + + +class Input(Entity): + """This class represents a Splunk input. This class is the base for all + typed input classes and is also used when the client does not recognize an + input kind. + """ + + def __init__(self, service, path, kind=None, **kwargs): + # kind can be omitted (in which case it is inferred from the path) + # Otherwise, valid values are the paths from data/inputs ("udp", + # "monitor", "tcp/raw"), or two special cases: "tcp" (which is "tcp/raw") + # and "splunktcp" (which is "tcp/cooked"). + Entity.__init__(self, service, path, **kwargs) + if kind is None: + path_segments = path.split('/') + i = path_segments.index('inputs') + 1 + if path_segments[i] == 'tcp': + self.kind = path_segments[i] + '/' + path_segments[i + 1] + else: + self.kind = path_segments[i] + else: + self.kind = kind + + # Handle old input kind names. + if self.kind == 'tcp': + self.kind = 'tcp/raw' + if self.kind == 'splunktcp': + self.kind = 'tcp/cooked' + + def update(self, **kwargs): + """Updates the server with any changes you've made to the current input + along with any additional arguments you specify. + + :param kwargs: Additional arguments (optional). For more about the + available parameters, see `Input parameters `_ on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The input this method was called on. + :rtype: class:`Input` + """ + # UDP and TCP inputs require special handling due to their restrictToHost + # field. For all other inputs kinds, we can dispatch to the superclass method. + if self.kind not in ['tcp', 'splunktcp', 'tcp/raw', 'tcp/cooked', 'udp']: + return super().update(**kwargs) + else: + # The behavior of restrictToHost is inconsistent across input kinds and versions of Splunk. + # In Splunk 4.x, the name of the entity is only the port, independent of the value of + # restrictToHost. In Splunk 5.0 this changed so the name will be of the form :. + # In 5.0 and 5.0.1, if you don't supply the restrictToHost value on every update, it will + # remove the host restriction from the input. As of 5.0.2 you simply can't change restrictToHost + # on an existing input. + + # The logic to handle all these cases: + # - Throw an exception if the user tries to set restrictToHost on an existing input + # for *any* version of Splunk. + # - Set the existing restrictToHost value on the update args internally so we don't + # cause it to change in Splunk 5.0 and 5.0.1. + to_update = kwargs.copy() + + if 'restrictToHost' in kwargs: + raise IllegalOperationException("Cannot set restrictToHost on an existing input with the SDK.") + if 'restrictToHost' in self._state.content and self.kind != 'udp': + to_update['restrictToHost'] = self._state.content['restrictToHost'] + + # Do the actual update operation. + return super().update(**to_update) + + +# Inputs is a "kinded" collection, which is a heterogenous collection where +# each item is tagged with a kind, that provides a single merged view of all +# input kinds. +class Inputs(Collection): + """This class represents a collection of inputs. The collection is + heterogeneous and each member of the collection contains a *kind* property + that indicates the specific type of input. 
+    Retrieve this collection using :meth:`Service.inputs`."""
+
+    def __init__(self, service, kindmap=None):
+        Collection.__init__(self, service, PATH_INPUTS, item=Input)
+
+    def __getitem__(self, key):
+        # The key needed to retrieve the input needs its parentheses to be URL encoded
+        # based on the REST API for input
+        #
+        if isinstance(key, tuple) and len(key) == 2:
+            # Fetch a single kind
+            key, kind = key
+            key = UrlEncoded(key, encode_slash=True)
+            try:
+                response = self.get(self.kindpath(kind) + "/" + key)
+                entries = self._load_list(response)
+                if len(entries) > 1:
+                    raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.")
+                if len(entries) == 0:
+                    raise KeyError((key, kind))
+                return entries[0]
+            except HTTPError as he:
+                if he.status == 404:  # No entity matching kind and key
+                    raise KeyError((key, kind))
+                else:
+                    raise
+        else:
+            # Iterate over all the kinds looking for matches.
+            kind = None
+            candidate = None
+            key = UrlEncoded(key, encode_slash=True)
+            for kind in self.kinds:
+                try:
+                    response = self.get(kind + "/" + key)
+                    entries = self._load_list(response)
+                    if len(entries) > 1:
+                        raise AmbiguousReferenceException(f"Found multiple inputs of kind {kind} named {key}.")
+                    if len(entries) == 0:
+                        pass
+                    else:
+                        if candidate is not None:  # Already found at least one candidate
+                            raise AmbiguousReferenceException(
+                                f"Found multiple inputs named {key}, please specify a kind")
+                        candidate = entries[0]
+                except HTTPError as he:
+                    if he.status == 404:
+                        pass  # Just carry on to the next kind.
+                    else:
+                        raise
+            if candidate is None:
+                raise KeyError(key)  # Never found a match.
+            return candidate
+
+    def __contains__(self, key):
+        if isinstance(key, tuple) and len(key) == 2:
+            # If we specify a kind, this will shortcut properly
+            try:
+                self.__getitem__(key)
+                return True
+            except KeyError:
+                return False
+        else:
+            # Without a kind, we want to minimize the number of round trips to the server, so we
+            # reimplement some of the behavior of __getitem__ in order to be able to stop searching
+            # on the first hit.
+            for kind in self.kinds:
+                try:
+                    response = self.get(self.kindpath(kind) + "/" + key)
+                    entries = self._load_list(response)
+                    if len(entries) > 0:
+                        return True
+                except HTTPError as he:
+                    if he.status == 404:
+                        pass  # Just carry on to the next kind.
+                    else:
+                        raise
+            return False
+
+    def create(self, name, kind, **kwargs):
+        """Creates an input of a specific kind in this collection, with any
+        arguments you specify.
+
+        :param `name`: The input name.
+        :type name: ``string``
+        :param `kind`: The kind of input:
+
+            - "ad": Active Directory
+
+            - "monitor": Files and directories
+
+            - "registry": Windows Registry
+
+            - "script": Scripts
+
+            - "splunktcp": TCP, processed
+
+            - "tcp": TCP, unprocessed
+
+            - "udp": UDP
+
+            - "win-event-log-collections": Windows event log
+
+            - "win-perfmon": Performance monitoring
+
+            - "win-wmi-collections": WMI
+
+        :type kind: ``string``
+        :param `kwargs`: Additional arguments (optional). For more about the
+            available parameters, see `Input parameters `_ on Splunk Developer Portal.
+
+        :type kwargs: ``dict``
+
+        :return: The new :class:`Input`.
+        """
+        kindpath = self.kindpath(kind)
+        self.post(kindpath, name=name, **kwargs)
+
+        # If we created an input with restrictToHost set, then
+        # its path will be <restrictToHost>:<name>, not just <name>,
+        # and we have to adjust accordingly.
+
+        # URL-encode the name of the entity.
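+        # (encode_slash=True because the name becomes a single path segment
+        # of the entity URL, so any "/" it contains must be escaped as well.)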
+        name = UrlEncoded(name, encode_slash=True)
+        path = _path(
+            self.path + kindpath,
+            f"{kwargs['restrictToHost']}:{name}" if 'restrictToHost' in kwargs else name
+        )
+        return Input(self.service, path, kind)
+
+    def delete(self, name, kind=None):
+        """Removes an input from the collection.
+
+        :param `kind`: The kind of input:
+
+            - "ad": Active Directory
+
+            - "monitor": Files and directories
+
+            - "registry": Windows Registry
+
+            - "script": Scripts
+
+            - "splunktcp": TCP, processed
+
+            - "tcp": TCP, unprocessed
+
+            - "udp": UDP
+
+            - "win-event-log-collections": Windows event log
+
+            - "win-perfmon": Performance monitoring
+
+            - "win-wmi-collections": WMI
+
+        :type kind: ``string``
+        :param name: The name of the input to remove.
+        :type name: ``string``
+
+        :return: The :class:`Inputs` collection.
+        """
+        if kind is None:
+            self.service.delete(self[name].path)
+        else:
+            self.service.delete(self[name, kind].path)
+        return self
+
+    def itemmeta(self, kind):
+        """Returns metadata for the members of a given kind.
+
+        :param `kind`: The kind of input:
+
+            - "ad": Active Directory
+
+            - "monitor": Files and directories
+
+            - "registry": Windows Registry
+
+            - "script": Scripts
+
+            - "splunktcp": TCP, processed
+
+            - "tcp": TCP, unprocessed
+
+            - "udp": UDP
+
+            - "win-event-log-collections": Windows event log
+
+            - "win-perfmon": Performance monitoring
+
+            - "win-wmi-collections": WMI
+
+        :type kind: ``string``
+
+        :return: The metadata.
+        :rtype: :class:`splunklib.data.Record`
+        """
+        response = self.get(f"{self.kindpath(kind)}/_new")
+        content = _load_atom(response, MATCH_ENTRY_CONTENT)
+        return _parse_atom_metadata(content)
+
+    def _get_kind_list(self, subpath=None):
+        if subpath is None:
+            subpath = []
+
+        kinds = []
+        response = self.get('/'.join(subpath))
+        content = _load_atom_entries(response)
+        for entry in content:
+            this_subpath = subpath + [entry.title]
+            # The "all" endpoint doesn't work yet.
+            # The "tcp/ssl" endpoint is not a real input collection.
+            if entry.title == 'all' or this_subpath == ['tcp', 'ssl']:
+                continue
+            if 'create' in [x.rel for x in entry.link]:
+                path = '/'.join(subpath + [entry.title])
+                kinds.append(path)
+            else:
+                subkinds = self._get_kind_list(subpath + [entry.title])
+                kinds.extend(subkinds)
+        return kinds
+
+    @property
+    def kinds(self):
+        """Returns the input kinds on this Splunk instance.
+
+        :return: The list of input kinds.
+        :rtype: ``list``
+        """
+        return self._get_kind_list()
+
+    def kindpath(self, kind):
+        """Returns a path to the resources for a given input kind.
+
+        :param `kind`: The kind of input:
+
+            - "ad": Active Directory
+
+            - "monitor": Files and directories
+
+            - "registry": Windows Registry
+
+            - "script": Scripts
+
+            - "splunktcp": TCP, processed
+
+            - "tcp": TCP, unprocessed
+
+            - "udp": UDP
+
+            - "win-event-log-collections": Windows event log
+
+            - "win-perfmon": Performance monitoring
+
+            - "win-wmi-collections": WMI
+
+        :type kind: ``string``
+
+        :return: The relative endpoint path.
+        :rtype: ``string``
+        """
+        if kind == 'tcp':
+            return UrlEncoded('tcp/raw', skip_encode=True)
+        if kind == 'splunktcp':
+            return UrlEncoded('tcp/cooked', skip_encode=True)
+        return UrlEncoded(kind, skip_encode=True)
+
+    def list(self, *kinds, **kwargs):
+        """Returns a list of inputs that are in the :class:`Inputs` collection.
+        You can also filter by one or more input kinds.
+
+        This function iterates over all possible inputs, regardless of any arguments you
+        specify.
Because the :class:`Inputs` collection is the union of all the inputs of each + kind, this method implements parameters such as "count", "search", and so + on at the Python level once all the data has been fetched. The exception + is when you specify a single input kind, and then this method makes a single request + with the usual semantics for parameters. + + :param kinds: The input kinds to return (optional). + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kinds: ``string`` + :param kwargs: Additional arguments (optional): + + - "count" (``integer``): The maximum number of items to return. + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + + :return: A list of input kinds. + :rtype: ``list`` + """ + if len(kinds) == 0: + kinds = self.kinds + if len(kinds) == 1: + kind = kinds[0] + logger.debug("Inputs.list taking short circuit branch for single kind.") + path = self.kindpath(kind) + logger.debug("Path for inputs: %s", path) + try: + path = UrlEncoded(path, skip_encode=True) + response = self.get(path, **kwargs) + except HTTPError as he: + if he.status == 404: # No inputs of this kind + return [] + entities = [] + entries = _load_atom_entries(response) + if entries is None: + return [] # No inputs in a collection comes back with no feed or entry in the XML + for entry in entries: + state = _parse_atom_entry(entry) + # Unquote the URL, since all URL encoded in the SDK + # should be of type UrlEncoded, and all str should not + # be URL encoded. + path = parse.unquote(state.links.alternate) + entity = Input(self.service, path, kind, state=state) + entities.append(entity) + return entities + + search = kwargs.get('search', '*') + + entities = [] + for kind in kinds: + response = None + try: + kind = UrlEncoded(kind, skip_encode=True) + response = self.get(self.kindpath(kind), search=search) + except HTTPError as e: + if e.status == 404: + continue # No inputs of this kind + else: + raise + + entries = _load_atom_entries(response) + if entries is None: continue # No inputs to process + for entry in entries: + state = _parse_atom_entry(entry) + # Unquote the URL, since all URL encoded in the SDK + # should be of type UrlEncoded, and all str should not + # be URL encoded. 
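+                # (state.links.alternate is the entity's canonical URL taken
+                # from its Atom entry.)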
+ path = parse.unquote(state.links.alternate) + entity = Input(self.service, path, kind, state=state) + entities.append(entity) + if 'offset' in kwargs: + entities = entities[kwargs['offset']:] + if 'count' in kwargs: + entities = entities[:kwargs['count']] + if kwargs.get('sort_mode', None) == 'alpha': + sort_field = kwargs.get('sort_field', 'name') + if sort_field == 'name': + f = lambda x: x.name.lower() + else: + f = lambda x: x[sort_field].lower() + entities = sorted(entities, key=f) + if kwargs.get('sort_mode', None) == 'alpha_case': + sort_field = kwargs.get('sort_field', 'name') + if sort_field == 'name': + f = lambda x: x.name + else: + f = lambda x: x[sort_field] + entities = sorted(entities, key=f) + if kwargs.get('sort_dir', 'asc') == 'desc': + entities = list(reversed(entities)) + return entities + + def __iter__(self, **kwargs): + for item in self.iter(**kwargs): + yield item + + def iter(self, **kwargs): + """ Iterates over the collection of inputs. + + :param kwargs: Additional arguments (optional): + + - "count" (``integer``): The maximum number of items to return. + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + """ + for item in self.list(**kwargs): + yield item + + def oneshot(self, path, **kwargs): + """ Creates a oneshot data input, which is an upload of a single file + for one-time indexing. + + :param path: The path and filename. + :type path: ``string`` + :param kwargs: Additional arguments (optional). For more about the + available parameters, see `Input parameters `_ on Splunk Developer Portal. + :type kwargs: ``dict`` + """ + self.post('oneshot', name=path, **kwargs) + + +class Job(Entity): + """This class represents a search job.""" + + def __init__(self, service, sid, **kwargs): + # Default to v2 in Splunk Version 9+ + path = "{path}{sid}" + # Formatting path based on the Splunk Version + if service.disable_v2_api: + path = path.format(path=PATH_JOBS, sid=sid) + else: + path = path.format(path=PATH_JOBS_V2, sid=sid) + + Entity.__init__(self, service, path, skip_refresh=True, **kwargs) + self.sid = sid + + # The Job entry record is returned at the root of the response + def _load_atom_entry(self, response): + return _load_atom(response).entry + + def cancel(self): + """Stops the current search and deletes the results cache. + + :return: The :class:`Job`. + """ + try: + self.post("control", action="cancel") + except HTTPError as he: + if he.status == 404: + # The job has already been cancelled, so + # cancelling it twice is a nop. + pass + else: + raise + return self + + def disable_preview(self): + """Disables preview for this job. + + :return: The :class:`Job`. + """ + self.post("control", action="disablepreview") + return self + + def enable_preview(self): + """Enables preview for this job. + + **Note**: Enabling preview might slow search considerably. + + :return: The :class:`Job`. + """ + self.post("control", action="enablepreview") + return self + + def events(self, **kwargs): + """Returns a streaming handle to this job's events. + + :param kwargs: Additional parameters (optional). 
For a list of valid + parameters, see `GET search/jobs/{search_id}/events + `_ + in the REST API documentation. + :type kwargs: ``dict`` + + :return: The ``InputStream`` IO handle to this job's events. + """ + kwargs['segmentation'] = kwargs.get('segmentation', 'none') + + # Search API v1(GET) and v2(POST) + if self.service.disable_v2_api: + return self.get("events", **kwargs).body + return self.post("events", **kwargs).body + + def finalize(self): + """Stops the job and provides intermediate results for retrieval. + + :return: The :class:`Job`. + """ + self.post("control", action="finalize") + return self + + def is_done(self): + """Indicates whether this job finished running. + + :return: ``True`` if the job is done, ``False`` if not. + :rtype: ``boolean`` + """ + if not self.is_ready(): + return False + done = (self._state.content['isDone'] == '1') + return done + + def is_ready(self): + """Indicates whether this job is ready for querying. + + :return: ``True`` if the job is ready, ``False`` if not. + :rtype: ``boolean`` + + """ + response = self.get() + if response.status == 204: + return False + self._state = self.read(response) + ready = self._state.content['dispatchState'] not in ['QUEUED', 'PARSING'] + return ready + + @property + def name(self): + """Returns the name of the search job, which is the search ID (SID). + + :return: The search ID. + :rtype: ``string`` + """ + return self.sid + + def pause(self): + """Suspends the current search. + + :return: The :class:`Job`. + """ + self.post("control", action="pause") + return self + + def results(self, **query_params): + """Returns a streaming handle to this job's search results. To get a nice, Pythonic iterator, pass the handle + to :class:`splunklib.results.JSONResultsReader` along with the query param "output_mode='json'", as in:: + + import splunklib.client as client + import splunklib.results as results + from time import sleep + service = client.connect(...) + job = service.jobs.create("search * | head 5") + while not job.is_done(): + sleep(.2) + rr = results.JSONResultsReader(job.results(output_mode='json')) + for result in rr: + if isinstance(result, results.Message): + # Diagnostic messages may be returned in the results + print(f'{result.type}: {result.message}') + elif isinstance(result, dict): + # Normal events are returned as dicts + print(result) + assert rr.is_preview == False + + Results are not available until the job has finished. If called on + an unfinished job, the result is an empty event set. + + This method makes a single roundtrip + to the server, plus at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param query_params: Additional parameters (optional). For a list of valid + parameters, see `GET search/jobs/{search_id}/results + `_. + :type query_params: ``dict`` + + :return: The ``InputStream`` IO handle to this job's results. + """ + query_params['segmentation'] = query_params.get('segmentation', 'none') + + # Search API v1(GET) and v2(POST) + if self.service.disable_v2_api: + return self.get("results", **query_params).body + return self.post("results", **query_params).body + + def preview(self, **query_params): + """Returns a streaming handle to this job's preview search results. 
+
+        Unlike :class:`splunklib.results.JSONResultsReader` along with the query param "output_mode='json'",
+        which requires a job to be finished to return any results, the ``preview`` method returns any results that
+        have been generated so far, whether the job is running or not. The returned search results are the raw data
+        from the server. Pass the handle returned to :class:`splunklib.results.JSONResultsReader` to get a nice,
+        Pythonic iterator over objects, as in::
+
+            import splunklib.client as client
+            import splunklib.results as results
+            service = client.connect(...)
+            job = service.jobs.create("search * | head 5")
+            rr = results.JSONResultsReader(job.preview(output_mode='json'))
+            for result in rr:
+                if isinstance(result, results.Message):
+                    # Diagnostic messages may be returned in the results
+                    print(f'{result.type}: {result.message}')
+                elif isinstance(result, dict):
+                    # Normal events are returned as dicts
+                    print(result)
+            if rr.is_preview:
+                print("Preview of a running search job.")
+            else:
+                print("Job is finished. Results are final.")
+
+        This method makes one roundtrip to the server, plus at most
+        two more if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :param query_params: Additional parameters (optional). For a list of valid
+            parameters, see `GET search/jobs/{search_id}/results_preview
+            `_
+            in the REST API documentation.
+        :type query_params: ``dict``
+
+        :return: The ``InputStream`` IO handle to this job's preview results.
+        """
+        query_params['segmentation'] = query_params.get('segmentation', 'none')
+
+        # Search API v1(GET) and v2(POST)
+        if self.service.disable_v2_api:
+            return self.get("results_preview", **query_params).body
+        return self.post("results_preview", **query_params).body
+
+    def searchlog(self, **kwargs):
+        """Returns a streaming handle to this job's search log.
+
+        :param `kwargs`: Additional parameters (optional). For a list of valid
+            parameters, see `GET search/jobs/{search_id}/search.log
+            `_
+            in the REST API documentation.
+        :type kwargs: ``dict``
+
+        :return: The ``InputStream`` IO handle to this job's search log.
+        """
+        return self.get("search.log", **kwargs).body
+
+    def set_priority(self, value):
+        """Sets this job's search priority in the range of 0-10.
+
+        Higher numbers indicate higher priority. Unless splunkd is
+        running as *root*, you can only decrease the priority of a running job.
+
+        :param `value`: The search priority.
+        :type value: ``integer``
+
+        :return: The :class:`Job`.
+        """
+        self.post('control', action="setpriority", priority=value)
+        return self
+
+    def summary(self, **kwargs):
+        """Returns a streaming handle to this job's summary.
+
+        :param `kwargs`: Additional parameters (optional). For a list of valid
+            parameters, see `GET search/jobs/{search_id}/summary
+            `_
+            in the REST API documentation.
+        :type kwargs: ``dict``
+
+        :return: The ``InputStream`` IO handle to this job's summary.
+        """
+        return self.get("summary", **kwargs).body
+
+    def timeline(self, **kwargs):
+        """Returns a streaming handle to this job's timeline results.
+
+        :param `kwargs`: Additional timeline arguments (optional). For a list of valid
+            parameters, see `GET search/jobs/{search_id}/timeline
+            `_
+            in the REST API documentation.
+        :type kwargs: ``dict``
+
+        :return: The ``InputStream`` IO handle to this job's timeline.
+        """
+        return self.get("timeline", **kwargs).body
+
+    def touch(self):
+        """Extends the expiration time of the search to the current time (now) plus
+        the time-to-live (ttl) value.
+
+        :return: The :class:`Job`.
+        """
+        self.post("control", action="touch")
+        return self
+
+    def set_ttl(self, value):
+        """Sets the job's time-to-live (ttl) value, which is the time the
+        search job remains available before it expires.
+
+        :param `value`: The ttl value, in seconds.
+        :type value: ``integer``
+
+        :return: The :class:`Job`.
+        """
+        self.post("control", action="setttl", ttl=value)
+        return self
+
+    def unpause(self):
+        """Resumes the current search, if paused.
+
+        :return: The :class:`Job`.
+        """
+        self.post("control", action="unpause")
+        return self
+
+
+class Jobs(Collection):
+    """This class represents a collection of search jobs. Retrieve this
+    collection using :meth:`Service.jobs`."""
+
+    def __init__(self, service):
+        # Splunk 9 introduces the v2 endpoint
+        if not service.disable_v2_api:
+            path = PATH_JOBS_V2
+        else:
+            path = PATH_JOBS
+        Collection.__init__(self, service, path, item=Job)
+        # The count value to say list all the contents of this
+        # Collection is 0, not -1 as it is on most.
+        self.null_count = 0
+
+    def _load_list(self, response):
+        # Overridden because Job takes a sid instead of a path.
+        entries = _load_atom_entries(response)
+        if entries is None: return []
+        entities = []
+        for entry in entries:
+            state = _parse_atom_entry(entry)
+            entity = self.item(
+                self.service,
+                entry['content']['sid'],
+                state=state)
+            entities.append(entity)
+        return entities
+
+    def create(self, query, **kwargs):
+        """ Creates a search using a search query and any additional parameters
+        you provide.
+
+        :param query: The search query.
+        :type query: ``string``
+        :param kwargs: Additional parameters (optional). For a list of available
+            parameters, see `Search job parameters
+            `_
+            on Splunk Developer Portal.
+        :type kwargs: ``dict``
+
+        :return: The :class:`Job`.
+        """
+        if kwargs.get("exec_mode", None) == "oneshot":
+            raise TypeError("Cannot specify exec_mode=oneshot; use the oneshot method instead.")
+        response = self.post(search=query, **kwargs)
+        sid = _load_sid(response, kwargs.get("output_mode", None))
+        return Job(self.service, sid)
+
+    def export(self, query, **params):
+        """Runs a search and immediately starts streaming preview events. This method returns a streaming handle to
+        this job's events as an XML document from the server. To parse this stream into usable Python objects,
+        pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param
+        "output_mode='json'"::
+
+            import splunklib.client as client
+            import splunklib.results as results
+            service = client.connect(...)
+            rr = results.JSONResultsReader(service.jobs.export("search * | head 5", output_mode='json'))
+            for result in rr:
+                if isinstance(result, results.Message):
+                    # Diagnostic messages may be returned in the results
+                    print(f'{result.type}: {result.message}')
+                elif isinstance(result, dict):
+                    # Normal events are returned as dicts
+                    print(result)
+            assert rr.is_preview == False
+
+        Running an export search is more efficient as it streams the results
+        directly to you, rather than having to write them out to disk and make
+        them available later. As soon as results are ready, you will receive
+        them.
+
+        The ``export`` method makes a single roundtrip to the server (as opposed
+        to two for :meth:`create` followed by :meth:`preview`), plus at most two
+        more if the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :raises `ValueError`: Raised for invalid queries.
+        :param query: The search query.
+        :type query: ``string``
+        :param params: Additional arguments (optional). For a list of valid
+            parameters, see `GET search/jobs/export
+            `_
+            in the REST API documentation.
+        :type params: ``dict``
+
+        :return: The ``InputStream`` IO handle to raw XML returned from the server.
+        """
+        if "exec_mode" in params:
+            raise TypeError("Cannot specify an exec_mode to export.")
+        params['segmentation'] = params.get('segmentation', 'none')
+        return self.post(path_segment="export",
+                         search=query,
+                         **params).body
+
+    def itemmeta(self):
+        """There is no metadata available for :class:`Jobs`.
+
+        Any call to this method raises a :class:`NotSupportedError`.
+
+        :raises: :class:`NotSupportedError`
+        """
+        raise NotSupportedError()
+
+    def oneshot(self, query, **params):
+        """Runs a oneshot search and returns a streaming handle to the results.
+
+        The ``InputStream`` object streams fragments from the server. To parse this stream into usable Python
+        objects, pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param
+        "output_mode='json'"::
+
+            import splunklib.client as client
+            import splunklib.results as results
+            service = client.connect(...)
+            rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5", output_mode='json'))
+            for result in rr:
+                if isinstance(result, results.Message):
+                    # Diagnostic messages may be returned in the results
+                    print(f'{result.type}: {result.message}')
+                elif isinstance(result, dict):
+                    # Normal events are returned as dicts
+                    print(result)
+            assert rr.is_preview == False
+
+        The ``oneshot`` method makes a single roundtrip to the server (as opposed
+        to two for :meth:`create` followed by :meth:`results`), plus at most two more
+        if the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :raises ValueError: Raised for invalid queries.
+
+        :param query: The search query.
+        :type query: ``string``
+        :param params: Additional arguments (optional):
+
+            - "output_mode": Specifies the output format of the results (XML,
+              JSON, or CSV).
+
+            - "earliest_time": Specifies the earliest time in the time range to
+              search. The time string can be a UTC time (with fractional seconds),
+              a relative time specifier (to now), or a formatted time string.
+
+            - "latest_time": Specifies the latest time in the time range to
+              search. The time string can be a UTC time (with fractional seconds),
+              a relative time specifier (to now), or a formatted time string.
+
+            - "rf": Specifies one or more fields to add to the search.
+
+        :type params: ``dict``
+
+        :return: The ``InputStream`` IO handle to raw XML returned from the server.
+        """
+        if "exec_mode" in params:
+            raise TypeError("Cannot specify an exec_mode to oneshot.")
+        params['segmentation'] = params.get('segmentation', 'none')
+        return self.post(search=query,
+                         exec_mode="oneshot",
+                         **params).body
+
+
+class Loggers(Collection):
+    """This class represents a collection of service logging categories.
+    Retrieve this collection using :meth:`Service.loggers`."""
+
+    def __init__(self, service):
+        Collection.__init__(self, service, PATH_LOGGER)
+
+    def itemmeta(self):
+        """There is no metadata available for :class:`Loggers`.
+
+        Any call to this method raises a :class:`NotSupportedError`.
+
+        :raises: :class:`NotSupportedError`
+        """
+        raise NotSupportedError()
+
+
+class Message(Entity):
+    def __init__(self, service, path, **kwargs):
+        Entity.__init__(self, service, path, **kwargs)
+
+    @property
+    def value(self):
+        """Returns the message value.
+
+        :return: The message value.
+        :rtype: ``string``
+        """
+        return self[self.name]
+
+
+class ModularInputKind(Entity):
+    """This class contains the different types of modular inputs. Retrieve this
+    collection using :meth:`Service.modular_input_kinds`.
+    """
+
+    def __contains__(self, name):
+        args = self.state.content['endpoint']['args']
+        if name in args:
+            return True
+        return Entity.__contains__(self, name)
+
+    def __getitem__(self, name):
+        args = self.state.content['endpoint']['args']
+        if name in args:
+            return args[name]
+        return Entity.__getitem__(self, name)
+
+    @property
+    def arguments(self):
+        """A dictionary of all the arguments supported by this modular input kind.
+
+        The keys in the dictionary are the names of the arguments. The values are
+        another dictionary giving the metadata about that argument. The possible
+        keys in that dictionary are ``"title"``, ``"description"``, ``"required_on_create"``,
+        ``"required_on_edit"``, ``"data_type"``. Each value is a string. It should be one
+        of ``"true"`` or ``"false"`` for ``"required_on_create"`` and ``"required_on_edit"``,
+        and one of ``"boolean"``, ``"string"``, or ``"number"`` for ``"data_type"``.
+
+        :return: A dictionary describing the arguments this modular input kind takes.
+        :rtype: ``dict``
+        """
+        return self.state.content['endpoint']['args']
+
+    def update(self, **kwargs):
+        """Raises an error. Modular input kinds are read only."""
+        raise IllegalOperationException("Modular input kinds cannot be updated via the REST API.")
+
+
+class SavedSearch(Entity):
+    """This class represents a saved search."""
+
+    def __init__(self, service, path, **kwargs):
+        Entity.__init__(self, service, path, **kwargs)
+
+    def acknowledge(self):
+        """Acknowledges the suppression of alerts from this saved search and
+        resumes alerting.
+
+        :return: The :class:`SavedSearch`.
+        """
+        self.post("acknowledge")
+        return self
+
+    @property
+    def alert_count(self):
+        """Returns the number of alerts fired by this saved search.
+
+        :return: The number of alerts fired by this saved search.
+        :rtype: ``integer``
+        """
+        return int(self._state.content.get('triggered_alert_count', 0))
+
+    def dispatch(self, **kwargs):
+        """Runs the saved search and returns the resulting search job.
+
+        :param `kwargs`: Additional dispatch arguments (optional). For details,
+            see the `POST saved/searches/{name}/dispatch
+            `_
+            endpoint in the REST API documentation.
+        :type kwargs: ``dict``
+        :return: The :class:`Job`.
+        """
+        response = self.post("dispatch", **kwargs)
+        sid = _load_sid(response, kwargs.get("output_mode", None))
+        return Job(self.service, sid)
+
+    @property
+    def fired_alerts(self):
+        """Returns the collection of fired alerts (a fired alert group)
+        corresponding to this saved search's alerts.
+
+        :raises IllegalOperationException: Raised when the search is not scheduled.
+
+        :return: A collection of fired alerts.
+        :rtype: :class:`AlertGroup`
+        """
+        if self['is_scheduled'] == '0':
+            raise IllegalOperationException('Unscheduled saved searches have no alerts.')
+        c = Collection(
+            self.service,
+            self.service._abspath(PATH_FIRED_ALERTS + self.name,
+                                  owner=self._state.access.owner,
+                                  app=self._state.access.app,
+                                  sharing=self._state.access.sharing),
+            item=AlertGroup)
+        return c
+
+    def history(self, **kwargs):
+        """Returns a list of search jobs corresponding to this saved search.
+
+        :param `kwargs`: Additional arguments (optional).
+        :type kwargs: ``dict``
+
+        :return: A list of :class:`Job` objects.
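+
+        **Example** (a minimal sketch; the saved search name is illustrative
+        and must exist on your instance)::
+
+            import splunklib.client as client
+            service = client.connect(...)
+            saved = service.saved_searches['Errors in the last 24 hours']
+            for job in saved.history():
+                print(job.sid)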
+ """ + response = self.get("history", **kwargs) + entries = _load_atom_entries(response) + if entries is None: return [] + jobs = [] + for entry in entries: + job = Job(self.service, entry.title) + jobs.append(job) + return jobs + + def update(self, search=None, **kwargs): + """Updates the server with any changes you've made to the current saved + search along with any additional arguments you specify. + + :param `search`: The search query (optional). + :type search: ``string`` + :param `kwargs`: Additional arguments (optional). For a list of available + parameters, see `Saved search parameters + `_ + on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The :class:`SavedSearch`. + """ + # Updates to a saved search *require* that the search string be + # passed, so we pass the current search string if a value wasn't + # provided by the caller. + if search is None: search = self.content.search + Entity.update(self, search=search, **kwargs) + return self + + def scheduled_times(self, earliest_time='now', latest_time='+1h'): + """Returns the times when this search is scheduled to run. + + By default this method returns the times in the next hour. For different + time ranges, set *earliest_time* and *latest_time*. For example, + for all times in the last day use "earliest_time=-1d" and + "latest_time=now". + + :param earliest_time: The earliest time. + :type earliest_time: ``string`` + :param latest_time: The latest time. + :type latest_time: ``string`` + + :return: The list of search times. + """ + response = self.get("scheduled_times", + earliest_time=earliest_time, + latest_time=latest_time) + data = self._load_atom_entry(response) + rec = _parse_atom_entry(data) + times = [datetime.fromtimestamp(int(t)) + for t in rec.content.scheduled_times] + return times + + def suppress(self, expiration): + """Skips any scheduled runs of this search in the next *expiration* + number of seconds. + + :param expiration: The expiration period, in seconds. + :type expiration: ``integer`` + + :return: The :class:`SavedSearch`. + """ + self.post("suppress", expiration=expiration) + return self + + @property + def suppressed(self): + """Returns the number of seconds that this search is blocked from running + (possibly 0). + + :return: The number of seconds. + :rtype: ``integer`` + """ + r = self._run_action("suppress") + if r.suppressed == "1": + return int(r.expiration) + return 0 + + def unsuppress(self): + """Cancels suppression and makes this search run as scheduled. + + :return: The :class:`SavedSearch`. + """ + self.post("suppress", expiration="0") + return self + + +class SavedSearches(Collection): + """This class represents a collection of saved searches. Retrieve this + collection using :meth:`Service.saved_searches`.""" + + def __init__(self, service): + Collection.__init__( + self, service, PATH_SAVED_SEARCHES, item=SavedSearch) + + def create(self, name, search, **kwargs): + """ Creates a saved search. + + :param name: The name for the saved search. + :type name: ``string`` + :param search: The search query. + :type search: ``string`` + :param kwargs: Additional arguments (optional). For a list of available + parameters, see `Saved search parameters + `_ + on Splunk Developer Portal. + :type kwargs: ``dict`` + :return: The :class:`SavedSearches` collection. + """ + return Collection.create(self, name, search=search, **kwargs) + + +class Settings(Entity): + """This class represents configuration settings for a Splunk service. 
+ Retrieve this collection using :meth:`Service.settings`.""" + + def __init__(self, service, **kwargs): + Entity.__init__(self, service, "/services/server/settings", **kwargs) + + # Updates on the settings endpoint are POSTed to server/settings/settings. + def update(self, **kwargs): + """Updates the settings on the server using the arguments you provide. + + :param kwargs: Additional arguments. For a list of valid arguments, see + `POST server/settings/{name} + `_ + in the REST API documentation. + :type kwargs: ``dict`` + :return: The :class:`Settings` collection. + """ + self.service.post("/services/server/settings/settings", **kwargs) + return self + + +class User(Entity): + """This class represents a Splunk user. + """ + + @property + def role_entities(self): + """Returns a list of roles assigned to this user. + + :return: The list of roles. + :rtype: ``list`` + """ + all_role_names = [r.name for r in self.service.roles.list()] + return [self.service.roles[name] for name in self.content.roles if name in all_role_names] + + +# Splunk automatically lowercases new user names so we need to match that +# behavior here to ensure that the subsequent member lookup works correctly. +class Users(Collection): + """This class represents the collection of Splunk users for this instance of + Splunk. Retrieve this collection using :meth:`Service.users`. + """ + + def __init__(self, service): + Collection.__init__(self, service, PATH_USERS, item=User) + + def __getitem__(self, key): + return Collection.__getitem__(self, key.lower()) + + def __contains__(self, name): + return Collection.__contains__(self, name.lower()) + + def create(self, username, password, roles, **params): + """Creates a new user. + + This function makes two roundtrips to the server, plus at most + two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param username: The username. + :type username: ``string`` + :param password: The password. + :type password: ``string`` + :param roles: A single role or list of roles for the user. + :type roles: ``string`` or ``list`` + :param params: Additional arguments (optional). For a list of available + parameters, see `User authentication parameters + `_ + on Splunk Developer Portal. + :type params: ``dict`` + + :return: The new user. + :rtype: :class:`User` + + **Example**:: + + import splunklib.client as client + c = client.connect(...) + users = c.users + boris = users.create("boris", "securepassword", roles="user") + hilda = users.create("hilda", "anotherpassword", roles=["user","power"]) + """ + if not isinstance(username, str): + raise ValueError(f"Invalid username: {str(username)}") + username = username.lower() + self.post(name=username, password=password, roles=roles, **params) + # splunkd doesn't return the user in the POST response body, + # so we have to make a second round trip to fetch it. + response = self.get(username) + entry = _load_atom(response, XNAME_ENTRY).entry + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + parse.unquote(state.links.alternate), + state=state) + return entity + + def delete(self, name): + """ Deletes the user and returns the resulting collection of users. + + :param name: The name of the user to delete. + :type name: ``string`` + + :return: + :rtype: :class:`Users` + """ + return Collection.delete(self, name.lower()) + + +class Role(Entity): + """This class represents a user role. + """ + + def grant(self, *capabilities_to_grant): + """Grants additional capabilities to this role. 
+
+        :param capabilities_to_grant: Zero or more capabilities to grant this
+            role. For a list of capabilities, see
+            `Capabilities `_
+            on Splunk Developer Portal.
+        :type capabilities_to_grant: ``string`` or ``list``
+        :return: The :class:`Role`.
+
+        **Example**::
+
+            service = client.connect(...)
+            role = service.roles['somerole']
+            role.grant('change_own_password', 'search')
+        """
+        possible_capabilities = self.service.capabilities
+        for capability in capabilities_to_grant:
+            if capability not in possible_capabilities:
+                raise NoSuchCapability(capability)
+        new_capabilities = self['capabilities'] + list(capabilities_to_grant)
+        self.post(capabilities=new_capabilities)
+        return self
+
+    def revoke(self, *capabilities_to_revoke):
+        """Revokes zero or more capabilities from this role.
+
+        :param capabilities_to_revoke: Zero or more capabilities to revoke from
+            this role. For a list of capabilities, see
+            `Capabilities `_
+            on Splunk Developer Portal.
+        :type capabilities_to_revoke: ``string`` or ``list``
+
+        :return: The :class:`Role`.
+
+        **Example**::
+
+            service = client.connect(...)
+            role = service.roles['somerole']
+            role.revoke('change_own_password', 'search')
+        """
+        possible_capabilities = self.service.capabilities
+        for capability in capabilities_to_revoke:
+            if capability not in possible_capabilities:
+                raise NoSuchCapability(capability)
+        old_capabilities = self['capabilities']
+        new_capabilities = []
+        for c in old_capabilities:
+            if c not in capabilities_to_revoke:
+                new_capabilities.append(c)
+        if not new_capabilities:
+            new_capabilities = ''  # Empty lists don't get passed in the body, so we have to force an empty argument.
+        self.post(capabilities=new_capabilities)
+        return self
+
+
+class Roles(Collection):
+    """This class represents the collection of roles in the Splunk instance.
+    Retrieve this collection using :meth:`Service.roles`."""
+
+    def __init__(self, service):
+        Collection.__init__(self, service, PATH_ROLES, item=Role)
+
+    def __getitem__(self, key):
+        return Collection.__getitem__(self, key.lower())
+
+    def __contains__(self, name):
+        return Collection.__contains__(self, name.lower())
+
+    def create(self, name, **params):
+        """Creates a new role.
+
+        This function makes two roundtrips to the server, plus at most
+        two more if
+        the ``autologin`` field of :func:`connect` is set to ``True``.
+
+        :param name: Name for the role.
+        :type name: ``string``
+        :param params: Additional arguments (optional). For a list of available
+            parameters, see `Roles parameters
+            `_
+            on Splunk Developer Portal.
+        :type params: ``dict``
+
+        :return: The new role.
+        :rtype: :class:`Role`
+
+        **Example**::
+
+            import splunklib.client as client
+            c = client.connect(...)
+            roles = c.roles
+            paltry = roles.create("paltry", imported_roles="user", defaultApp="search")
+        """
+        if not isinstance(name, str):
+            raise ValueError(f"Invalid role name: {str(name)}")
+        name = name.lower()
+        self.post(name=name, **params)
+        # splunkd doesn't return the role in the POST response body,
+        # so we have to make a second round trip to fetch it.
+        response = self.get(name)
+        entry = _load_atom(response, XNAME_ENTRY).entry
+        state = _parse_atom_entry(entry)
+        entity = self.item(
+            self.service,
+            parse.unquote(state.links.alternate),
+            state=state)
+        return entity
+
+    def delete(self, name):
+        """ Deletes the role and returns the resulting collection of roles.
+
+        :param name: The name of the role to delete.
+ :type name: ``string``
+
+ :return: The resulting collection of roles.
+ :rtype: :class:`Roles`
+ """
+ return Collection.delete(self, name.lower())
+
+
+class Application(Entity):
+ """Represents a locally-installed Splunk app."""
+
+ @property
+ def setupInfo(self):
+ """Returns the setup information for the app.
+
+ :return: The setup information.
+ """
+ return self.content.get('eai:setup', None)
+
+ def package(self):
+ """ Creates a compressed package of the app for archiving."""
+ return self._run_action("package")
+
+ def updateInfo(self):
+ """Returns any update information that is available for the app."""
+ return self._run_action("update")
+
+
+class KVStoreCollections(Collection):
+ def __init__(self, service):
+ Collection.__init__(self, service, 'storage/collections/config', item=KVStoreCollection)
+
+ def __getitem__(self, item):
+ res = Collection.__getitem__(self, item)
+ for k, v in res.content.items():
+ if "accelerated_fields" in k:
+ res.content[k] = json.loads(v)
+ return res
+
+ def create(self, name, accelerated_fields=None, fields=None, **kwargs):
+ """Creates a KV Store Collection.
+
+ :param name: name of collection to create
+ :type name: ``string``
+ :param accelerated_fields: dictionary of accelerated_fields definitions
+ :type accelerated_fields: ``dict``
+ :param fields: dictionary of field definitions
+ :type fields: ``dict``
+ :param kwargs: a dictionary of additional parameters specifying indexes and field definitions
+ :type kwargs: ``dict``
+
+ :return: Result of POST request
+ """
+ # Use None defaults to avoid sharing mutable default arguments between calls.
+ accelerated_fields = accelerated_fields or {}
+ fields = fields or {}
+ for k, v in accelerated_fields.items():
+ if isinstance(v, dict):
+ v = json.dumps(v)
+ kwargs['accelerated_fields.' + k] = v
+ for k, v in fields.items():
+ kwargs['field.' + k] = v
+ return self.post(name=name, **kwargs)
+
+
+class KVStoreCollection(Entity):
+ @property
+ def data(self):
+ """Returns data object for this Collection.
+
+ :rtype: :class:`KVStoreCollectionData`
+ """
+ return KVStoreCollectionData(self)
+
+ def update_accelerated_field(self, name, value):
+ """Changes the definition of a KV Store accelerated_field.
+
+ :param name: name of accelerated_fields to change
+ :type name: ``string``
+ :param value: new accelerated_fields definition
+ :type value: ``dict``
+
+ :return: Result of POST request
+ """
+ kwargs = {}
+ kwargs['accelerated_fields.' + name] = json.dumps(value) if isinstance(value, dict) else value
+ return self.post(**kwargs)
+
+ def update_field(self, name, value):
+ """Changes the definition of a KV Store field.
+
+ :param name: name of field to change
+ :type name: ``string``
+ :param value: new field definition
+ :type value: ``string``
+
+ :return: Result of POST request
+ """
+ kwargs = {}
+ kwargs['field.' + name] = value
+ return self.post(**kwargs)
+
+
+class KVStoreCollectionData:
+ """This class represents the data endpoint for a KVStoreCollection.
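+
+ A usage sketch (the collection name, field names, and the ``service.kvstore``
+ accessor used to reach the collection are illustrative)::
+
+ data = service.kvstore['mycollection'].data
+ key = data.insert({'status': 'open'})['_key']
+ open_items = data.query(query={'status': 'open'})
+ data.delete_by_id(key)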
+ + Retrieve using :meth:`KVStoreCollection.data` + """ + JSON_HEADER = [('Content-Type', 'application/json')] + + def __init__(self, collection): + self.service = collection.service + self.collection = collection + self.owner, self.app, self.sharing = collection._proper_namespace() + self.path = 'storage/collections/data/' + UrlEncoded(self.collection.name, encode_slash=True) + '/' + + def _get(self, url, **kwargs): + return self.service.get(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) + + def _post(self, url, **kwargs): + return self.service.post(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) + + def _delete(self, url, **kwargs): + return self.service.delete(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) + + def query(self, **query): + """ + Gets the results of query, with optional parameters sort, limit, skip, and fields. + + :param query: Optional parameters. Valid options are sort, limit, skip, and fields + :type query: ``dict`` + + :return: Array of documents retrieved by query. + :rtype: ``array`` + """ + + for key, value in query.items(): + if isinstance(query[key], dict): + query[key] = json.dumps(value) + + return json.loads(self._get('', **query).body.read().decode('utf-8')) + + def query_by_id(self, id): + """ + Returns object with _id = id. + + :param id: Value for ID. If not a string will be coerced to string. + :type id: ``string`` + + :return: Document with id + :rtype: ``dict`` + """ + return json.loads(self._get(UrlEncoded(str(id), encode_slash=True)).body.read().decode('utf-8')) + + def insert(self, data): + """ + Inserts item into this collection. An _id field will be generated if not assigned in the data. + + :param data: Document to insert + :type data: ``string`` + + :return: _id of inserted object + :rtype: ``dict`` + """ + if isinstance(data, dict): + data = json.dumps(data) + return json.loads( + self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + + def delete(self, query=None): + """ + Deletes all data in collection if query is absent. Otherwise, deletes all data matched by query. + + :param query: Query to select documents to delete + :type query: ``string`` + + :return: Result of DELETE request + """ + return self._delete('', **({'query': query}) if query else {}) + + def delete_by_id(self, id): + """ + Deletes document that has _id = id. + + :param id: id of document to delete + :type id: ``string`` + + :return: Result of DELETE request + """ + return self._delete(UrlEncoded(str(id), encode_slash=True)) + + def update(self, id, data): + """ + Replaces document with _id = id with data. + + :param id: _id of document to update + :type id: ``string`` + :param data: the new document to insert + :type data: ``string`` + + :return: id of replaced document + :rtype: ``dict`` + """ + if isinstance(data, dict): + data = json.dumps(data) + return json.loads(self._post(UrlEncoded(str(id), encode_slash=True), headers=KVStoreCollectionData.JSON_HEADER, + body=data).body.read().decode('utf-8')) + + def batch_find(self, *dbqueries): + """ + Returns array of results from queries dbqueries. 
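+
+ A sketch (field names are illustrative)::
+
+ open_items, closed_items = collection.data.batch_find(
+ {'query': {'status': 'open'}},
+ {'query': {'status': 'closed'}})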
+ + :param dbqueries: Array of individual queries as dictionaries + :type dbqueries: ``array`` of ``dict`` + + :return: Results of each query + :rtype: ``array`` of ``array`` + """ + if len(dbqueries) < 1: + raise Exception('Must have at least one query.') + + data = json.dumps(dbqueries) + + return json.loads( + self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + + def batch_save(self, *documents): + """ + Inserts or updates every document specified in documents. + + :param documents: Array of documents to save as dictionaries + :type documents: ``array`` of ``dict`` + + :return: Results of update operation as overall stats + :rtype: ``dict`` + """ + if len(documents) < 1: + raise Exception('Must have at least one document.') + + data = json.dumps(documents) + + return json.loads( + self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index 16999a2a..7e8f771e 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -1,1143 +1,1143 @@ -# coding=utf-8 -# -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Absolute imports - -import csv -import io -import os -import re -import sys -import tempfile -import traceback -from collections import namedtuple, OrderedDict -from copy import deepcopy -from io import StringIO -from itertools import chain, islice -from logging import _nameToLevel as _levelNames, getLevelName, getLogger -from shutil import make_archive -from time import time -from urllib.parse import unquote -from urllib.parse import urlsplit -from warnings import warn -from xml.etree import ElementTree -from splunklib.utils import ensure_str - - -# Relative imports -import splunklib -from . import Boolean, Option, environment -from .internals import ( - CommandLineParser, - CsvDialect, - InputHeader, - Message, - MetadataDecoder, - MetadataEncoder, - ObjectView, - Recorder, - RecordWriterV1, - RecordWriterV2, - json_encode_string) -from ..client import Service - - -# ---------------------------------------------------------------------------------------------------------------------- - -# P1 [ ] TODO: Log these issues against ChunkedExternProcessor -# -# 1. Implement requires_preop configuration setting. -# This configuration setting is currently rejected by ChunkedExternProcessor. -# -# 2. Rename type=events as type=eventing for symmetry with type=reporting and type=streaming -# Eventing commands process records on the events pipeline. This change effects ChunkedExternProcessor.cpp, -# eventing_command.py, and generating_command.py. -# -# 3. For consistency with SCPV1, commands.conf should not require filename setting when chunked = true -# The SCPV1 processor uses .py as the default filename. The ChunkedExternProcessor should do the same. 
- -# P1 [ ] TODO: Verify that ChunkedExternProcessor complains if a streaming_preop has a type other than 'streaming' -# It once looked like sending type='reporting' for the streaming_preop was accepted. - -# ---------------------------------------------------------------------------------------------------------------------- - -# P2 [ ] TODO: Consider bumping None formatting up to Option.Item.__str__ - - -class SearchCommand: - """ Represents a custom search command. - - """ - - def __init__(self): - - # Variables that may be used, but not altered by derived classes - - class_name = self.__class__.__name__ - - self._logger, self._logging_configuration = getLogger(class_name), environment.logging_configuration - - # Variables backing option/property values - - self._configuration = self.ConfigurationSettings(self) - self._input_header = InputHeader() - self._fieldnames = None - self._finished = None - self._metadata = None - self._options = None - self._protocol_version = None - self._search_results_info = None - self._service = None - - # Internal variables - - self._default_logging_level = self._logger.level - self._record_writer = None - self._records = None - self._allow_empty_input = True - - def __str__(self): - text = ' '.join(chain((type(self).name, str(self.options)), [] if self.fieldnames is None else self.fieldnames)) - return text - - # region Options - - @Option - def logging_configuration(self): - """ **Syntax:** logging_configuration= - - **Description:** Loads an alternative logging configuration file for - a command invocation. The logging configuration file must be in Python - ConfigParser-format. Path names are relative to the app root directory. - - """ - return self._logging_configuration - - @logging_configuration.setter - def logging_configuration(self, value): - self._logger, self._logging_configuration = environment.configure_logging(self.__class__.__name__, value) - - @Option - def logging_level(self): - """ **Syntax:** logging_level=[CRITICAL|ERROR|WARNING|INFO|DEBUG|NOTSET] - - **Description:** Sets the threshold for the logger of this command invocation. Logging messages less severe than - `logging_level` will be ignored. - - """ - return getLevelName(self._logger.getEffectiveLevel()) - - @logging_level.setter - def logging_level(self, value): - if value is None: - value = self._default_logging_level - if isinstance(value, (bytes, str)): - try: - level = _levelNames[value.upper()] - except KeyError: - raise ValueError(f'Unrecognized logging level: {value}') - else: - try: - level = int(value) - except ValueError: - raise ValueError(f'Unrecognized logging level: {value}') - self._logger.setLevel(level) - - def add_field(self, current_record, field_name, field_value): - self._record_writer.custom_fields.add(field_name) - current_record[field_name] = field_value - - def gen_record(self, **record): - self._record_writer.custom_fields |= set(record.keys()) - return record - - record = Option(doc=''' - **Syntax: record= - - **Description:** When `true`, records the interaction between the command and splunkd. Defaults to `false`. - - ''', default=False, validate=Boolean()) - - show_configuration = Option(doc=''' - **Syntax:** show_configuration= - - **Description:** When `true`, reports command configuration as an informational message. Defaults to `false`. - - ''', default=False, validate=Boolean()) - - # endregion - - # region Properties - - @property - def configuration(self): - """ Returns the configuration settings for this command. 
- - """ - return self._configuration - - @property - def fieldnames(self): - """ Returns the fieldnames specified as argument to this command. - - """ - return self._fieldnames - - @fieldnames.setter - def fieldnames(self, value): - self._fieldnames = value - - @property - def input_header(self): - """ Returns the input header for this command. - - :return: The input header for this command. - :rtype: InputHeader - - """ - warn( - 'SearchCommand.input_header is deprecated and will be removed in a future release. ' - 'Please use SearchCommand.metadata instead.', DeprecationWarning, 2) - return self._input_header - - @property - def logger(self): - """ Returns the logger for this command. - - :return: The logger for this command. - :rtype: - - """ - return self._logger - - @property - def metadata(self): - return self._metadata - - @property - def options(self): - """ Returns the options specified as argument to this command. - - """ - if self._options is None: - self._options = Option.View(self) - return self._options - - @property - def protocol_version(self): - return self._protocol_version - - @property - def search_results_info(self): - """ Returns the search results info for this command invocation. - - The search results info object is created from the search results info file associated with the command - invocation. - - :return: Search results info:const:`None`, if the search results info file associated with the command - invocation is inaccessible. - :rtype: SearchResultsInfo or NoneType - - """ - if self._search_results_info is not None: - return self._search_results_info - - if self._protocol_version == 1: - try: - path = self._input_header['infoPath'] - except KeyError: - return None - else: - assert self._protocol_version == 2 - - try: - dispatch_dir = self._metadata.searchinfo.dispatch_dir - except AttributeError: - return None - - path = os.path.join(dispatch_dir, 'info.csv') - - try: - with io.open(path, 'r') as f: - reader = csv.reader(f, dialect=CsvDialect) - fields = next(reader) - values = next(reader) - except IOError as error: - if error.errno == 2: - self.logger.error(f'Search results info file {json_encode_string(path)} does not exist.') - return - raise - - def convert_field(field): - return (field[1:] if field[0] == '_' else field).replace('.', '_') - - decode = MetadataDecoder().decode - - def convert_value(value): - try: - return decode(value) if len(value) > 0 else value - except ValueError: - return value - - info = ObjectView(dict((convert_field(f_v[0]), convert_value(f_v[1])) for f_v in zip(fields, values))) - - try: - count_map = info.countMap - except AttributeError: - pass - else: - count_map = count_map.split(';') - n = len(count_map) - info.countMap = dict(list(zip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2)))) - - try: - msg_type = info.msgType - msg_text = info.msg - except AttributeError: - pass - else: - messages = [t_m for t_m in zip(msg_type.split('\n'), msg_text.split('\n')) if t_m[0] or t_m[1]] - info.msg = [Message(message) for message in messages] - del info.msgType - - try: - info.vix_families = ElementTree.fromstring(info.vix_families) - except AttributeError: - pass - - self._search_results_info = info - return info - - @property - def service(self): - """ Returns a Splunk service object for this command invocation or None. - - The service object is created from the Splunkd URI and authentication token passed to the command invocation in - the search results info file. This data is not passed to a command invocation by default. 
You must request it by - specifying this pair of configuration settings in commands.conf: - - .. code-block:: python - - enableheader = true - requires_srinfo = true - - The :code:`enableheader` setting is :code:`true` by default. Hence, you need not set it. The - :code:`requires_srinfo` setting is false by default. Hence, you must set it. - - :return: :class:`splunklib.client.Service`, if :code:`enableheader` and :code:`requires_srinfo` are both - :code:`true`. Otherwise, if either :code:`enableheader` or :code:`requires_srinfo` are :code:`false`, a value - of :code:`None` is returned. - - """ - if self._service is not None: - return self._service - - metadata = self._metadata - - if metadata is None: - return None - - try: - searchinfo = self._metadata.searchinfo - except AttributeError: - return None - - splunkd_uri = searchinfo.splunkd_uri - - if splunkd_uri is None: - return None - - uri = urlsplit(splunkd_uri, allow_fragments=False) - - self._service = Service( - scheme=uri.scheme, host=uri.hostname, port=uri.port, app=searchinfo.app, token=searchinfo.session_key) - - return self._service - - # endregion - - # region Methods - - def error_exit(self, error, message=None): - self.write_error(error.message if message is None else message) - self.logger.error('Abnormal exit: %s', error) - exit(1) - - def finish(self): - """ Flushes the output buffer and signals that this command has finished processing data. - - :return: :const:`None` - - """ - self._record_writer.flush(finished=True) - - def flush(self): - """ Flushes the output buffer. - - :return: :const:`None` - - """ - self._record_writer.flush(finished=False) - - def prepare(self): - """ Prepare for execution. - - This method should be overridden in search command classes that wish to examine and update their configuration - or option settings prior to execution. It is called during the getinfo exchange before command metadata is sent - to splunkd. - - :return: :const:`None` - :rtype: NoneType - - """ - - def process(self, argv=sys.argv, ifile=sys.stdin, ofile=sys.stdout, allow_empty_input=True): - """ Process data. - - :param argv: Command line arguments. - :type argv: list or tuple - - :param ifile: Input data file. - :type ifile: file - - :param ofile: Output data file. 
- :type ofile: file - - :param allow_empty_input: Allow empty input records for the command, if False an Error will be returned if empty chunk body is encountered when read - :type allow_empty_input: bool - - :return: :const:`None` - :rtype: NoneType - - """ - - self._allow_empty_input = allow_empty_input - - if len(argv) > 1: - self._process_protocol_v1(argv, ifile, ofile) - else: - self._process_protocol_v2(argv, ifile, ofile) - - def _map_input_header(self): - metadata = self._metadata - searchinfo = metadata.searchinfo - self._input_header.update( - allowStream=None, - infoPath=os.path.join(searchinfo.dispatch_dir, 'info.csv'), - keywords=None, - preview=metadata.preview, - realtime=searchinfo.earliest_time != 0 and searchinfo.latest_time != 0, - search=searchinfo.search, - sid=searchinfo.sid, - splunkVersion=searchinfo.splunk_version, - truncated=None) - - def _map_metadata(self, argv): - source = SearchCommand._MetadataSource(argv, self._input_header, self.search_results_info) - - def _map(metadata_map): - metadata = {} - - for name, value in metadata_map.items(): - if isinstance(value, dict): - value = _map(value) - else: - transform, extract = value - if extract is None: - value = None - else: - value = extract(source) - if not (value is None or transform is None): - value = transform(value) - metadata[name] = value - - return ObjectView(metadata) - - self._metadata = _map(SearchCommand._metadata_map) - - _metadata_map = { - 'action': - (lambda v: 'getinfo' if v == '__GETINFO__' else 'execute' if v == '__EXECUTE__' else None, - lambda s: s.argv[1]), - 'preview': - (bool, lambda s: s.input_header.get('preview')), - 'searchinfo': { - 'app': - (lambda v: v.ppc_app, lambda s: s.search_results_info), - 'args': - (None, lambda s: s.argv), - 'dispatch_dir': - (os.path.dirname, lambda s: s.input_header.get('infoPath')), - 'earliest_time': - (lambda v: float(v.rt_earliest) if len(v.rt_earliest) > 0 else 0.0, lambda s: s.search_results_info), - 'latest_time': - (lambda v: float(v.rt_latest) if len(v.rt_latest) > 0 else 0.0, lambda s: s.search_results_info), - 'owner': - (None, None), - 'raw_args': - (None, lambda s: s.argv), - 'search': - (unquote, lambda s: s.input_header.get('search')), - 'session_key': - (lambda v: v.auth_token, lambda s: s.search_results_info), - 'sid': - (None, lambda s: s.input_header.get('sid')), - 'splunk_version': - (None, lambda s: s.input_header.get('splunkVersion')), - 'splunkd_uri': - (lambda v: v.splunkd_uri, lambda s: s.search_results_info), - 'username': - (lambda v: v.ppc_user, lambda s: s.search_results_info)}} - - _MetadataSource = namedtuple('Source', ('argv', 'input_header', 'search_results_info')) - - def _prepare_protocol_v1(self, argv, ifile, ofile): - - debug = environment.splunklib_logger.debug - - # Provide as much context as possible in advance of parsing the command line and preparing for execution - - self._input_header.read(ifile) - self._protocol_version = 1 - self._map_metadata(argv) - - debug(' metadata=%r, input_header=%r', self._metadata, self._input_header) - - try: - tempfile.tempdir = self._metadata.searchinfo.dispatch_dir - except AttributeError: - raise RuntimeError(f'{self.__class__.__name__}.metadata.searchinfo.dispatch_dir is undefined') - - debug(' tempfile.tempdir=%r', tempfile.tempdir) - - CommandLineParser.parse(self, argv[2:]) - self.prepare() - - if self.record: - self.record = False - - record_argv = [argv[0], argv[1], str(self._options), ' '.join(self.fieldnames)] - ifile, ofile = self._prepare_recording(record_argv, ifile, 
ofile) - self._record_writer.ofile = ofile - ifile.record(str(self._input_header), '\n\n') - - if self.show_configuration: - self.write_info(self.name + ' command configuration: ' + str(self._configuration)) - - return ifile # wrapped, if self.record is True - - def _prepare_recording(self, argv, ifile, ofile): - - # Create the recordings directory, if it doesn't already exist - - recordings = os.path.join(environment.splunk_home, 'var', 'run', 'splunklib.searchcommands', 'recordings') - - if not os.path.isdir(recordings): - os.makedirs(recordings) - - # Create input/output recorders from ifile and ofile - - recording = os.path.join(recordings, self.__class__.__name__ + '-' + repr(time()) + '.' + self._metadata.action) - ifile = Recorder(recording + '.input', ifile) - ofile = Recorder(recording + '.output', ofile) - - # Archive the dispatch directory--if it exists--so that it can be used as a baseline in mocks) - - dispatch_dir = self._metadata.searchinfo.dispatch_dir - - if dispatch_dir is not None: # __GETINFO__ action does not include a dispatch_dir - root_dir, base_dir = os.path.split(dispatch_dir) - make_archive(recording + '.dispatch_dir', 'gztar', root_dir, base_dir, logger=self.logger) - - # Save a splunk command line because it is useful for developing tests - - with open(recording + '.splunk_cmd', 'wb') as f: - f.write('splunk cmd python '.encode()) - f.write(os.path.basename(argv[0]).encode()) - for arg in islice(argv, 1, len(argv)): - f.write(' '.encode()) - f.write(arg.encode()) - - return ifile, ofile - - def _process_protocol_v1(self, argv, ifile, ofile): - - debug = environment.splunklib_logger.debug - class_name = self.__class__.__name__ - - debug('%s.process started under protocol_version=1', class_name) - self._record_writer = RecordWriterV1(ofile) - - # noinspection PyBroadException - try: - if argv[1] == '__GETINFO__': - - debug('Writing configuration settings') - - ifile = self._prepare_protocol_v1(argv, ifile, ofile) - self._record_writer.write_record(dict( - (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in - self._configuration.items())) - self.finish() - - elif argv[1] == '__EXECUTE__': - - debug('Executing') - - ifile = self._prepare_protocol_v1(argv, ifile, ofile) - self._records = self._records_protocol_v1 - self._metadata.action = 'execute' - self._execute(ifile, None) - - else: - message = ( - f'Command {self.name} appears to be statically configured for search command protocol version 1 and static ' - 'configuration is unsupported by splunklib.searchcommands. Please ensure that ' - 'default/commands.conf contains this stanza:\n' - f'[{self.name}]\n' - f'filename = {os.path.basename(argv[0])}\n' - 'enableheader = true\n' - 'outputheader = true\n' - 'requires_srinfo = true\n' - 'supports_getinfo = true\n' - 'supports_multivalues = true\n' - 'supports_rawargs = true') - raise RuntimeError(message) - - except (SyntaxError, ValueError) as error: - self.write_error(str(error)) - self.flush() - exit(0) - - except SystemExit: - self.flush() - raise - - except: - self._report_unexpected_error() - self.flush() - exit(1) - - debug('%s.process finished under protocol_version=1', class_name) - - def _protocol_v2_option_parser(self, arg): - """ Determines if an argument is an Option/Value pair, or just a Positional Argument. - Method so different search commands can handle parsing of arguments differently. 
- - :param arg: A single argument provided to the command from SPL - :type arg: str - - :return: [OptionName, OptionValue] OR [PositionalArgument] - :rtype: List[str] - - """ - return arg.split('=', 1) - - def _process_protocol_v2(self, argv, ifile, ofile): - """ Processes records on the `input stream optionally writing records to the output stream. - - :param ifile: Input file object. - :type ifile: file or InputType - - :param ofile: Output file object. - :type ofile: file or OutputType - - :return: :const:`None` - - """ - debug = environment.splunklib_logger.debug - class_name = self.__class__.__name__ - - debug('%s.process started under protocol_version=2', class_name) - self._protocol_version = 2 - - # Read search command metadata from splunkd - # noinspection PyBroadException - try: - debug('Reading metadata') - metadata, body = self._read_chunk(self._as_binary_stream(ifile)) - - action = getattr(metadata, 'action', None) - - if action != 'getinfo': - raise RuntimeError(f'Expected getinfo action, not {action}') - - if len(body) > 0: - raise RuntimeError('Did not expect data for getinfo action') - - self._metadata = deepcopy(metadata) - - searchinfo = self._metadata.searchinfo - - searchinfo.earliest_time = float(searchinfo.earliest_time) - searchinfo.latest_time = float(searchinfo.latest_time) - searchinfo.search = unquote(searchinfo.search) - - self._map_input_header() - - debug(' metadata=%r, input_header=%r', self._metadata, self._input_header) - - try: - tempfile.tempdir = self._metadata.searchinfo.dispatch_dir - except AttributeError: - raise RuntimeError(f'{class_name}.metadata.searchinfo.dispatch_dir is undefined') - - debug(' tempfile.tempdir=%r', tempfile.tempdir) - except: - self._record_writer = RecordWriterV2(ofile) - self._report_unexpected_error() - self.finish() - exit(1) - - # Write search command configuration for consumption by splunkd - # noinspection PyBroadException - try: - self._record_writer = RecordWriterV2(ofile, getattr(self._metadata.searchinfo, 'maxresultrows', None)) - self.fieldnames = [] - self.options.reset() - - args = self.metadata.searchinfo.args - error_count = 0 - - debug('Parsing arguments') - - if args and isinstance(args, list): - for arg in args: - result = self._protocol_v2_option_parser(arg) - if len(result) == 1: - self.fieldnames.append(str(result[0])) - else: - name, value = result - name = str(name) - try: - option = self.options[name] - except KeyError: - self.write_error(f'Unrecognized option: {name}={value}') - error_count += 1 - continue - try: - option.value = value - except ValueError: - self.write_error(f'Illegal value: {name}={value}') - error_count += 1 - continue - - missing = self.options.get_missing() - - if missing is not None: - if len(missing) == 1: - self.write_error(f'A value for "{missing[0]}" is required') - else: - self.write_error(f'Values for these required options are missing: {", ".join(missing)}') - error_count += 1 - - if error_count > 0: - exit(1) - - debug(' command: %s', str(self)) - - debug('Preparing for execution') - self.prepare() - - if self.record: - - ifile, ofile = self._prepare_recording(argv, ifile, ofile) - self._record_writer.ofile = ofile - - # Record the metadata that initiated this command after removing the record option from args/raw_args - - info = self._metadata.searchinfo - - for attr in 'args', 'raw_args': - setattr(info, attr, [arg for arg in getattr(info, attr) if not arg.startswith('record=')]) - - metadata = MetadataEncoder().encode(self._metadata) - ifile.record('chunked 1.0,', 
str(len(metadata)), ',0\n', metadata) - - if self.show_configuration: - self.write_info(self.name + ' command configuration: ' + str(self._configuration)) - - debug(' command configuration: %s', self._configuration) - - except SystemExit: - self._record_writer.write_metadata(self._configuration) - self.finish() - raise - except: - self._record_writer.write_metadata(self._configuration) - self._report_unexpected_error() - self.finish() - exit(1) - - self._record_writer.write_metadata(self._configuration) - - # Execute search command on data passing through the pipeline - # noinspection PyBroadException - try: - debug('Executing under protocol_version=2') - self._metadata.action = 'execute' - self._execute(ifile, None) - except SystemExit: - self.finish() - raise - except: - self._report_unexpected_error() - self.finish() - exit(1) - - debug('%s.process completed', class_name) - - def write_debug(self, message, *args): - self._record_writer.write_message('DEBUG', message, *args) - - def write_error(self, message, *args): - self._record_writer.write_message('ERROR', message, *args) - - def write_fatal(self, message, *args): - self._record_writer.write_message('FATAL', message, *args) - - def write_info(self, message, *args): - self._record_writer.write_message('INFO', message, *args) - - def write_warning(self, message, *args): - self._record_writer.write_message('WARN', message, *args) - - def write_metric(self, name, value): - """ Writes a metric that will be added to the search inspector. - - :param name: Name of the metric. - :type name: basestring - - :param value: A 4-tuple containing the value of metric ``name`` where - - value[0] = Elapsed seconds or :const:`None`. - value[1] = Number of invocations or :const:`None`. - value[2] = Input count or :const:`None`. - value[3] = Output count or :const:`None`. - - The :data:`SearchMetric` type provides a convenient encapsulation of ``value``. - The :data:`SearchMetric` type provides a convenient encapsulation of ``value``. - - :return: :const:`None`. - - """ - self._record_writer.write_metric(name, value) - - # P2 [ ] TODO: Support custom inspector values - - @staticmethod - def _decode_list(mv): - return [match.replace('$$', '$') for match in SearchCommand._encoded_value.findall(mv)] - - _encoded_value = re.compile(r'\$(?P(?:\$\$|[^$])*)\$(?:;|$)') # matches a single value in an encoded list - - # Note: Subclasses must override this method so that it can be called - # called as self._execute(ifile, None) - def _execute(self, ifile, process): - """ Default processing loop - - :param ifile: Input file object. - :type ifile: file - - :param process: Bound method to call in processing loop. - :type process: instancemethod - - :return: :const:`None`. 
- :rtype: NoneType - - """ - if self.protocol_version == 1: - self._record_writer.write_records(process(self._records(ifile))) - self.finish() - else: - assert self._protocol_version == 2 - self._execute_v2(ifile, process) - - @staticmethod - def _as_binary_stream(ifile): - naught = ifile.read(0) - if isinstance(naught, bytes): - return ifile - - try: - return ifile.buffer - except AttributeError as error: - raise RuntimeError(f'Failed to get underlying buffer: {error}') - - @staticmethod - def _read_chunk(istream): - # noinspection PyBroadException - assert isinstance(istream.read(0), bytes), 'Stream must be binary' - - try: - header = istream.readline() - except Exception as error: - raise RuntimeError(f'Failed to read transport header: {error}') - - if not header: - return None - - match = SearchCommand._header.match(ensure_str(header)) - - if match is None: - raise RuntimeError(f'Failed to parse transport header: {header}') - - metadata_length, body_length = match.groups() - metadata_length = int(metadata_length) - body_length = int(body_length) - - try: - metadata = istream.read(metadata_length) - except Exception as error: - raise RuntimeError(f'Failed to read metadata of length {metadata_length}: {error}') - - decoder = MetadataDecoder() - - try: - metadata = decoder.decode(ensure_str(metadata)) - except Exception as error: - raise RuntimeError(f'Failed to parse metadata of length {metadata_length}: {error}') - - # if body_length <= 0: - # return metadata, '' - - body = "" - try: - if body_length > 0: - body = istream.read(body_length) - except Exception as error: - raise RuntimeError(f'Failed to read body of length {body_length}: {error}') - - return metadata, ensure_str(body,errors="replace") - - _header = re.compile(r'chunked\s+1.0\s*,\s*(\d+)\s*,\s*(\d+)\s*\n') - - def _records_protocol_v1(self, ifile): - return self._read_csv_records(ifile) - - def _read_csv_records(self, ifile): - reader = csv.reader(ifile, dialect=CsvDialect) - - try: - fieldnames = next(reader) - except StopIteration: - return - - mv_fieldnames = dict((name, name[len('__mv_'):]) for name in fieldnames if name.startswith('__mv_')) - - if len(mv_fieldnames) == 0: - for values in reader: - yield OrderedDict(list(zip(fieldnames, values))) - return - - for values in reader: - record = OrderedDict() - for fieldname, value in zip(fieldnames, values): - if fieldname.startswith('__mv_'): - if len(value) > 0: - record[mv_fieldnames[fieldname]] = self._decode_list(value) - elif fieldname not in record: - record[fieldname] = value - yield record - - def _execute_v2(self, ifile, process): - istream = self._as_binary_stream(ifile) - - while True: - result = self._read_chunk(istream) - - if not result: - return - - metadata, body = result - action = getattr(metadata, 'action', None) - if action != 'execute': - raise RuntimeError(f'Expected execute action, not {action}') - - self._finished = getattr(metadata, 'finished', False) - self._record_writer.is_flushed = False - self._metadata.update(metadata) - self._execute_chunk_v2(process, result) - - self._record_writer.write_chunk(finished=self._finished) - - def _execute_chunk_v2(self, process, chunk): - metadata, body = chunk - - if len(body) <= 0 and not self._allow_empty_input: - raise ValueError( - "No records found to process. 
Set allow_empty_input=True in dispatch function to move forward " - "with empty records.") - - records = self._read_csv_records(StringIO(body)) - self._record_writer.write_records(process(records)) - - def _report_unexpected_error(self): - - error_type, error, tb = sys.exc_info() - origin = tb - - while origin.tb_next is not None: - origin = origin.tb_next - - filename = origin.tb_frame.f_code.co_filename - lineno = origin.tb_lineno - message = f'{error_type.__name__} at "{filename}", line {str(lineno)} : {error}' - - environment.splunklib_logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb))) - self.write_error(message) - - # endregion - - # region Types - - class ConfigurationSettings: - """ Represents the configuration settings common to all :class:`SearchCommand` classes. - - """ - - def __init__(self, command): - self.command = command - - def __repr__(self): - """ Converts the value of this instance to its string representation. - - The value of this ConfigurationSettings instance is represented as a string of comma-separated - :code:`(name, value)` pairs. - - :return: String representation of this instance - - """ - definitions = type(self).configuration_setting_definitions - settings = [repr((setting.name, setting.__get__(self), setting.supporting_protocols)) for setting in - definitions] - return '[' + ', '.join(settings) + ']' - - def __str__(self): - """ Converts the value of this instance to its string representation. - - The value of this ConfigurationSettings instance is represented as a string of comma-separated - :code:`name=value` pairs. Items with values of :const:`None` are filtered from the list. - - :return: String representation of this instance - - """ - # text = ', '.join(imap(lambda (name, value): name + '=' + json_encode_string(unicode(value)), self.iteritems())) - text = ', '.join([f'{name}={json_encode_string(str(value))}' for (name, value) in self.items()]) - return text - - # region Methods - - @classmethod - def fix_up(cls, command_class): - """ Adjusts and checks this class and its search command class. - - Derived classes typically override this method. It is used by the :decorator:`Configuration` decorator to - fix up the :class:`SearchCommand` class it adorns. This method is overridden by :class:`EventingCommand`, - :class:`GeneratingCommand`, :class:`ReportingCommand`, and :class:`StreamingCommand`, the base types for - all other search commands. 
- - :param command_class: Command class targeted by this class - - """ - return - - # TODO: Stop looking like a dictionary because we don't obey the semantics - # N.B.: Does not use Python 2 dict copy semantics - def iteritems(self): - definitions = type(self).configuration_setting_definitions - version = self.command.protocol_version - return [name_value1 for name_value1 in [(setting.name, setting.__get__(self)) for setting in - [setting for setting in definitions if - setting.is_supported_by_protocol(version)]] if - name_value1[1] is not None] - - # N.B.: Does not use Python 3 dict view semantics - - items = iteritems - - # endregion - - # endregion - - -SearchMetric = namedtuple('SearchMetric', ('elapsed_seconds', 'invocation_count', 'input_count', 'output_count')) - - -def dispatch(command_class, argv=sys.argv, input_file=sys.stdin, output_file=sys.stdout, module_name=None, - allow_empty_input=True): - """ Instantiates and executes a search command class - - This function implements a `conditional script stanza `_ based on the value of - :code:`module_name`:: - - if module_name is None or module_name == '__main__': - # execute command - - Call this function at module scope with :code:`module_name=__name__`, if you would like your module to act as either - a reusable module or a standalone program. Otherwise, if you wish this function to unconditionally instantiate and - execute :code:`command_class`, pass :const:`None` as the value of :code:`module_name`. - - :param command_class: Search command class to instantiate and execute. - :type command_class: type - :param argv: List of arguments to the command. - :type argv: list or tuple - :param input_file: File from which the command will read data. - :type input_file: :code:`file` - :param output_file: File to which the command will write data. - :type output_file: :code:`file` - :param module_name: Name of the module calling :code:`dispatch` or :const:`None`. - :type module_name: :code:`basestring` - :param allow_empty_input: Allow empty input records for the command, if False an Error will be returned if empty chunk body is encountered when read - :type allow_empty_input: bool - :returns: :const:`None` - - **Example** - - .. code-block:: python - :linenos: - - #!/usr/bin/env python - from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators - @Configuration() - class SomeStreamingCommand(StreamingCommand): - ... - def stream(records): - ... - dispatch(SomeStreamingCommand, module_name=__name__) - - Dispatches the :code:`SomeStreamingCommand`, if and only if :code:`__name__` is equal to :code:`'__main__'`. - - **Example** - - .. code-block:: python - :linenos: - - from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators - @Configuration() - class SomeStreamingCommand(StreamingCommand): - ... - def stream(records): - ... - dispatch(SomeStreamingCommand) - - Unconditionally dispatches :code:`SomeStreamingCommand`. - - """ - assert issubclass(command_class, SearchCommand) - - if module_name is None or module_name == '__main__': - command_class().process(argv, input_file, output_file, allow_empty_input) +# coding=utf-8 +# +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. 
You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# Absolute imports
+
+import csv
+import io
+import os
+import re
+import sys
+import tempfile
+import traceback
+from collections import namedtuple, OrderedDict
+from copy import deepcopy
+from io import StringIO
+from itertools import chain, islice
+from logging import _nameToLevel as _levelNames, getLevelName, getLogger
+from shutil import make_archive
+from time import time
+from urllib.parse import unquote
+from urllib.parse import urlsplit
+from warnings import warn
+from xml.etree import ElementTree
+from splunklib.utils import ensure_str
+
+
+# Relative imports
+import splunklib
+from . import Boolean, Option, environment
+from .internals import (
+ CommandLineParser,
+ CsvDialect,
+ InputHeader,
+ Message,
+ MetadataDecoder,
+ MetadataEncoder,
+ ObjectView,
+ Recorder,
+ RecordWriterV1,
+ RecordWriterV2,
+ json_encode_string)
+from ..client import Service
+
+
+# ----------------------------------------------------------------------------------------------------------------------
+
+# P1 [ ] TODO: Log these issues against ChunkedExternProcessor
+#
+# 1. Implement requires_preop configuration setting.
+# This configuration setting is currently rejected by ChunkedExternProcessor.
+#
+# 2. Rename type=events as type=eventing for symmetry with type=reporting and type=streaming
+# Eventing commands process records on the events pipeline. This change affects ChunkedExternProcessor.cpp,
+# eventing_command.py, and generating_command.py.
+#
+# 3. For consistency with SCPV1, commands.conf should not require filename setting when chunked = true
+# The SCPV1 processor uses .py as the default filename. The ChunkedExternProcessor should do the same.
+
+# P1 [ ] TODO: Verify that ChunkedExternProcessor complains if a streaming_preop has a type other than 'streaming'
+# It once looked like sending type='reporting' for the streaming_preop was accepted.
+
+# ----------------------------------------------------------------------------------------------------------------------
+
+# P2 [ ] TODO: Consider bumping None formatting up to Option.Item.__str__
+
+
+class SearchCommand:
+ """ Represents a custom search command.
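+
+ A minimal sketch of a derived command (the command name is illustrative;
+ production commands usually subclass :class:`StreamingCommand`,
+ :class:`GeneratingCommand`, :class:`EventingCommand`, or
+ :class:`ReportingCommand` rather than this base class directly)::
+
+ @Configuration()
+ class NoopCommand(StreamingCommand):
+ def stream(self, records):
+ for record in records:
+ yield record
+
+ dispatch(NoopCommand, module_name=__name__)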
+ + """ + + def __init__(self): + + # Variables that may be used, but not altered by derived classes + + class_name = self.__class__.__name__ + + self._logger, self._logging_configuration = getLogger(class_name), environment.logging_configuration + + # Variables backing option/property values + + self._configuration = self.ConfigurationSettings(self) + self._input_header = InputHeader() + self._fieldnames = None + self._finished = None + self._metadata = None + self._options = None + self._protocol_version = None + self._search_results_info = None + self._service = None + + # Internal variables + + self._default_logging_level = self._logger.level + self._record_writer = None + self._records = None + self._allow_empty_input = True + + def __str__(self): + text = ' '.join(chain((type(self).name, str(self.options)), [] if self.fieldnames is None else self.fieldnames)) + return text + + # region Options + + @Option + def logging_configuration(self): + """ **Syntax:** logging_configuration= + + **Description:** Loads an alternative logging configuration file for + a command invocation. The logging configuration file must be in Python + ConfigParser-format. Path names are relative to the app root directory. + + """ + return self._logging_configuration + + @logging_configuration.setter + def logging_configuration(self, value): + self._logger, self._logging_configuration = environment.configure_logging(self.__class__.__name__, value) + + @Option + def logging_level(self): + """ **Syntax:** logging_level=[CRITICAL|ERROR|WARNING|INFO|DEBUG|NOTSET] + + **Description:** Sets the threshold for the logger of this command invocation. Logging messages less severe than + `logging_level` will be ignored. + + """ + return getLevelName(self._logger.getEffectiveLevel()) + + @logging_level.setter + def logging_level(self, value): + if value is None: + value = self._default_logging_level + if isinstance(value, (bytes, str)): + try: + level = _levelNames[value.upper()] + except KeyError: + raise ValueError(f'Unrecognized logging level: {value}') + else: + try: + level = int(value) + except ValueError: + raise ValueError(f'Unrecognized logging level: {value}') + self._logger.setLevel(level) + + def add_field(self, current_record, field_name, field_value): + self._record_writer.custom_fields.add(field_name) + current_record[field_name] = field_value + + def gen_record(self, **record): + self._record_writer.custom_fields |= set(record.keys()) + return record + + record = Option(doc=''' + **Syntax: record= + + **Description:** When `true`, records the interaction between the command and splunkd. Defaults to `false`. + + ''', default=False, validate=Boolean()) + + show_configuration = Option(doc=''' + **Syntax:** show_configuration= + + **Description:** When `true`, reports command configuration as an informational message. Defaults to `false`. + + ''', default=False, validate=Boolean()) + + # endregion + + # region Properties + + @property + def configuration(self): + """ Returns the configuration settings for this command. + + """ + return self._configuration + + @property + def fieldnames(self): + """ Returns the fieldnames specified as argument to this command. + + """ + return self._fieldnames + + @fieldnames.setter + def fieldnames(self, value): + self._fieldnames = value + + @property + def input_header(self): + """ Returns the input header for this command. + + :return: The input header for this command. 
+ :rtype: InputHeader
+
+ """
+ warn(
+ 'SearchCommand.input_header is deprecated and will be removed in a future release. '
+ 'Please use SearchCommand.metadata instead.', DeprecationWarning, 2)
+ return self._input_header
+
+ @property
+ def logger(self):
+ """ Returns the logger for this command.
+
+ :return: The logger for this command.
+ :rtype: logging.Logger
+
+ """
+ return self._logger
+
+ @property
+ def metadata(self):
+ return self._metadata
+
+ @property
+ def options(self):
+ """ Returns the options specified as argument to this command.
+
+ """
+ if self._options is None:
+ self._options = Option.View(self)
+ return self._options
+
+ @property
+ def protocol_version(self):
+ return self._protocol_version
+
+ @property
+ def search_results_info(self):
+ """ Returns the search results info for this command invocation.
+
+ The search results info object is created from the search results info file associated with the command
+ invocation.
+
+ :return: Search results info, or :const:`None` if the search results info file associated with the command
+ invocation is inaccessible.
+ :rtype: SearchResultsInfo or NoneType
+
+ """
+ if self._search_results_info is not None:
+ return self._search_results_info
+
+ if self._protocol_version == 1:
+ try:
+ path = self._input_header['infoPath']
+ except KeyError:
+ return None
+ else:
+ assert self._protocol_version == 2
+
+ try:
+ dispatch_dir = self._metadata.searchinfo.dispatch_dir
+ except AttributeError:
+ return None
+
+ path = os.path.join(dispatch_dir, 'info.csv')
+
+ try:
+ with io.open(path, 'r') as f:
+ reader = csv.reader(f, dialect=CsvDialect)
+ fields = next(reader)
+ values = next(reader)
+ except IOError as error:
+ if error.errno == 2:
+ self.logger.error(f'Search results info file {json_encode_string(path)} does not exist.')
+ return
+ raise
+
+ def convert_field(field):
+ return (field[1:] if field[0] == '_' else field).replace('.', '_')
+
+ decode = MetadataDecoder().decode
+
+ def convert_value(value):
+ try:
+ return decode(value) if len(value) > 0 else value
+ except ValueError:
+ return value
+
+ info = ObjectView(dict((convert_field(f_v[0]), convert_value(f_v[1])) for f_v in zip(fields, values)))
+
+ try:
+ count_map = info.countMap
+ except AttributeError:
+ pass
+ else:
+ count_map = count_map.split(';')
+ n = len(count_map)
+ info.countMap = dict(list(zip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2))))
+
+ try:
+ msg_type = info.msgType
+ msg_text = info.msg
+ except AttributeError:
+ pass
+ else:
+ messages = [t_m for t_m in zip(msg_type.split('\n'), msg_text.split('\n')) if t_m[0] or t_m[1]]
+ info.msg = [Message(message) for message in messages]
+ del info.msgType
+
+ try:
+ info.vix_families = ElementTree.fromstring(info.vix_families)
+ except AttributeError:
+ pass
+
+ self._search_results_info = info
+ return info
+
+ @property
+ def service(self):
+ """ Returns a Splunk service object for this command invocation or None.
+
+ The service object is created from the Splunkd URI and authentication token passed to the command invocation in
+ the search results info file. This data is not passed to a command invocation by default. You must request it by
+ specifying this pair of configuration settings in commands.conf:
+
+ .. code-block:: python
+
+ enableheader = true
+ requires_srinfo = true
+
+ The :code:`enableheader` setting is :code:`true` by default. Hence, you need not set it. The
+ :code:`requires_srinfo` setting is false by default. Hence, you must set it.
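+
+ A usage sketch from inside a derived command's processing method (the
+ ``stream`` method and log message are illustrative)::
+
+ def stream(self, records):
+ if self.service is not None:
+ self.logger.info('splunkd at %s:%s', self.service.host, self.service.port)
+ for record in records:
+ yield record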
+ + :return: :class:`splunklib.client.Service`, if :code:`enableheader` and :code:`requires_srinfo` are both + :code:`true`. Otherwise, if either :code:`enableheader` or :code:`requires_srinfo` are :code:`false`, a value + of :code:`None` is returned. + + """ + if self._service is not None: + return self._service + + metadata = self._metadata + + if metadata is None: + return None + + try: + searchinfo = self._metadata.searchinfo + except AttributeError: + return None + + splunkd_uri = searchinfo.splunkd_uri + + if splunkd_uri is None: + return None + + uri = urlsplit(splunkd_uri, allow_fragments=False) + + self._service = Service( + scheme=uri.scheme, host=uri.hostname, port=uri.port, app=searchinfo.app, token=searchinfo.session_key) + + return self._service + + # endregion + + # region Methods + + def error_exit(self, error, message=None): + self.write_error(error.message if message is None else message) + self.logger.error('Abnormal exit: %s', error) + exit(1) + + def finish(self): + """ Flushes the output buffer and signals that this command has finished processing data. + + :return: :const:`None` + + """ + self._record_writer.flush(finished=True) + + def flush(self): + """ Flushes the output buffer. + + :return: :const:`None` + + """ + self._record_writer.flush(finished=False) + + def prepare(self): + """ Prepare for execution. + + This method should be overridden in search command classes that wish to examine and update their configuration + or option settings prior to execution. It is called during the getinfo exchange before command metadata is sent + to splunkd. + + :return: :const:`None` + :rtype: NoneType + + """ + + def process(self, argv=sys.argv, ifile=sys.stdin, ofile=sys.stdout, allow_empty_input=True): + """ Process data. + + :param argv: Command line arguments. + :type argv: list or tuple + + :param ifile: Input data file. + :type ifile: file + + :param ofile: Output data file. 
+ :type ofile: file + + :param allow_empty_input: Allow empty input records for the command, if False an Error will be returned if empty chunk body is encountered when read + :type allow_empty_input: bool + + :return: :const:`None` + :rtype: NoneType + + """ + + self._allow_empty_input = allow_empty_input + + if len(argv) > 1: + self._process_protocol_v1(argv, ifile, ofile) + else: + self._process_protocol_v2(argv, ifile, ofile) + + def _map_input_header(self): + metadata = self._metadata + searchinfo = metadata.searchinfo + self._input_header.update( + allowStream=None, + infoPath=os.path.join(searchinfo.dispatch_dir, 'info.csv'), + keywords=None, + preview=metadata.preview, + realtime=searchinfo.earliest_time != 0 and searchinfo.latest_time != 0, + search=searchinfo.search, + sid=searchinfo.sid, + splunkVersion=searchinfo.splunk_version, + truncated=None) + + def _map_metadata(self, argv): + source = SearchCommand._MetadataSource(argv, self._input_header, self.search_results_info) + + def _map(metadata_map): + metadata = {} + + for name, value in metadata_map.items(): + if isinstance(value, dict): + value = _map(value) + else: + transform, extract = value + if extract is None: + value = None + else: + value = extract(source) + if not (value is None or transform is None): + value = transform(value) + metadata[name] = value + + return ObjectView(metadata) + + self._metadata = _map(SearchCommand._metadata_map) + + _metadata_map = { + 'action': + (lambda v: 'getinfo' if v == '__GETINFO__' else 'execute' if v == '__EXECUTE__' else None, + lambda s: s.argv[1]), + 'preview': + (bool, lambda s: s.input_header.get('preview')), + 'searchinfo': { + 'app': + (lambda v: v.ppc_app, lambda s: s.search_results_info), + 'args': + (None, lambda s: s.argv), + 'dispatch_dir': + (os.path.dirname, lambda s: s.input_header.get('infoPath')), + 'earliest_time': + (lambda v: float(v.rt_earliest) if len(v.rt_earliest) > 0 else 0.0, lambda s: s.search_results_info), + 'latest_time': + (lambda v: float(v.rt_latest) if len(v.rt_latest) > 0 else 0.0, lambda s: s.search_results_info), + 'owner': + (None, None), + 'raw_args': + (None, lambda s: s.argv), + 'search': + (unquote, lambda s: s.input_header.get('search')), + 'session_key': + (lambda v: v.auth_token, lambda s: s.search_results_info), + 'sid': + (None, lambda s: s.input_header.get('sid')), + 'splunk_version': + (None, lambda s: s.input_header.get('splunkVersion')), + 'splunkd_uri': + (lambda v: v.splunkd_uri, lambda s: s.search_results_info), + 'username': + (lambda v: v.ppc_user, lambda s: s.search_results_info)}} + + _MetadataSource = namedtuple('Source', ('argv', 'input_header', 'search_results_info')) + + def _prepare_protocol_v1(self, argv, ifile, ofile): + + debug = environment.splunklib_logger.debug + + # Provide as much context as possible in advance of parsing the command line and preparing for execution + + self._input_header.read(ifile) + self._protocol_version = 1 + self._map_metadata(argv) + + debug(' metadata=%r, input_header=%r', self._metadata, self._input_header) + + try: + tempfile.tempdir = self._metadata.searchinfo.dispatch_dir + except AttributeError: + raise RuntimeError(f'{self.__class__.__name__}.metadata.searchinfo.dispatch_dir is undefined') + + debug(' tempfile.tempdir=%r', tempfile.tempdir) + + CommandLineParser.parse(self, argv[2:]) + self.prepare() + + if self.record: + self.record = False + + record_argv = [argv[0], argv[1], str(self._options), ' '.join(self.fieldnames)] + ifile, ofile = self._prepare_recording(record_argv, ifile, 
ofile) + self._record_writer.ofile = ofile + ifile.record(str(self._input_header), '\n\n') + + if self.show_configuration: + self.write_info(self.name + ' command configuration: ' + str(self._configuration)) + + return ifile # wrapped, if self.record is True + + def _prepare_recording(self, argv, ifile, ofile): + + # Create the recordings directory, if it doesn't already exist + + recordings = os.path.join(environment.splunk_home, 'var', 'run', 'splunklib.searchcommands', 'recordings') + + if not os.path.isdir(recordings): + os.makedirs(recordings) + + # Create input/output recorders from ifile and ofile + + recording = os.path.join(recordings, self.__class__.__name__ + '-' + repr(time()) + '.' + self._metadata.action) + ifile = Recorder(recording + '.input', ifile) + ofile = Recorder(recording + '.output', ofile) + + # Archive the dispatch directory--if it exists--so that it can be used as a baseline in mocks) + + dispatch_dir = self._metadata.searchinfo.dispatch_dir + + if dispatch_dir is not None: # __GETINFO__ action does not include a dispatch_dir + root_dir, base_dir = os.path.split(dispatch_dir) + make_archive(recording + '.dispatch_dir', 'gztar', root_dir, base_dir, logger=self.logger) + + # Save a splunk command line because it is useful for developing tests + + with open(recording + '.splunk_cmd', 'wb') as f: + f.write('splunk cmd python '.encode()) + f.write(os.path.basename(argv[0]).encode()) + for arg in islice(argv, 1, len(argv)): + f.write(' '.encode()) + f.write(arg.encode()) + + return ifile, ofile + + def _process_protocol_v1(self, argv, ifile, ofile): + + debug = environment.splunklib_logger.debug + class_name = self.__class__.__name__ + + debug('%s.process started under protocol_version=1', class_name) + self._record_writer = RecordWriterV1(ofile) + + # noinspection PyBroadException + try: + if argv[1] == '__GETINFO__': + + debug('Writing configuration settings') + + ifile = self._prepare_protocol_v1(argv, ifile, ofile) + self._record_writer.write_record(dict( + (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in + self._configuration.items())) + self.finish() + + elif argv[1] == '__EXECUTE__': + + debug('Executing') + + ifile = self._prepare_protocol_v1(argv, ifile, ofile) + self._records = self._records_protocol_v1 + self._metadata.action = 'execute' + self._execute(ifile, None) + + else: + message = ( + f'Command {self.name} appears to be statically configured for search command protocol version 1 and static ' + 'configuration is unsupported by splunklib.searchcommands. Please ensure that ' + 'default/commands.conf contains this stanza:\n' + f'[{self.name}]\n' + f'filename = {os.path.basename(argv[0])}\n' + 'enableheader = true\n' + 'outputheader = true\n' + 'requires_srinfo = true\n' + 'supports_getinfo = true\n' + 'supports_multivalues = true\n' + 'supports_rawargs = true') + raise RuntimeError(message) + + except (SyntaxError, ValueError) as error: + self.write_error(str(error)) + self.flush() + exit(0) + + except SystemExit: + self.flush() + raise + + except: + self._report_unexpected_error() + self.flush() + exit(1) + + debug('%s.process finished under protocol_version=1', class_name) + + def _protocol_v2_option_parser(self, arg): + """ Determines if an argument is an Option/Value pair, or just a Positional Argument. + Method so different search commands can handle parsing of arguments differently. 
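+
+ The default implementation splits on the first ``=``, for example::
+
+ self._protocol_v2_option_parser('limit=10') # ['limit', '10']
+ self._protocol_v2_option_parser('some_field') # ['some_field']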
+
+ :param arg: A single argument provided to the command from SPL
+ :type arg: str
+
+ :return: [OptionName, OptionValue] OR [PositionalArgument]
+ :rtype: List[str]
+
+ """
+ return arg.split('=', 1)
+
+ def _process_protocol_v2(self, argv, ifile, ofile):
+ """ Processes records on the input stream, optionally writing records to the output stream.
+
+ :param ifile: Input file object.
+ :type ifile: file or InputType
+
+ :param ofile: Output file object.
+ :type ofile: file or OutputType
+
+ :return: :const:`None`
+
+ """
+ debug = environment.splunklib_logger.debug
+ class_name = self.__class__.__name__
+
+ debug('%s.process started under protocol_version=2', class_name)
+ self._protocol_version = 2
+
+ # Read search command metadata from splunkd
+ # noinspection PyBroadException
+ try:
+ debug('Reading metadata')
+ metadata, body = self._read_chunk(self._as_binary_stream(ifile))
+
+ action = getattr(metadata, 'action', None)
+
+ if action != 'getinfo':
+ raise RuntimeError(f'Expected getinfo action, not {action}')
+
+ if len(body) > 0:
+ raise RuntimeError('Did not expect data for getinfo action')
+
+ self._metadata = deepcopy(metadata)
+
+ searchinfo = self._metadata.searchinfo
+
+ searchinfo.earliest_time = float(searchinfo.earliest_time)
+ searchinfo.latest_time = float(searchinfo.latest_time)
+ searchinfo.search = unquote(searchinfo.search)
+
+ self._map_input_header()
+
+ debug(' metadata=%r, input_header=%r', self._metadata, self._input_header)
+
+ try:
+ tempfile.tempdir = self._metadata.searchinfo.dispatch_dir
+ except AttributeError:
+ raise RuntimeError(f'{class_name}.metadata.searchinfo.dispatch_dir is undefined')
+
+ debug(' tempfile.tempdir=%r', tempfile.tempdir)
+ except:
+ self._record_writer = RecordWriterV2(ofile)
+ self._report_unexpected_error()
+ self.finish()
+ exit(1)
+
+ # Write search command configuration for consumption by splunkd
+ # noinspection PyBroadException
+ try:
+ self._record_writer = RecordWriterV2(ofile, getattr(self._metadata.searchinfo, 'maxresultrows', None))
+ self.fieldnames = []
+ self.options.reset()
+
+ args = self.metadata.searchinfo.args
+ error_count = 0
+
+ debug('Parsing arguments')
+
+ if args and isinstance(args, list):
+ for arg in args:
+ result = self._protocol_v2_option_parser(arg)
+ if len(result) == 1:
+ self.fieldnames.append(str(result[0]))
+ else:
+ name, value = result
+ name = str(name)
+ try:
+ option = self.options[name]
+ except KeyError:
+ self.write_error(f'Unrecognized option: {name}={value}')
+ error_count += 1
+ continue
+ try:
+ option.value = value
+ except ValueError:
+ self.write_error(f'Illegal value: {name}={value}')
+ error_count += 1
+ continue
+
+ missing = self.options.get_missing()
+
+ if missing is not None:
+ if len(missing) == 1:
+ self.write_error(f'A value for "{missing[0]}" is required')
+ else:
+ self.write_error(f'Values for these required options are missing: {", ".join(missing)}')
+ error_count += 1
+
+ if error_count > 0:
+ exit(1)
+
+ debug(' command: %s', str(self))
+
+ debug('Preparing for execution')
+ self.prepare()
+
+ if self.record:
+
+ ifile, ofile = self._prepare_recording(argv, ifile, ofile)
+ self._record_writer.ofile = ofile
+
+ # Record the metadata that initiated this command after removing the record option from args/raw_args
+
+ info = self._metadata.searchinfo
+
+ for attr in 'args', 'raw_args':
+ setattr(info, attr, [arg for arg in getattr(info, attr) if not arg.startswith('record=')])
+
+ metadata = MetadataEncoder().encode(self._metadata)
+ ifile.record('chunked 1.0,',
str(len(metadata)), ',0\n', metadata)
+
+ if self.show_configuration:
+ self.write_info(self.name + ' command configuration: ' + str(self._configuration))
+
+ debug(' command configuration: %s', self._configuration)
+
+ except SystemExit:
+ self._record_writer.write_metadata(self._configuration)
+ self.finish()
+ raise
+ except:
+ self._record_writer.write_metadata(self._configuration)
+ self._report_unexpected_error()
+ self.finish()
+ exit(1)
+
+ self._record_writer.write_metadata(self._configuration)
+
+ # Execute search command on data passing through the pipeline
+ # noinspection PyBroadException
+ try:
+ debug('Executing under protocol_version=2')
+ self._metadata.action = 'execute'
+ self._execute(ifile, None)
+ except SystemExit:
+ self.finish()
+ raise
+ except:
+ self._report_unexpected_error()
+ self.finish()
+ exit(1)
+
+ debug('%s.process completed', class_name)
+
+ def write_debug(self, message, *args):
+ self._record_writer.write_message('DEBUG', message, *args)
+
+ def write_error(self, message, *args):
+ self._record_writer.write_message('ERROR', message, *args)
+
+ def write_fatal(self, message, *args):
+ self._record_writer.write_message('FATAL', message, *args)
+
+ def write_info(self, message, *args):
+ self._record_writer.write_message('INFO', message, *args)
+
+ def write_warning(self, message, *args):
+ self._record_writer.write_message('WARN', message, *args)
+
+ def write_metric(self, name, value):
+ """ Writes a metric that will be added to the search inspector.
+
+ :param name: Name of the metric.
+ :type name: basestring
+
+ :param value: A 4-tuple containing the value of metric ``name`` where
+
+ value[0] = Elapsed seconds or :const:`None`.
+ value[1] = Number of invocations or :const:`None`.
+ value[2] = Input count or :const:`None`.
+ value[3] = Output count or :const:`None`.
+
+ The :data:`SearchMetric` type provides a convenient encapsulation of ``value``.
+
+ :return: :const:`None`.
+
+ """
+ self._record_writer.write_metric(name, value)
+
+ # P2 [ ] TODO: Support custom inspector values
+
+ @staticmethod
+ def _decode_list(mv):
+ return [match.replace('$$', '$') for match in SearchCommand._encoded_value.findall(mv)]
+
+ _encoded_value = re.compile(r'\$(?P<item>(?:\$\$|[^$])*)\$(?:;|$)') # matches a single value in an encoded list
+
+ # Note: Subclasses must override this method so that it can be called
+ # as self._execute(ifile, None)
+ def _execute(self, ifile, process):
+ """ Default processing loop
+
+ :param ifile: Input file object.
+ :type ifile: file
+
+ :param process: Bound method to call in processing loop.
+ :type process: instancemethod
+
+ :return: :const:`None`.
+ :rtype: NoneType + + """ + if self.protocol_version == 1: + self._record_writer.write_records(process(self._records(ifile))) + self.finish() + else: + assert self._protocol_version == 2 + self._execute_v2(ifile, process) + + @staticmethod + def _as_binary_stream(ifile): + naught = ifile.read(0) + if isinstance(naught, bytes): + return ifile + + try: + return ifile.buffer + except AttributeError as error: + raise RuntimeError(f'Failed to get underlying buffer: {error}') + + @staticmethod + def _read_chunk(istream): + # noinspection PyBroadException + assert isinstance(istream.read(0), bytes), 'Stream must be binary' + + try: + header = istream.readline() + except Exception as error: + raise RuntimeError(f'Failed to read transport header: {error}') + + if not header: + return None + + match = SearchCommand._header.match(ensure_str(header)) + + if match is None: + raise RuntimeError(f'Failed to parse transport header: {header}') + + metadata_length, body_length = match.groups() + metadata_length = int(metadata_length) + body_length = int(body_length) + + try: + metadata = istream.read(metadata_length) + except Exception as error: + raise RuntimeError(f'Failed to read metadata of length {metadata_length}: {error}') + + decoder = MetadataDecoder() + + try: + metadata = decoder.decode(ensure_str(metadata)) + except Exception as error: + raise RuntimeError(f'Failed to parse metadata of length {metadata_length}: {error}') + + # if body_length <= 0: + # return metadata, '' + + body = "" + try: + if body_length > 0: + body = istream.read(body_length) + except Exception as error: + raise RuntimeError(f'Failed to read body of length {body_length}: {error}') + + return metadata, ensure_str(body,errors="replace") + + _header = re.compile(r'chunked\s+1.0\s*,\s*(\d+)\s*,\s*(\d+)\s*\n') + + def _records_protocol_v1(self, ifile): + return self._read_csv_records(ifile) + + def _read_csv_records(self, ifile): + reader = csv.reader(ifile, dialect=CsvDialect) + + try: + fieldnames = next(reader) + except StopIteration: + return + + mv_fieldnames = dict((name, name[len('__mv_'):]) for name in fieldnames if name.startswith('__mv_')) + + if len(mv_fieldnames) == 0: + for values in reader: + yield OrderedDict(list(zip(fieldnames, values))) + return + + for values in reader: + record = OrderedDict() + for fieldname, value in zip(fieldnames, values): + if fieldname.startswith('__mv_'): + if len(value) > 0: + record[mv_fieldnames[fieldname]] = self._decode_list(value) + elif fieldname not in record: + record[fieldname] = value + yield record + + def _execute_v2(self, ifile, process): + istream = self._as_binary_stream(ifile) + + while True: + result = self._read_chunk(istream) + + if not result: + return + + metadata, body = result + action = getattr(metadata, 'action', None) + if action != 'execute': + raise RuntimeError(f'Expected execute action, not {action}') + + self._finished = getattr(metadata, 'finished', False) + self._record_writer.is_flushed = False + self._metadata.update(metadata) + self._execute_chunk_v2(process, result) + + self._record_writer.write_chunk(finished=self._finished) + + def _execute_chunk_v2(self, process, chunk): + metadata, body = chunk + + if len(body) <= 0 and not self._allow_empty_input: + raise ValueError( + "No records found to process. 
Set allow_empty_input=True in dispatch function to move forward " + "with empty records.") + + records = self._read_csv_records(StringIO(body)) + self._record_writer.write_records(process(records)) + + def _report_unexpected_error(self): + + error_type, error, tb = sys.exc_info() + origin = tb + + while origin.tb_next is not None: + origin = origin.tb_next + + filename = origin.tb_frame.f_code.co_filename + lineno = origin.tb_lineno + message = f'{error_type.__name__} at "{filename}", line {str(lineno)} : {error}' + + environment.splunklib_logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb))) + self.write_error(message) + + # endregion + + # region Types + + class ConfigurationSettings: + """ Represents the configuration settings common to all :class:`SearchCommand` classes. + + """ + + def __init__(self, command): + self.command = command + + def __repr__(self): + """ Converts the value of this instance to its string representation. + + The value of this ConfigurationSettings instance is represented as a string of comma-separated + :code:`(name, value)` pairs. + + :return: String representation of this instance + + """ + definitions = type(self).configuration_setting_definitions + settings = [repr((setting.name, setting.__get__(self), setting.supporting_protocols)) for setting in + definitions] + return '[' + ', '.join(settings) + ']' + + def __str__(self): + """ Converts the value of this instance to its string representation. + + The value of this ConfigurationSettings instance is represented as a string of comma-separated + :code:`name=value` pairs. Items with values of :const:`None` are filtered from the list. + + :return: String representation of this instance + + """ + # text = ', '.join(imap(lambda (name, value): name + '=' + json_encode_string(unicode(value)), self.iteritems())) + text = ', '.join([f'{name}={json_encode_string(str(value))}' for (name, value) in self.items()]) + return text + + # region Methods + + @classmethod + def fix_up(cls, command_class): + """ Adjusts and checks this class and its search command class. + + Derived classes typically override this method. It is used by the :decorator:`Configuration` decorator to + fix up the :class:`SearchCommand` class it adorns. This method is overridden by :class:`EventingCommand`, + :class:`GeneratingCommand`, :class:`ReportingCommand`, and :class:`StreamingCommand`, the base types for + all other search commands. 
+
+ :param command_class: Command class targeted by this class
+
+ """
+ return
+
+ # TODO: Stop looking like a dictionary because we don't obey the semantics
+ # N.B.: Does not use Python 2 dict copy semantics
+ def iteritems(self):
+ definitions = type(self).configuration_setting_definitions
+ version = self.command.protocol_version
+ return [name_value1 for name_value1 in [(setting.name, setting.__get__(self)) for setting in
+ [setting for setting in definitions if
+ setting.is_supported_by_protocol(version)]] if
+ name_value1[1] is not None]
+
+ # N.B.: Does not use Python 3 dict view semantics
+
+ items = iteritems
+
+ # endregion
+
+ # endregion
+
+
+SearchMetric = namedtuple('SearchMetric', ('elapsed_seconds', 'invocation_count', 'input_count', 'output_count'))
+
+
+def dispatch(command_class, argv=sys.argv, input_file=sys.stdin, output_file=sys.stdout, module_name=None,
+ allow_empty_input=True):
+ """ Instantiates and executes a search command class
+
+ This function implements a `conditional script stanza `_ based on the value of
+ :code:`module_name`::
+
+ if module_name is None or module_name == '__main__':
+ # execute command
+
+ Call this function at module scope with :code:`module_name=__name__` if you would like your module to act as either
+ a reusable module or a standalone program. Otherwise, if you wish this function to unconditionally instantiate and
+ execute :code:`command_class`, pass :const:`None` as the value of :code:`module_name`.
+
+ :param command_class: Search command class to instantiate and execute.
+ :type command_class: type
+ :param argv: List of arguments to the command.
+ :type argv: list or tuple
+ :param input_file: File from which the command will read data.
+ :type input_file: :code:`file`
+ :param output_file: File to which the command will write data.
+ :type output_file: :code:`file`
+ :param module_name: Name of the module calling :code:`dispatch` or :const:`None`.
+ :type module_name: :code:`basestring`
+ :param allow_empty_input: Allow empty input records for the command. If :const:`False`, a :code:`ValueError` is raised when an empty chunk body is read.
+ :type allow_empty_input: bool
+ :returns: :const:`None`
+
+ **Example**
+
+ .. code-block:: python
+ :linenos:
+
+ #!/usr/bin/env python
+ from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators
+ @Configuration()
+ class SomeStreamingCommand(StreamingCommand):
+ ...
+ def stream(records):
+ ...
+ dispatch(SomeStreamingCommand, module_name=__name__)
+
+ Dispatches :code:`SomeStreamingCommand`, if and only if :code:`__name__` is equal to :code:`'__main__'`.
+
+ **Example**
+
+ .. code-block:: python
+ :linenos:
+
+ from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators
+ @Configuration()
+ class SomeStreamingCommand(StreamingCommand):
+ ...
+ def stream(records):
+ ...
+ dispatch(SomeStreamingCommand)
+
+ Unconditionally dispatches :code:`SomeStreamingCommand`.
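+
+ To reject empty input instead of passing it through, set :code:`allow_empty_input=False`
+ (:code:`StrictStreamingCommand` below is a hypothetical command):
+
+ **Example**
+
+ .. code-block:: python
+ :linenos:
+
+ from splunklib.searchcommands import dispatch, StreamingCommand, Configuration
+ @Configuration()
+ class StrictStreamingCommand(StreamingCommand):
+ ...
+ def stream(records):
+ ...
+ dispatch(StrictStreamingCommand, module_name=__name__, allow_empty_input=False)
+
+ Dispatches the hypothetical :code:`StrictStreamingCommand` so that an empty chunk body raises a
+ :code:`ValueError` instead of being forwarded.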
+ + """ + assert issubclass(command_class, SearchCommand) + + if module_name is None or module_name == '__main__': + command_class().process(argv, input_file, output_file, allow_empty_input) diff --git a/tests/searchcommands/chunked_data_stream.py b/tests/searchcommands/chunked_data_stream.py index fcd0de7b..02d890af 100644 --- a/tests/searchcommands/chunked_data_stream.py +++ b/tests/searchcommands/chunked_data_stream.py @@ -1,100 +1,100 @@ -import collections -import csv -import io -import json - -import splunklib.searchcommands.internals -from splunklib.utils import ensure_binary, ensure_str - - -class Chunk: - def __init__(self, version, meta, data): - self.version = ensure_str(version) - self.meta = json.loads(meta) - dialect = splunklib.searchcommands.internals.CsvDialect - self.data = csv.DictReader(io.StringIO(data.decode("utf-8")), - dialect=dialect) - - -class ChunkedDataStreamIter(collections.abc.Iterator): - def __init__(self, chunk_stream): - self.chunk_stream = chunk_stream - - def __next__(self): - return self.next() - - def next(self): - try: - return self.chunk_stream.read_chunk() - except EOFError: - raise StopIteration - - -class ChunkedDataStream(collections.abc.Iterable): - def __iter__(self): - return ChunkedDataStreamIter(self) - - def __init__(self, stream): - empty = stream.read(0) - assert isinstance(empty, bytes) - self.stream = stream - - def read_chunk(self): - header = self.stream.readline() - - while len(header) > 0 and header.strip() == b'': - header = self.stream.readline() # Skip empty lines - if len(header) == 0: - raise EOFError - - version, meta, data = header.rstrip().split(b',') - metabytes = self.stream.read(int(meta)) - databytes = self.stream.read(int(data)) - return Chunk(version, metabytes, databytes) - - -def build_chunk(keyval, data=None): - metadata = ensure_binary(json.dumps(keyval)) - data_output = _build_data_csv(data) - return b"chunked 1.0,%d,%d\n%s%s" % (len(metadata), len(data_output), metadata, data_output) - - -def build_empty_searchinfo(): - return { - 'earliest_time': 0, - 'latest_time': 0, - 'search': "", - 'dispatch_dir': "", - 'sid': "", - 'args': [], - 'splunk_version': "42.3.4", - } - - -def build_getinfo_chunk(): - return build_chunk({ - 'action': 'getinfo', - 'preview': False, - 'searchinfo': build_empty_searchinfo()}) - - -def build_data_chunk(data, finished=True): - return build_chunk({'action': 'execute', 'finished': finished}, data) - - -def _build_data_csv(data): - if data is None: - return b'' - if isinstance(data, bytes): - return data - csvout = io.StringIO() - - headers = set() - for datum in data: - headers.update(datum.keys()) - writer = csv.DictWriter(csvout, headers, - dialect=splunklib.searchcommands.internals.CsvDialect) - writer.writeheader() - for datum in data: - writer.writerow(datum) - return ensure_binary(csvout.getvalue()) +import collections +import csv +import io +import json + +import splunklib.searchcommands.internals +from splunklib.utils import ensure_binary, ensure_str + + +class Chunk: + def __init__(self, version, meta, data): + self.version = ensure_str(version) + self.meta = json.loads(meta) + dialect = splunklib.searchcommands.internals.CsvDialect + self.data = csv.DictReader(io.StringIO(data.decode("utf-8")), + dialect=dialect) + + +class ChunkedDataStreamIter(collections.abc.Iterator): + def __init__(self, chunk_stream): + self.chunk_stream = chunk_stream + + def __next__(self): + return self.next() + + def next(self): + try: + return self.chunk_stream.read_chunk() + except EOFError: + 
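+ # End of the chunk stream: surface EOFError as StopIteration so that
+ # for-loops over a ChunkedDataStream terminate cleanly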
raise StopIteration + + +class ChunkedDataStream(collections.abc.Iterable): + def __iter__(self): + return ChunkedDataStreamIter(self) + + def __init__(self, stream): + empty = stream.read(0) + assert isinstance(empty, bytes) + self.stream = stream + + def read_chunk(self): + header = self.stream.readline() + + while len(header) > 0 and header.strip() == b'': + header = self.stream.readline() # Skip empty lines + if len(header) == 0: + raise EOFError + + version, meta, data = header.rstrip().split(b',') + metabytes = self.stream.read(int(meta)) + databytes = self.stream.read(int(data)) + return Chunk(version, metabytes, databytes) + + +def build_chunk(keyval, data=None): + metadata = ensure_binary(json.dumps(keyval)) + data_output = _build_data_csv(data) + return b"chunked 1.0,%d,%d\n%s%s" % (len(metadata), len(data_output), metadata, data_output) + + +def build_empty_searchinfo(): + return { + 'earliest_time': 0, + 'latest_time': 0, + 'search': "", + 'dispatch_dir': "", + 'sid': "", + 'args': [], + 'splunk_version': "42.3.4", + } + + +def build_getinfo_chunk(): + return build_chunk({ + 'action': 'getinfo', + 'preview': False, + 'searchinfo': build_empty_searchinfo()}) + + +def build_data_chunk(data, finished=True): + return build_chunk({'action': 'execute', 'finished': finished}, data) + + +def _build_data_csv(data): + if data is None: + return b'' + if isinstance(data, bytes): + return data + csvout = io.StringIO() + + headers = set() + for datum in data: + headers.update(datum.keys()) + writer = csv.DictWriter(csvout, headers, + dialect=splunklib.searchcommands.internals.CsvDialect) + writer.writeheader() + for datum in data: + writer.writerow(datum) + return ensure_binary(csvout.getvalue()) diff --git a/tests/searchcommands/test_internals_v1.py b/tests/searchcommands/test_internals_v1.py old mode 100644 new mode 100755 index 1e3cf25e..6e41844f --- a/tests/searchcommands/test_internals_v1.py +++ b/tests/searchcommands/test_internals_v1.py @@ -1,343 +1,343 @@ -#!/usr/bin/env python -# -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
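A minimal round-trip sketch of the helpers above (record values illustrative; assumes the repository
root is on :code:`sys.path`): frame one record with :code:`build_data_chunk`, then parse it back with
:code:`ChunkedDataStream`.

.. code-block:: python

    import io
    from tests.searchcommands.chunked_data_stream import ChunkedDataStream, build_data_chunk

    # build_data_chunk frames records as b"chunked 1.0,<metadata length>,<body length>\n<metadata><csv body>"
    payload = build_data_chunk([{'a': '1', 'b': '2'}])

    for chunk in ChunkedDataStream(io.BytesIO(payload)):
        assert chunk.meta['action'] == 'execute'
        assert [row['a'] for row in chunk.data] == ['1']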
- -from contextlib import closing -from unittest import main, TestCase -import os -from io import StringIO, BytesIO -from functools import reduce -import pytest - -from splunklib.searchcommands.internals import CommandLineParser, InputHeader, RecordWriterV1 -from splunklib.searchcommands.decorators import Configuration, Option -from splunklib.searchcommands.validators import Boolean - -from splunklib.searchcommands.search_command import SearchCommand - - -@pytest.mark.smoke -class TestInternals(TestCase): - def setUp(self): - TestCase.setUp(self) - - def test_command_line_parser(self): - - @Configuration() - class TestCommandLineParserCommand(SearchCommand): - - required_option = Option(validate=Boolean(), require=True) - unnecessary_option = Option(validate=Boolean(), default=True, require=False) - - class ConfigurationSettings(SearchCommand.ConfigurationSettings): - - @classmethod - def fix_up(cls, command_class): pass - - # Command line without fieldnames - - options = ['required_option=true', 'unnecessary_option=false'] - - command = TestCommandLineParserCommand() - CommandLineParser.parse(command, options) - - for option in command.options.values(): - if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: - self.assertFalse(option.is_set) - continue - self.assertTrue(option.is_set) - - expected = 'testcommandlineparser required_option="t" unnecessary_option="f"' - self.assertEqual(expected, str(command)) - self.assertEqual(command.fieldnames, []) - - # Command line with fieldnames - - fieldnames = ['field_1', 'field_2', 'field_3'] - - command = TestCommandLineParserCommand() - CommandLineParser.parse(command, options + fieldnames) - - for option in command.options.values(): - if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: - self.assertFalse(option.is_set) - continue - self.assertTrue(option.is_set) - - expected = 'testcommandlineparser required_option="t" unnecessary_option="f" field_1 field_2 field_3' - self.assertEqual(expected, str(command)) - self.assertEqual(command.fieldnames, fieldnames) - - # Command line without any unnecessary options - - command = TestCommandLineParserCommand() - CommandLineParser.parse(command, ['required_option=true'] + fieldnames) - - for option in command.options.values(): - if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', - 'show_configuration']: - self.assertFalse(option.is_set) - continue - self.assertTrue(option.is_set) - - expected = 'testcommandlineparser required_option="t" field_1 field_2 field_3' - self.assertEqual(expected, str(command)) - self.assertEqual(command.fieldnames, fieldnames) - - # Command line with missing required options, with or without fieldnames or unnecessary options - - options = ['unnecessary_option=true'] - self.assertRaises(ValueError, CommandLineParser.parse, command, options + fieldnames) - self.assertRaises(ValueError, CommandLineParser.parse, command, options) - self.assertRaises(ValueError, CommandLineParser.parse, command, []) - - # Command line with unrecognized options - - self.assertRaises(ValueError, CommandLineParser.parse, command, - ['unrecognized_option_1=foo', 'unrecognized_option_2=bar']) - - # Command line with a variety of quoted/escaped text options - - @Configuration() - class TestCommandLineParserCommand(SearchCommand): - - text = Option() - - class ConfigurationSettings(SearchCommand.ConfigurationSettings): - - @classmethod - def fix_up(cls, command_class): pass - - 
strings = [ - r'"foo bar"', - r'"foo/bar"', - r'"foo\\bar"', - r'"""foo bar"""', - r'"\"foo bar\""', - r'Hello\ World!', - r'\"Hello\ World!\"'] - - expected_values = [ - r'foo bar', - r'foo/bar', - r'foo\bar', - r'"foo bar"', - r'"foo bar"', - r'Hello World!', - r'"Hello World!"' - ] - - for string, expected_value in zip(strings, expected_values): - command = TestCommandLineParserCommand() - argv = ['text', '=', string] - CommandLineParser.parse(command, argv) - self.assertEqual(command.text, expected_value) - - for string, expected_value in zip(strings, expected_values): - command = TestCommandLineParserCommand() - argv = [string] - CommandLineParser.parse(command, argv) - self.assertEqual(command.fieldnames[0], expected_value) - - for string, expected_value in zip(strings, expected_values): - command = TestCommandLineParserCommand() - argv = ['text', '=', string] + strings - CommandLineParser.parse(command, argv) - self.assertEqual(command.text, expected_value) - self.assertEqual(command.fieldnames, expected_values) - - strings = [ - 'some\\ string\\', - r'some\ string"', - r'"some string', - r'some"string' - ] - - for string in strings: - command = TestCommandLineParserCommand() - argv = [string] - self.assertRaises(SyntaxError, CommandLineParser.parse, command, argv) - - def test_command_line_parser_unquote(self): - parser = CommandLineParser - - options = [ - r'foo', # unquoted string with no escaped characters - r'fo\o\ b\"a\\r', # unquoted string with some escaped characters - r'"foo"', # quoted string with no special characters - r'"""foobar1"""', # quoted string with quotes escaped like this: "" - r'"\"foobar2\""', # quoted string with quotes escaped like this: \" - r'"foo ""x"" bar"', # quoted string with quotes escaped like this: "" - r'"foo \"x\" bar"', # quoted string with quotes escaped like this: \" - r'"\\foobar"', # quoted string with an escaped backslash - r'"foo \\ bar"', # quoted string with an escaped backslash - r'"foobar\\"', # quoted string with an escaped backslash - r'foo\\\bar', # quoted string with an escaped backslash and an escaped 'b' - r'""', # pair of quotes - r''] # empty string - - expected = [ - r'foo', - r'foo b"a\r', - r'foo', - r'"foobar1"', - r'"foobar2"', - r'foo "x" bar', - r'foo "x" bar', - '\\foobar', - r'foo \ bar', - 'foobar\\', - r'foo\bar', - r'', - r''] - - # Command line with an assortment of string values - - self.assertEqual(expected[-4], parser.unquote(options[-4])) - - for i in range(0, len(options)): - self.assertEqual(expected[i], parser.unquote(options[i])) - - self.assertRaises(SyntaxError, parser.unquote, '"') - self.assertRaises(SyntaxError, parser.unquote, '"foo') - self.assertRaises(SyntaxError, parser.unquote, 'foo"') - self.assertRaises(SyntaxError, parser.unquote, 'foo\\') - - def test_input_header(self): - - # No items - - input_header = InputHeader() - - with closing(StringIO('\r\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 0) - - # One unnamed single-line item (same as no items) - - input_header = InputHeader() - - with closing(StringIO('this%20is%20an%20unnamed%20single-line%20item\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 0) - - input_header = InputHeader() - - with closing(StringIO('this%20is%20an%20unnamed\nmulti-\nline%20item\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 0) - - # One named single-line item - - input_header = InputHeader() - - with 
closing(StringIO('Foo:this%20is%20a%20single-line%20item\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 1) - self.assertEqual(input_header['Foo'], 'this is a single-line item') - - input_header = InputHeader() - - with closing(StringIO('Bar:this is a\nmulti-\nline item\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 1) - self.assertEqual(input_header['Bar'], 'this is a\nmulti-\nline item') - - # The infoPath item (which is the path to a file that we open for reads) - - input_header = InputHeader() - - with closing(StringIO('infoPath:non-existent.csv\n\n')) as input_file: - input_header.read(input_file) - - self.assertEqual(len(input_header), 1) - self.assertEqual(input_header['infoPath'], 'non-existent.csv') - - # Set of named items - - collection = { - 'word_list': 'hello\nworld\n!', - 'word_1': 'hello', - 'word_2': 'world', - 'word_3': '!', - 'sentence': 'hello world!'} - - input_header = InputHeader() - text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', collection.items(), '') + '\n' - - with closing(StringIO(text)) as input_file: - input_header.read(input_file) - - self.assertDictEqual(input_header, collection) - - # Set of named items with an unnamed item at the beginning (the only place that an unnamed item can appear) - - with closing(StringIO('unnamed item\n' + text)) as input_file: - input_header.read(input_file) - - self.assertDictEqual(input_header, collection) - - # Test iterators, indirectly through items, keys, and values - - self.assertEqual(sorted(input_header.items()), sorted(collection.items())) - self.assertEqual(sorted(input_header.keys()), sorted(collection.keys())) - self.assertEqual(sorted(input_header.values()), sorted(collection.values())) - - def test_messages_header(self): - - @Configuration() - class TestMessagesHeaderCommand(SearchCommand): - class ConfigurationSettings(SearchCommand.ConfigurationSettings): - - @classmethod - def fix_up(cls, command_class): pass - - command = TestMessagesHeaderCommand() - command._protocol_version = 1 - output_buffer = BytesIO() - command._record_writer = RecordWriterV1(output_buffer) - - messages = [ - (command.write_debug, 'debug_message'), - (command.write_error, 'error_message'), - (command.write_fatal, 'fatal_message'), - (command.write_info, 'info_message'), - (command.write_warning, 'warning_message')] - - for write, message in messages: - write(message) - - command.finish() - - expected = ( - 'debug_message=debug_message\r\n' - 'error_message=error_message\r\n' - 'error_message=fatal_message\r\n' - 'info_message=info_message\r\n' - 'warn_message=warning_message\r\n' - '\r\n') - - self.assertEqual(output_buffer.getvalue().decode('utf-8'), expected) - - _package_path = os.path.dirname(__file__) - - -if __name__ == "__main__": - main() +#!/usr/bin/env python +# +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
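The quoting conventions exercised by this module reduce to a few direct calls to
:code:`CommandLineParser.unquote`; a sketch using expected values taken from the tests below:

.. code-block:: python

    from splunklib.searchcommands.internals import CommandLineParser

    # Doubled quotes and backslash-escaped quotes both collapse to a literal quote
    assert CommandLineParser.unquote(r'"foo ""x"" bar"') == 'foo "x" bar'
    assert CommandLineParser.unquote(r'"foo \"x\" bar"') == 'foo "x" bar'

    # Backslash escapes are honored outside quotes as well
    assert CommandLineParser.unquote(r'fo\o\ b\"a\\r') == r'foo b"a\r'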
+ +from contextlib import closing +from unittest import main, TestCase +import os +from io import StringIO, BytesIO +from functools import reduce +import pytest + +from splunklib.searchcommands.internals import CommandLineParser, InputHeader, RecordWriterV1 +from splunklib.searchcommands.decorators import Configuration, Option +from splunklib.searchcommands.validators import Boolean + +from splunklib.searchcommands.search_command import SearchCommand + + +@pytest.mark.smoke +class TestInternals(TestCase): + def setUp(self): + TestCase.setUp(self) + + def test_command_line_parser(self): + + @Configuration() + class TestCommandLineParserCommand(SearchCommand): + + required_option = Option(validate=Boolean(), require=True) + unnecessary_option = Option(validate=Boolean(), default=True, require=False) + + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + + @classmethod + def fix_up(cls, command_class): pass + + # Command line without fieldnames + + options = ['required_option=true', 'unnecessary_option=false'] + + command = TestCommandLineParserCommand() + CommandLineParser.parse(command, options) + + for option in command.options.values(): + if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: + self.assertFalse(option.is_set) + continue + self.assertTrue(option.is_set) + + expected = 'testcommandlineparser required_option="t" unnecessary_option="f"' + self.assertEqual(expected, str(command)) + self.assertEqual(command.fieldnames, []) + + # Command line with fieldnames + + fieldnames = ['field_1', 'field_2', 'field_3'] + + command = TestCommandLineParserCommand() + CommandLineParser.parse(command, options + fieldnames) + + for option in command.options.values(): + if option.name in ['logging_configuration', 'logging_level', 'record', 'show_configuration']: + self.assertFalse(option.is_set) + continue + self.assertTrue(option.is_set) + + expected = 'testcommandlineparser required_option="t" unnecessary_option="f" field_1 field_2 field_3' + self.assertEqual(expected, str(command)) + self.assertEqual(command.fieldnames, fieldnames) + + # Command line without any unnecessary options + + command = TestCommandLineParserCommand() + CommandLineParser.parse(command, ['required_option=true'] + fieldnames) + + for option in command.options.values(): + if option.name in ['unnecessary_option', 'logging_configuration', 'logging_level', 'record', + 'show_configuration']: + self.assertFalse(option.is_set) + continue + self.assertTrue(option.is_set) + + expected = 'testcommandlineparser required_option="t" field_1 field_2 field_3' + self.assertEqual(expected, str(command)) + self.assertEqual(command.fieldnames, fieldnames) + + # Command line with missing required options, with or without fieldnames or unnecessary options + + options = ['unnecessary_option=true'] + self.assertRaises(ValueError, CommandLineParser.parse, command, options + fieldnames) + self.assertRaises(ValueError, CommandLineParser.parse, command, options) + self.assertRaises(ValueError, CommandLineParser.parse, command, []) + + # Command line with unrecognized options + + self.assertRaises(ValueError, CommandLineParser.parse, command, + ['unrecognized_option_1=foo', 'unrecognized_option_2=bar']) + + # Command line with a variety of quoted/escaped text options + + @Configuration() + class TestCommandLineParserCommand(SearchCommand): + + text = Option() + + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + + @classmethod + def fix_up(cls, command_class): pass + + 
strings = [ + r'"foo bar"', + r'"foo/bar"', + r'"foo\\bar"', + r'"""foo bar"""', + r'"\"foo bar\""', + r'Hello\ World!', + r'\"Hello\ World!\"'] + + expected_values = [ + r'foo bar', + r'foo/bar', + r'foo\bar', + r'"foo bar"', + r'"foo bar"', + r'Hello World!', + r'"Hello World!"' + ] + + for string, expected_value in zip(strings, expected_values): + command = TestCommandLineParserCommand() + argv = ['text', '=', string] + CommandLineParser.parse(command, argv) + self.assertEqual(command.text, expected_value) + + for string, expected_value in zip(strings, expected_values): + command = TestCommandLineParserCommand() + argv = [string] + CommandLineParser.parse(command, argv) + self.assertEqual(command.fieldnames[0], expected_value) + + for string, expected_value in zip(strings, expected_values): + command = TestCommandLineParserCommand() + argv = ['text', '=', string] + strings + CommandLineParser.parse(command, argv) + self.assertEqual(command.text, expected_value) + self.assertEqual(command.fieldnames, expected_values) + + strings = [ + 'some\\ string\\', + r'some\ string"', + r'"some string', + r'some"string' + ] + + for string in strings: + command = TestCommandLineParserCommand() + argv = [string] + self.assertRaises(SyntaxError, CommandLineParser.parse, command, argv) + + def test_command_line_parser_unquote(self): + parser = CommandLineParser + + options = [ + r'foo', # unquoted string with no escaped characters + r'fo\o\ b\"a\\r', # unquoted string with some escaped characters + r'"foo"', # quoted string with no special characters + r'"""foobar1"""', # quoted string with quotes escaped like this: "" + r'"\"foobar2\""', # quoted string with quotes escaped like this: \" + r'"foo ""x"" bar"', # quoted string with quotes escaped like this: "" + r'"foo \"x\" bar"', # quoted string with quotes escaped like this: \" + r'"\\foobar"', # quoted string with an escaped backslash + r'"foo \\ bar"', # quoted string with an escaped backslash + r'"foobar\\"', # quoted string with an escaped backslash + r'foo\\\bar', # quoted string with an escaped backslash and an escaped 'b' + r'""', # pair of quotes + r''] # empty string + + expected = [ + r'foo', + r'foo b"a\r', + r'foo', + r'"foobar1"', + r'"foobar2"', + r'foo "x" bar', + r'foo "x" bar', + '\\foobar', + r'foo \ bar', + 'foobar\\', + r'foo\bar', + r'', + r''] + + # Command line with an assortment of string values + + self.assertEqual(expected[-4], parser.unquote(options[-4])) + + for i in range(0, len(options)): + self.assertEqual(expected[i], parser.unquote(options[i])) + + self.assertRaises(SyntaxError, parser.unquote, '"') + self.assertRaises(SyntaxError, parser.unquote, '"foo') + self.assertRaises(SyntaxError, parser.unquote, 'foo"') + self.assertRaises(SyntaxError, parser.unquote, 'foo\\') + + def test_input_header(self): + + # No items + + input_header = InputHeader() + + with closing(StringIO('\r\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 0) + + # One unnamed single-line item (same as no items) + + input_header = InputHeader() + + with closing(StringIO('this%20is%20an%20unnamed%20single-line%20item\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 0) + + input_header = InputHeader() + + with closing(StringIO('this%20is%20an%20unnamed\nmulti-\nline%20item\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 0) + + # One named single-line item + + input_header = InputHeader() + + with 
closing(StringIO('Foo:this%20is%20a%20single-line%20item\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 1) + self.assertEqual(input_header['Foo'], 'this is a single-line item') + + input_header = InputHeader() + + with closing(StringIO('Bar:this is a\nmulti-\nline item\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 1) + self.assertEqual(input_header['Bar'], 'this is a\nmulti-\nline item') + + # The infoPath item (which is the path to a file that we open for reads) + + input_header = InputHeader() + + with closing(StringIO('infoPath:non-existent.csv\n\n')) as input_file: + input_header.read(input_file) + + self.assertEqual(len(input_header), 1) + self.assertEqual(input_header['infoPath'], 'non-existent.csv') + + # Set of named items + + collection = { + 'word_list': 'hello\nworld\n!', + 'word_1': 'hello', + 'word_2': 'world', + 'word_3': '!', + 'sentence': 'hello world!'} + + input_header = InputHeader() + text = reduce(lambda value, item: value + f'{item[0]}:{item[1]}\n', collection.items(), '') + '\n' + + with closing(StringIO(text)) as input_file: + input_header.read(input_file) + + self.assertDictEqual(input_header, collection) + + # Set of named items with an unnamed item at the beginning (the only place that an unnamed item can appear) + + with closing(StringIO('unnamed item\n' + text)) as input_file: + input_header.read(input_file) + + self.assertDictEqual(input_header, collection) + + # Test iterators, indirectly through items, keys, and values + + self.assertEqual(sorted(input_header.items()), sorted(collection.items())) + self.assertEqual(sorted(input_header.keys()), sorted(collection.keys())) + self.assertEqual(sorted(input_header.values()), sorted(collection.values())) + + def test_messages_header(self): + + @Configuration() + class TestMessagesHeaderCommand(SearchCommand): + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + + @classmethod + def fix_up(cls, command_class): pass + + command = TestMessagesHeaderCommand() + command._protocol_version = 1 + output_buffer = BytesIO() + command._record_writer = RecordWriterV1(output_buffer) + + messages = [ + (command.write_debug, 'debug_message'), + (command.write_error, 'error_message'), + (command.write_fatal, 'fatal_message'), + (command.write_info, 'info_message'), + (command.write_warning, 'warning_message')] + + for write, message in messages: + write(message) + + command.finish() + + expected = ( + 'debug_message=debug_message\r\n' + 'error_message=error_message\r\n' + 'error_message=fatal_message\r\n' + 'info_message=info_message\r\n' + 'warn_message=warning_message\r\n' + '\r\n') + + self.assertEqual(output_buffer.getvalue().decode('utf-8'), expected) + + _package_path = os.path.dirname(__file__) + + +if __name__ == "__main__": + main() diff --git a/tests/test_binding.py b/tests/test_binding.py old mode 100644 new mode 100755 index 9d4dd4b8..fe14c259 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -1,975 +1,975 @@ -#!/usr/bin/env python -# -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from http import server as BaseHTTPServer -from io import BytesIO, StringIO -from threading import Thread -from urllib.request import Request, urlopen - -from xml.etree.ElementTree import XML - -import json -import logging -from tests import testlib -import unittest -import socket -import ssl - -import splunklib -from splunklib import binding -from splunklib.binding import HTTPError, AuthenticationError, UrlEncoded -from splunklib import data -from splunklib.utils import ensure_str - -import pytest - -# splunkd endpoint paths -PATH_USERS = "authentication/users/" - -# XML Namespaces -NAMESPACE_ATOM = "http://www.w3.org/2005/Atom" -NAMESPACE_REST = "http://dev.splunk.com/ns/rest" -NAMESPACE_OPENSEARCH = "http://a9.com/-/spec/opensearch/1.1" - -# XML Extended Name Fragments -XNAMEF_ATOM = "{%s}%%s" % NAMESPACE_ATOM -XNAMEF_REST = "{%s}%%s" % NAMESPACE_REST -XNAMEF_OPENSEARCH = "{%s}%%s" % NAMESPACE_OPENSEARCH - -# XML Extended Names -XNAME_AUTHOR = XNAMEF_ATOM % "author" -XNAME_ENTRY = XNAMEF_ATOM % "entry" -XNAME_FEED = XNAMEF_ATOM % "feed" -XNAME_ID = XNAMEF_ATOM % "id" -XNAME_TITLE = XNAMEF_ATOM % "title" - - -def load(response): - return data.load(response.body.read()) - - -class BindingTestCase(unittest.TestCase): - context = None - - def setUp(self): - logging.info("%s", self.__class__.__name__) - self.opts = testlib.parse([], {}, ".env") - self.context = binding.connect(**self.opts.kwargs) - logging.debug("Connected to splunkd.") - - -class TestResponseReader(BindingTestCase): - def test_empty(self): - response = binding.ResponseReader(BytesIO(b"")) - self.assertTrue(response.empty) - self.assertEqual(response.peek(10), b"") - self.assertEqual(response.read(10), b"") - - arr = bytearray(10) - self.assertEqual(response.readinto(arr), 0) - self.assertEqual(arr, bytearray(10)) - self.assertTrue(response.empty) - - def test_read_past_end(self): - txt = b"abcd" - response = binding.ResponseReader(BytesIO(txt)) - self.assertFalse(response.empty) - self.assertEqual(response.peek(10), txt) - self.assertEqual(response.read(10), txt) - self.assertTrue(response.empty) - self.assertEqual(response.peek(10), b"") - self.assertEqual(response.read(10), b"") - - def test_read_partial(self): - txt = b"This is a test of the emergency broadcasting system." 
- response = binding.ResponseReader(BytesIO(txt)) - self.assertEqual(response.peek(5), txt[:5]) - self.assertFalse(response.empty) - self.assertEqual(response.read(), txt) - self.assertTrue(response.empty) - self.assertEqual(response.read(), b'') - - def test_readable(self): - txt = "abcd" - response = binding.ResponseReader(StringIO(txt)) - self.assertTrue(response.readable()) - - def test_readinto_bytearray(self): - txt = b"Checking readinto works as expected" - response = binding.ResponseReader(BytesIO(txt)) - arr = bytearray(10) - self.assertEqual(response.readinto(arr), 10) - self.assertEqual(arr[:10], b"Checking r") - self.assertEqual(response.readinto(arr), 10) - self.assertEqual(arr[:10], b"eadinto wo") - self.assertEqual(response.readinto(arr), 10) - self.assertEqual(arr[:10], b"rks as exp") - self.assertEqual(response.readinto(arr), 5) - self.assertEqual(arr[:5], b"ected") - self.assertTrue(response.empty) - - def test_readinto_memoryview(self): - txt = b"Checking readinto works as expected" - response = binding.ResponseReader(BytesIO(txt)) - arr = bytearray(10) - mv = memoryview(arr) - self.assertEqual(response.readinto(mv), 10) - self.assertEqual(arr[:10], b"Checking r") - self.assertEqual(response.readinto(mv), 10) - self.assertEqual(arr[:10], b"eadinto wo") - self.assertEqual(response.readinto(mv), 10) - self.assertEqual(arr[:10], b"rks as exp") - self.assertEqual(response.readinto(mv), 5) - self.assertEqual(arr[:5], b"ected") - self.assertTrue(response.empty) - - -class TestUrlEncoded(BindingTestCase): - def test_idempotent(self): - a = UrlEncoded('abc') - self.assertEqual(a, UrlEncoded(a)) - - def test_append(self): - self.assertEqual(UrlEncoded('a') + UrlEncoded('b'), - UrlEncoded('ab')) - - def test_append_string(self): - self.assertEqual(UrlEncoded('a') + '%', - UrlEncoded('a%')) - - def test_append_to_string(self): - self.assertEqual('%' + UrlEncoded('a'), - UrlEncoded('%a')) - - def test_interpolation_fails(self): - self.assertRaises(TypeError, lambda: UrlEncoded('%s') % 'boris') - - def test_chars(self): - for char, code in [(' ', '%20'), - ('"', '%22'), - ('%', '%25')]: - self.assertEqual(UrlEncoded(char), - UrlEncoded(code, skip_encode=True)) - - def test_repr(self): - self.assertEqual(repr(UrlEncoded('% %')), "UrlEncoded('% %')") - - -class TestAuthority(unittest.TestCase): - def test_authority_default(self): - self.assertEqual(binding._authority(), - "https://localhost:8089") - - def test_ipv4_host(self): - self.assertEqual( - binding._authority( - host="splunk.utopia.net"), - "https://splunk.utopia.net:8089") - - def test_ipv6_host(self): - self.assertEqual( - binding._authority( - host="2001:0db8:85a3:0000:0000:8a2e:0370:7334"), - "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089") - - def test_ipv6_host_enclosed(self): - self.assertEqual( - binding._authority( - host="[2001:0db8:85a3:0000:0000:8a2e:0370:7334]"), - "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089") - - def test_all_fields(self): - self.assertEqual( - binding._authority( - scheme="http", - host="splunk.utopia.net", - port="471"), - "http://splunk.utopia.net:471") - - -class TestUserManipulation(BindingTestCase): - def setUp(self): - BindingTestCase.setUp(self) - self.username = testlib.tmpname() - self.password = "changeme!" 
- self.roles = "power" - - # Delete user if it exists already - try: - response = self.context.delete(PATH_USERS + self.username) - self.assertEqual(response.status, 200) - except HTTPError as e: - self.assertTrue(e.status in [400, 500]) - - def tearDown(self): - BindingTestCase.tearDown(self) - try: - self.context.delete(PATH_USERS + self.username) - except HTTPError as e: - if e.status not in [400, 500]: - raise - - def test_user_without_role_fails(self): - self.assertRaises(binding.HTTPError, - self.context.post, - PATH_USERS, name=self.username, - password=self.password) - - def test_create_user(self): - response = self.context.post( - PATH_USERS, name=self.username, - password=self.password, roles=self.roles) - self.assertEqual(response.status, 201) - - response = self.context.get(PATH_USERS + self.username) - entry = load(response).feed.entry - self.assertEqual(entry.title, self.username) - - def test_update_user(self): - self.test_create_user() - response = self.context.post( - PATH_USERS + self.username, - password=self.password, - roles=self.roles, - defaultApp="search", - realname="Renzo", - email="email.me@now.com") - self.assertEqual(response.status, 200) - - response = self.context.get(PATH_USERS + self.username) - self.assertEqual(response.status, 200) - entry = load(response).feed.entry - self.assertEqual(entry.title, self.username) - self.assertEqual(entry.content.defaultApp, "search") - self.assertEqual(entry.content.realname, "Renzo") - self.assertEqual(entry.content.email, "email.me@now.com") - - def test_post_with_body_behaves(self): - self.test_create_user() - response = self.context.post( - PATH_USERS + self.username, - body="defaultApp=search", - ) - self.assertEqual(response.status, 200) - - def test_post_with_get_arguments_to_receivers_stream(self): - text = 'Hello, world!' 
- response = self.context.post( - '/services/receivers/simple', - headers=[('x-splunk-input-mode', 'streaming')], - source='sdk', sourcetype='sdk_test', - body=text - ) - self.assertEqual(response.status, 200) - - -class TestSocket(BindingTestCase): - def test_socket(self): - socket = self.context.connect() - socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) - socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) - socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) - socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write("\r\n".encode('utf-8')) - socket.close() - - # Sockets take bytes not strings - # - # def test_unicode_socket(self): - # socket = self.context.connect() - # socket.write(u"POST %s HTTP/1.1\r\n" %\ - # self.context._abspath("some/path/to/post/to")) - # socket.write(u"Host: %s:%s\r\n" %\ - # (self.context.host, self.context.port)) - # socket.write(u"Accept-Encoding: identity\r\n") - # socket.write((u"Authorization: %s\r\n" %\ - # self.context.token).encode('utf-8')) - # socket.write(u"X-Splunk-Input-Mode: Streaming\r\n") - # socket.write("\r\n") - # socket.close() - - def test_socket_gethostbyname(self): - self.assertTrue(self.context.connect()) - self.context.host = socket.gethostbyname(self.context.host) - self.assertTrue(self.context.connect()) - - -class TestUnicodeConnect(BindingTestCase): - def test_unicode_connect(self): - opts = self.opts.kwargs.copy() - opts['host'] = str(opts['host']) - context = binding.connect(**opts) - # Just check to make sure the service is alive - response = context.get("/services") - self.assertEqual(response.status, 200) - - -@pytest.mark.smoke -class TestAutologin(BindingTestCase): - def test_with_autologin(self): - self.context.autologin = True - self.assertEqual(self.context.get("/services").status, 200) - self.context.logout() - self.assertEqual(self.context.get("/services").status, 200) - - def test_without_autologin(self): - self.context.autologin = False - self.assertEqual(self.context.get("/services").status, 200) - self.context.logout() - self.assertRaises(AuthenticationError, - self.context.get, "/services") - - -class TestAbspath(BindingTestCase): - def setUp(self): - BindingTestCase.setUp(self) - self.kwargs = self.opts.kwargs.copy() - if 'app' in self.kwargs: del self.kwargs['app'] - if 'owner' in self.kwargs: del self.kwargs['owner'] - - def test_default(self): - path = self.context._abspath("foo", owner=None, app=None) - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/services/foo") - - def test_with_owner(self): - path = self.context._abspath("foo", owner="me", app=None) - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/system/foo") - - def test_with_app(self): - path = self.context._abspath("foo", owner=None, app="MyApp") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_with_both(self): - path = self.context._abspath("foo", owner="me", app="MyApp") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/MyApp/foo") - - def test_user_sharing(self): - path = self.context._abspath("foo", owner="me", app="MyApp", sharing="user") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/MyApp/foo") - - def test_sharing_app(self): - path = 
self.context._abspath("foo", owner="me", app="MyApp", sharing="app") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_sharing_global(self): - path = self.context._abspath("foo", owner="me", app="MyApp", sharing="global") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_sharing_system(self): - path = self.context._abspath("foo bar", owner="me", app="MyApp", sharing="system") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/system/foo%20bar") - - def test_url_forbidden_characters(self): - path = self.context._abspath('/a/b c/d') - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, '/a/b%20c/d') - - def test_context_defaults(self): - context = binding.connect(**self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/services/foo") - - def test_context_with_owner(self): - context = binding.connect(owner="me", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/system/foo") - - def test_context_with_app(self): - context = binding.connect(app="MyApp", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_context_with_both(self): - context = binding.connect(owner="me", app="MyApp", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/MyApp/foo") - - def test_context_with_user_sharing(self): - context = binding.connect( - owner="me", app="MyApp", sharing="user", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me/MyApp/foo") - - def test_context_with_app_sharing(self): - context = binding.connect( - owner="me", app="MyApp", sharing="app", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_context_with_global_sharing(self): - context = binding.connect( - owner="me", app="MyApp", sharing="global", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") - - def test_context_with_system_sharing(self): - context = binding.connect( - owner="me", app="MyApp", sharing="system", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/nobody/system/foo") - - def test_context_with_owner_as_email(self): - context = binding.connect(owner="me@me.com", **self.kwargs) - path = context._abspath("foo") - self.assertTrue(isinstance(path, UrlEncoded)) - self.assertEqual(path, "/servicesNS/me%40me.com/system/foo") - self.assertEqual(path, UrlEncoded("/servicesNS/me@me.com/system/foo")) - - -# An urllib2 based HTTP request handler, used to test the binding layers -# support for pluggable request handlers. 
-def urllib2_handler(url, message, **kwargs): - method = message['method'].lower() - data = message.get('body', b"") if method == 'post' else None - headers = dict(message.get('headers', [])) - req = Request(url, data, headers) - try: - response = urlopen(req, context=ssl._create_unverified_context()) - except HTTPError as response: - pass # Propagate HTTP errors via the returned response message - return { - 'status': response.code, - 'reason': response.msg, - 'headers': dict(response.info()), - 'body': BytesIO(response.read()) - } - - -def isatom(body): - """Answers if the given response body looks like ATOM.""" - root = XML(body) - return \ - root.tag == XNAME_FEED and \ - root.find(XNAME_AUTHOR) is not None and \ - root.find(XNAME_ID) is not None and \ - root.find(XNAME_TITLE) is not None - - -class TestPluggableHTTP(testlib.SDKTestCase): - # Verify pluggable HTTP reqeust handlers. - def test_handlers(self): - paths = ["/services", "authentication/users", - "search/jobs"] - handlers = [binding.handler(), # default handler - urllib2_handler] - for handler in handlers: - logging.debug("Connecting with handler %s", handler) - context = binding.connect( - handler=handler, - **self.opts.kwargs) - for path in paths: - body = context.get(path).body.read() - self.assertTrue(isatom(body)) - - -def urllib2_insert_cookie_handler(url, message, **kwargs): - method = message['method'].lower() - data = message.get('body', b"") if method == 'post' else None - headers = dict(message.get('headers', [])) - req = Request(url, data, headers) - try: - response = urlopen(req, context=ssl._create_unverified_context()) - except HTTPError as response: - pass # Propagate HTTP errors via the returned response message - - # Mimic the insertion of 3rd party cookies into the response. - # An example is "sticky session"/"insert cookie" persistence - # of a load balancer for a SHC. - header_list = list(response.info().items()) - header_list.append(("Set-Cookie", "BIGipServer_splunk-shc-8089=1234567890.12345.0000; path=/; Httponly; Secure")) - header_list.append(("Set-Cookie", "home_made=yummy")) - - return { - 'status': response.code, - 'reason': response.msg, - 'headers': header_list, - 'body': BytesIO(response.read()) - } - - -class TestCookiePersistence(testlib.SDKTestCase): - # Verify persistence of 3rd party inserted cookies. 
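The handler contract shown above — a callable accepting :code:`(url, message, **kwargs)` and returning a
dict with :code:`status`, :code:`reason`, :code:`headers`, and :code:`body` — makes it straightforward to
plug a custom transport into :code:`binding.connect(handler=...)`. A minimal sketch; the request logging
is illustrative and not part of these tests:

.. code-block:: python

    import ssl
    from io import BytesIO
    from urllib.error import HTTPError
    from urllib.request import Request, urlopen

    def logging_handler(url, message, **kwargs):
        method = message['method'].lower()
        data = message.get('body', b"") if method == 'post' else None
        req = Request(url, data, dict(message.get('headers', [])))
        try:
            response = urlopen(req, context=ssl._create_unverified_context())
        except HTTPError as error:
            response = error  # an HTTPError doubles as a response object
        print(method.upper(), url, '->', response.code)  # the only addition over urllib2_handler
        return {
            'status': response.code,
            'reason': response.msg,
            'headers': dict(response.info()),
            'body': BytesIO(response.read()),
        }

    # context = binding.connect(handler=logging_handler, **opts.kwargs)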
- def test_3rdPartyInsertedCookiePersistence(self): - paths = ["/services", "authentication/users", - "search/jobs"] - logging.debug("Connecting with urllib2_insert_cookie_handler %s", urllib2_insert_cookie_handler) - context = binding.connect( - handler=urllib2_insert_cookie_handler, - **self.opts.kwargs) - - persisted_cookies = context.get_cookies() - - splunk_token_found = False - for k, v in persisted_cookies.items(): - if k[:8] == "splunkd_": - splunk_token_found = True - break - - self.assertEqual(splunk_token_found, True) - self.assertEqual(persisted_cookies['BIGipServer_splunk-shc-8089'], "1234567890.12345.0000") - self.assertEqual(persisted_cookies['home_made'], "yummy") - - -@pytest.mark.smoke -class TestLogout(BindingTestCase): - def test_logout(self): - response = self.context.get("/services") - self.assertEqual(response.status, 200) - self.context.logout() - self.assertEqual(self.context.token, binding._NoAuthenticationToken) - self.assertEqual(self.context.get_cookies(), {}) - self.assertRaises(AuthenticationError, - self.context.get, "/services") - self.assertRaises(AuthenticationError, - self.context.post, "/services") - self.assertRaises(AuthenticationError, - self.context.delete, "/services") - self.context.login() - response = self.context.get("/services") - self.assertEqual(response.status, 200) - - -class TestCookieAuthentication(unittest.TestCase): - def setUp(self): - self.opts = testlib.parse([], {}, ".env") - self.context = binding.connect(**self.opts.kwargs) - - # Skip these tests if running below Splunk 6.2, cookie-auth didn't exist before - from splunklib import client - service = client.Service(**self.opts.kwargs) - # TODO: Workaround the fact that skipTest is not defined by unittest2.TestCase - service.login() - splver = service.splunk_version - if splver[:2] < (6, 2): - self.skipTest("Skipping cookie-auth tests, running in %d.%d.%d, this feature was added in 6.2+" % splver) - - if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: - - def assertIsNotNone(self, obj, msg=None): - if obj is None: - raise self.failureException(msg or '%r is not None' % obj) - - @pytest.mark.smoke - def test_cookie_in_auth_headers(self): - self.assertIsNotNone(self.context._auth_headers) - self.assertNotEqual(self.context._auth_headers, []) - self.assertEqual(len(self.context._auth_headers), 1) - self.assertEqual(len(self.context._auth_headers), 1) - self.assertEqual(self.context._auth_headers[0][0], "Cookie") - self.assertEqual(self.context._auth_headers[0][1][:8], "splunkd_") - - @pytest.mark.smoke - def test_got_cookie_on_connect(self): - self.assertIsNotNone(self.context.get_cookies()) - self.assertNotEqual(self.context.get_cookies(), {}) - self.assertEqual(len(self.context.get_cookies()), 1) - self.assertEqual(list(self.context.get_cookies().keys())[0][:8], "splunkd_") - - @pytest.mark.smoke - def test_cookie_with_autologin(self): - self.context.autologin = True - self.assertEqual(self.context.get("/services").status, 200) - self.assertTrue(self.context.has_cookies()) - self.context.logout() - self.assertFalse(self.context.has_cookies()) - self.assertEqual(self.context.get("/services").status, 200) - self.assertTrue(self.context.has_cookies()) - - @pytest.mark.smoke - def test_cookie_without_autologin(self): - self.context.autologin = False - self.assertEqual(self.context.get("/services").status, 200) - self.assertTrue(self.context.has_cookies()) - self.context.logout() - self.assertFalse(self.context.has_cookies()) - self.assertRaises(AuthenticationError, - 
self.context.get, "/services") - - @pytest.mark.smoke - def test_got_updated_cookie_with_get(self): - old_cookies = self.context.get_cookies() - resp = self.context.get("apps/local") - found = False - for key, value in resp.headers: - if key.lower() == "set-cookie": - found = True - self.assertEqual(value[:8], "splunkd_") - - new_cookies = {} - binding._parse_cookies(value, new_cookies) - # We're only expecting 1 in this scenario - self.assertEqual(len(old_cookies), 1) - self.assertTrue(len(list(new_cookies.values())), 1) - self.assertEqual(old_cookies, new_cookies) - self.assertEqual(list(new_cookies.values())[0], list(old_cookies.values())[0]) - self.assertTrue(found) - - @pytest.mark.smoke - def test_login_fails_with_bad_cookie(self): - # We should get an error if using a bad cookie - try: - binding.connect(**{"cookie": "bad=cookie"}) - self.fail() - except AuthenticationError as ae: - self.assertEqual(str(ae), "Login failed.") - - @pytest.mark.smoke - def test_login_with_multiple_cookies(self): - # We should get an error if using a bad cookie - new_context = binding.Context() - new_context.get_cookies().update({"bad": "cookie"}) - try: - new_context = new_context.login() - self.fail() - except AuthenticationError as ae: - self.assertEqual(str(ae), "Login failed.") - # Bring in a valid cookie now - for key, value in self.context.get_cookies().items(): - new_context.get_cookies()[key] = value - - self.assertEqual(len(new_context.get_cookies()), 2) - self.assertTrue('bad' in list(new_context.get_cookies().keys())) - self.assertTrue('cookie' in list(new_context.get_cookies().values())) - - for k, v in self.context.get_cookies().items(): - self.assertEqual(new_context.get_cookies()[k], v) - - self.assertEqual(new_context.get("apps/local").status, 200) - - @pytest.mark.smoke - def test_login_fails_without_cookie_or_token(self): - opts = { - 'host': self.opts.kwargs['host'], - 'port': self.opts.kwargs['port'] - } - try: - binding.connect(**opts) - self.fail() - except AuthenticationError as ae: - self.assertEqual(str(ae), "Login failed.") - - -class TestNamespace(unittest.TestCase): - def test_namespace(self): - tests = [ - ({}, - {'sharing': None, 'owner': None, 'app': None}), - - ({'owner': "Bob"}, - {'sharing': None, 'owner': "Bob", 'app': None}), - - ({'app': "search"}, - {'sharing': None, 'owner': None, 'app': "search"}), - - ({'owner': "Bob", 'app': "search"}, - {'sharing': None, 'owner': "Bob", 'app': "search"}), - - ({'sharing': "user", 'owner': "Bob@bob.com"}, - {'sharing': "user", 'owner': "Bob@bob.com", 'app': None}), - - ({'sharing': "user"}, - {'sharing': "user", 'owner': None, 'app': None}), - - ({'sharing': "user", 'owner': "Bob"}, - {'sharing': "user", 'owner': "Bob", 'app': None}), - - ({'sharing': "user", 'app': "search"}, - {'sharing': "user", 'owner': None, 'app': "search"}), - - ({'sharing': "user", 'owner': "Bob", 'app': "search"}, - {'sharing': "user", 'owner': "Bob", 'app': "search"}), - - ({'sharing': "app"}, - {'sharing': "app", 'owner': "nobody", 'app': None}), - - ({'sharing': "app", 'owner': "Bob"}, - {'sharing': "app", 'owner': "nobody", 'app': None}), - - ({'sharing': "app", 'app': "search"}, - {'sharing': "app", 'owner': "nobody", 'app': "search"}), - - ({'sharing': "app", 'owner': "Bob", 'app': "search"}, - {'sharing': "app", 'owner': "nobody", 'app': "search"}), - - ({'sharing': "global"}, - {'sharing': "global", 'owner': "nobody", 'app': None}), - - ({'sharing': "global", 'owner': "Bob"}, - {'sharing': "global", 'owner': "nobody", 'app': None}), - - 
({'sharing': "global", 'app': "search"}, - {'sharing': "global", 'owner': "nobody", 'app': "search"}), - - ({'sharing': "global", 'owner': "Bob", 'app': "search"}, - {'sharing': "global", 'owner': "nobody", 'app': "search"}), - - ({'sharing': "system"}, - {'sharing': "system", 'owner': "nobody", 'app': "system"}), - - ({'sharing': "system", 'owner': "Bob"}, - {'sharing': "system", 'owner': "nobody", 'app': "system"}), - - ({'sharing': "system", 'app': "search"}, - {'sharing': "system", 'owner': "nobody", 'app': "system"}), - - ({'sharing': "system", 'owner': "Bob", 'app': "search"}, - {'sharing': "system", 'owner': "nobody", 'app': "system"}), - - ({'sharing': 'user', 'owner': '-', 'app': '-'}, - {'sharing': 'user', 'owner': '-', 'app': '-'})] - - for kwargs, expected in tests: - namespace = binding.namespace(**kwargs) - for k, v in expected.items(): - self.assertEqual(namespace[k], v) - - def test_namespace_fails(self): - self.assertRaises(ValueError, binding.namespace, sharing="gobble") - - -@pytest.mark.smoke -class TestBasicAuthentication(unittest.TestCase): - def setUp(self): - self.opts = testlib.parse([], {}, ".env") - opts = self.opts.kwargs.copy() - opts["basic"] = True - opts["username"] = self.opts.kwargs["username"] - opts["password"] = self.opts.kwargs["password"] - - self.context = binding.connect(**opts) - from splunklib import client - service = client.Service(**opts) - - if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: - def assertIsNotNone(self, obj, msg=None): - if obj is None: - raise self.failureException(msg or '%r is not None' % obj) - - def test_basic_in_auth_headers(self): - self.assertIsNotNone(self.context._auth_headers) - self.assertNotEqual(self.context._auth_headers, []) - self.assertEqual(len(self.context._auth_headers), 1) - self.assertEqual(len(self.context._auth_headers), 1) - self.assertEqual(self.context._auth_headers[0][0], "Authorization") - self.assertEqual(self.context._auth_headers[0][1][:6], "Basic ") - self.assertEqual(self.context.get("/services").status, 200) - - -@pytest.mark.smoke -class TestTokenAuthentication(BindingTestCase): - def test_preexisting_token(self): - token = self.context.token - opts = self.opts.kwargs.copy() - opts["token"] = token - opts["username"] = "boris the mad baboon" - opts["password"] = "nothing real" - - newContext = binding.Context(**opts) - response = newContext.get("/services") - self.assertEqual(response.status, 200) - - socket = newContext.connect() - socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) - socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) - socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) - socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write("\r\n".encode('utf-8')) - socket.close() - - def test_preexisting_token_sans_splunk(self): - token = self.context.token - if token.startswith('Splunk '): - token = token.split(' ', 1)[1] - self.assertFalse(token.startswith('Splunk ')) - else: - self.fail('Token did not start with "Splunk ".') - opts = self.opts.kwargs.copy() - opts["token"] = token - opts["username"] = "boris the mad baboon" - opts["password"] = "nothing real" - - newContext = binding.Context(**opts) - response = newContext.get("/services") - self.assertEqual(response.status, 200) - - socket = newContext.connect() - socket.write((f"POST 
{self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) - socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) - socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) - socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write("\r\n".encode('utf-8')) - socket.close() - - def test_connect_with_preexisting_token_sans_user_and_pass(self): - token = self.context.token - opts = self.opts.kwargs.copy() - del opts['username'] - del opts['password'] - opts["token"] = token - - newContext = binding.connect(**opts) - response = newContext.get('/services') - self.assertEqual(response.status, 200) - - socket = newContext.connect() - socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) - socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) - socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) - socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) - socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) - socket.write("\r\n".encode('utf-8')) - socket.close() - - -class TestPostWithBodyParam(unittest.TestCase): - - def test_post(self): - def handler(url, message, **kwargs): - assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" - assert message["body"] == b"testkey=testvalue" - return splunklib.data.Record({ - "status": 200, - "headers": [], - }) - - ctx = binding.Context(handler=handler) - ctx.post("foo/bar", owner="testowner", app="testapp", body={"testkey": "testvalue"}) - - def test_post_with_params_and_body(self): - def handler(url, message, **kwargs): - assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar?extrakey=extraval" - assert message["body"] == b"testkey=testvalue" - return splunklib.data.Record({ - "status": 200, - "headers": [], - }) - - ctx = binding.Context(handler=handler) - ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp", body={"testkey": "testvalue"}) - - def test_post_with_params_and_no_body(self): - def handler(url, message, **kwargs): - assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" - assert message["body"] == b"extrakey=extraval" - return splunklib.data.Record({ - "status": 200, - "headers": [], - }) - - ctx = binding.Context(handler=handler) - ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp") - - -def _wrap_handler(func, response_code=200, body=""): - def wrapped(handler_self): - result = func(handler_self) - if result is None: - handler_self.send_response(response_code) - handler_self.end_headers() - handler_self.wfile.write(body) - - return wrapped - - -class MockServer: - def __init__(self, port=9093, **handlers): - methods = {"do_" + k: _wrap_handler(v) for (k, v) in handlers.items()} - - def init(handler_self, socket, address, server): - BaseHTTPServer.BaseHTTPRequestHandler.__init__(handler_self, socket, address, server) - - def log(*args): # To silence server access logs - pass - - methods["__init__"] = init - methods["log_message"] = log - Handler = type("Handler", - (BaseHTTPServer.BaseHTTPRequestHandler, object), - methods) - self._svr = BaseHTTPServer.HTTPServer(("localhost", port), Handler) - - def run(): - self._svr.handle_request() - - self._thread = Thread(target=run) - self._thread.daemon = True - - def __enter__(self): - self._thread.start() - return 
self._svr - - def __exit__(self, typ, value, traceback): - self._thread.join(10) - self._svr.server_close() - - -class TestFullPost(unittest.TestCase): - - def test_post_with_body_urlencoded(self): - def check_response(handler): - length = int(handler.headers.get('content-length', 0)) - body = handler.rfile.read(length) - assert body.decode('utf-8') == "foo=bar" - - with MockServer(POST=check_response): - ctx = binding.connect(port=9093, scheme='http', token="waffle") - ctx.post("/", foo="bar") - - def test_post_with_body_string(self): - def check_response(handler): - length = int(handler.headers.get('content-length', 0)) - body = handler.rfile.read(length) - assert handler.headers['content-type'] == 'application/json' - assert json.loads(body)["baz"] == "baf" - - with MockServer(POST=check_response): - ctx = binding.connect(port=9093, scheme='http', token="waffle", - headers=[("Content-Type", "application/json")]) - ctx.post("/", foo="bar", body='{"baz": "baf"}') - - def test_post_with_body_dict(self): - def check_response(handler): - length = int(handler.headers.get('content-length', 0)) - body = handler.rfile.read(length) - assert handler.headers['content-type'] == 'application/x-www-form-urlencoded' - assert ensure_str(body) in ['baz=baf&hep=cat', 'hep=cat&baz=baf'] - - with MockServer(POST=check_response): - ctx = binding.connect(port=9093, scheme='http', token="waffle") - ctx.post("/", foo="bar", body={"baz": "baf", "hep": "cat"}) - - -if __name__ == "__main__": - unittest.main() +#!/usr/bin/env python +# +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+
+from http import server as BaseHTTPServer
+from io import BytesIO, StringIO
+from threading import Thread
+from urllib.request import Request, urlopen
+import urllib.error  # urlopen raises urllib.error.HTTPError, not splunklib.binding.HTTPError
+from xml.etree.ElementTree import XML
+
+import json
+import logging
+from tests import testlib
+import unittest
+import socket
+import ssl
+
+import splunklib
+from splunklib import binding
+from splunklib.binding import HTTPError, AuthenticationError, UrlEncoded
+from splunklib import data
+from splunklib.utils import ensure_str
+
+import pytest
+
+# splunkd endpoint paths
+PATH_USERS = "authentication/users/"
+
+# XML Namespaces
+NAMESPACE_ATOM = "http://www.w3.org/2005/Atom"
+NAMESPACE_REST = "http://dev.splunk.com/ns/rest"
+NAMESPACE_OPENSEARCH = "http://a9.com/-/spec/opensearch/1.1"
+
+# XML Extended Name Fragments
+XNAMEF_ATOM = "{%s}%%s" % NAMESPACE_ATOM
+XNAMEF_REST = "{%s}%%s" % NAMESPACE_REST
+XNAMEF_OPENSEARCH = "{%s}%%s" % NAMESPACE_OPENSEARCH
+
+# XML Extended Names
+XNAME_AUTHOR = XNAMEF_ATOM % "author"
+XNAME_ENTRY = XNAMEF_ATOM % "entry"
+XNAME_FEED = XNAMEF_ATOM % "feed"
+XNAME_ID = XNAMEF_ATOM % "id"
+XNAME_TITLE = XNAMEF_ATOM % "title"
+
+
+def load(response):
+    return data.load(response.body.read())
+
+
+class BindingTestCase(unittest.TestCase):
+    context = None
+
+    def setUp(self):
+        logging.info("%s", self.__class__.__name__)
+        self.opts = testlib.parse([], {}, ".env")
+        self.context = binding.connect(**self.opts.kwargs)
+        logging.debug("Connected to splunkd.")
+
+
+class TestResponseReader(BindingTestCase):
+    def test_empty(self):
+        response = binding.ResponseReader(BytesIO(b""))
+        self.assertTrue(response.empty)
+        self.assertEqual(response.peek(10), b"")
+        self.assertEqual(response.read(10), b"")
+
+        arr = bytearray(10)
+        self.assertEqual(response.readinto(arr), 0)
+        self.assertEqual(arr, bytearray(10))
+        self.assertTrue(response.empty)
+
+    def test_read_past_end(self):
+        txt = b"abcd"
+        response = binding.ResponseReader(BytesIO(txt))
+        self.assertFalse(response.empty)
+        self.assertEqual(response.peek(10), txt)
+        self.assertEqual(response.read(10), txt)
+        self.assertTrue(response.empty)
+        self.assertEqual(response.peek(10), b"")
+        self.assertEqual(response.read(10), b"")
+
+    def test_read_partial(self):
+        txt = b"This is a test of the emergency broadcasting system."
+ response = binding.ResponseReader(BytesIO(txt)) + self.assertEqual(response.peek(5), txt[:5]) + self.assertFalse(response.empty) + self.assertEqual(response.read(), txt) + self.assertTrue(response.empty) + self.assertEqual(response.read(), b'') + + def test_readable(self): + txt = "abcd" + response = binding.ResponseReader(StringIO(txt)) + self.assertTrue(response.readable()) + + def test_readinto_bytearray(self): + txt = b"Checking readinto works as expected" + response = binding.ResponseReader(BytesIO(txt)) + arr = bytearray(10) + self.assertEqual(response.readinto(arr), 10) + self.assertEqual(arr[:10], b"Checking r") + self.assertEqual(response.readinto(arr), 10) + self.assertEqual(arr[:10], b"eadinto wo") + self.assertEqual(response.readinto(arr), 10) + self.assertEqual(arr[:10], b"rks as exp") + self.assertEqual(response.readinto(arr), 5) + self.assertEqual(arr[:5], b"ected") + self.assertTrue(response.empty) + + def test_readinto_memoryview(self): + txt = b"Checking readinto works as expected" + response = binding.ResponseReader(BytesIO(txt)) + arr = bytearray(10) + mv = memoryview(arr) + self.assertEqual(response.readinto(mv), 10) + self.assertEqual(arr[:10], b"Checking r") + self.assertEqual(response.readinto(mv), 10) + self.assertEqual(arr[:10], b"eadinto wo") + self.assertEqual(response.readinto(mv), 10) + self.assertEqual(arr[:10], b"rks as exp") + self.assertEqual(response.readinto(mv), 5) + self.assertEqual(arr[:5], b"ected") + self.assertTrue(response.empty) + + +class TestUrlEncoded(BindingTestCase): + def test_idempotent(self): + a = UrlEncoded('abc') + self.assertEqual(a, UrlEncoded(a)) + + def test_append(self): + self.assertEqual(UrlEncoded('a') + UrlEncoded('b'), + UrlEncoded('ab')) + + def test_append_string(self): + self.assertEqual(UrlEncoded('a') + '%', + UrlEncoded('a%')) + + def test_append_to_string(self): + self.assertEqual('%' + UrlEncoded('a'), + UrlEncoded('%a')) + + def test_interpolation_fails(self): + self.assertRaises(TypeError, lambda: UrlEncoded('%s') % 'boris') + + def test_chars(self): + for char, code in [(' ', '%20'), + ('"', '%22'), + ('%', '%25')]: + self.assertEqual(UrlEncoded(char), + UrlEncoded(code, skip_encode=True)) + + def test_repr(self): + self.assertEqual(repr(UrlEncoded('% %')), "UrlEncoded('% %')") + + +class TestAuthority(unittest.TestCase): + def test_authority_default(self): + self.assertEqual(binding._authority(), + "https://localhost:8089") + + def test_ipv4_host(self): + self.assertEqual( + binding._authority( + host="splunk.utopia.net"), + "https://splunk.utopia.net:8089") + + def test_ipv6_host(self): + self.assertEqual( + binding._authority( + host="2001:0db8:85a3:0000:0000:8a2e:0370:7334"), + "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089") + + def test_ipv6_host_enclosed(self): + self.assertEqual( + binding._authority( + host="[2001:0db8:85a3:0000:0000:8a2e:0370:7334]"), + "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089") + + def test_all_fields(self): + self.assertEqual( + binding._authority( + scheme="http", + host="splunk.utopia.net", + port="471"), + "http://splunk.utopia.net:471") + + +class TestUserManipulation(BindingTestCase): + def setUp(self): + BindingTestCase.setUp(self) + self.username = testlib.tmpname() + self.password = "changeme!" 
+ self.roles = "power" + + # Delete user if it exists already + try: + response = self.context.delete(PATH_USERS + self.username) + self.assertEqual(response.status, 200) + except HTTPError as e: + self.assertTrue(e.status in [400, 500]) + + def tearDown(self): + BindingTestCase.tearDown(self) + try: + self.context.delete(PATH_USERS + self.username) + except HTTPError as e: + if e.status not in [400, 500]: + raise + + def test_user_without_role_fails(self): + self.assertRaises(binding.HTTPError, + self.context.post, + PATH_USERS, name=self.username, + password=self.password) + + def test_create_user(self): + response = self.context.post( + PATH_USERS, name=self.username, + password=self.password, roles=self.roles) + self.assertEqual(response.status, 201) + + response = self.context.get(PATH_USERS + self.username) + entry = load(response).feed.entry + self.assertEqual(entry.title, self.username) + + def test_update_user(self): + self.test_create_user() + response = self.context.post( + PATH_USERS + self.username, + password=self.password, + roles=self.roles, + defaultApp="search", + realname="Renzo", + email="email.me@now.com") + self.assertEqual(response.status, 200) + + response = self.context.get(PATH_USERS + self.username) + self.assertEqual(response.status, 200) + entry = load(response).feed.entry + self.assertEqual(entry.title, self.username) + self.assertEqual(entry.content.defaultApp, "search") + self.assertEqual(entry.content.realname, "Renzo") + self.assertEqual(entry.content.email, "email.me@now.com") + + def test_post_with_body_behaves(self): + self.test_create_user() + response = self.context.post( + PATH_USERS + self.username, + body="defaultApp=search", + ) + self.assertEqual(response.status, 200) + + def test_post_with_get_arguments_to_receivers_stream(self): + text = 'Hello, world!' 
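+        # Because an explicit body is supplied, the remaining keyword
+        # arguments (source, sourcetype) are sent as query parameters.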
+ response = self.context.post( + '/services/receivers/simple', + headers=[('x-splunk-input-mode', 'streaming')], + source='sdk', sourcetype='sdk_test', + body=text + ) + self.assertEqual(response.status, 200) + + +class TestSocket(BindingTestCase): + def test_socket(self): + socket = self.context.connect() + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) + socket.close() + + # Sockets take bytes not strings + # + # def test_unicode_socket(self): + # socket = self.context.connect() + # socket.write(u"POST %s HTTP/1.1\r\n" %\ + # self.context._abspath("some/path/to/post/to")) + # socket.write(u"Host: %s:%s\r\n" %\ + # (self.context.host, self.context.port)) + # socket.write(u"Accept-Encoding: identity\r\n") + # socket.write((u"Authorization: %s\r\n" %\ + # self.context.token).encode('utf-8')) + # socket.write(u"X-Splunk-Input-Mode: Streaming\r\n") + # socket.write("\r\n") + # socket.close() + + def test_socket_gethostbyname(self): + self.assertTrue(self.context.connect()) + self.context.host = socket.gethostbyname(self.context.host) + self.assertTrue(self.context.connect()) + + +class TestUnicodeConnect(BindingTestCase): + def test_unicode_connect(self): + opts = self.opts.kwargs.copy() + opts['host'] = str(opts['host']) + context = binding.connect(**opts) + # Just check to make sure the service is alive + response = context.get("/services") + self.assertEqual(response.status, 200) + + +@pytest.mark.smoke +class TestAutologin(BindingTestCase): + def test_with_autologin(self): + self.context.autologin = True + self.assertEqual(self.context.get("/services").status, 200) + self.context.logout() + self.assertEqual(self.context.get("/services").status, 200) + + def test_without_autologin(self): + self.context.autologin = False + self.assertEqual(self.context.get("/services").status, 200) + self.context.logout() + self.assertRaises(AuthenticationError, + self.context.get, "/services") + + +class TestAbspath(BindingTestCase): + def setUp(self): + BindingTestCase.setUp(self) + self.kwargs = self.opts.kwargs.copy() + if 'app' in self.kwargs: del self.kwargs['app'] + if 'owner' in self.kwargs: del self.kwargs['owner'] + + def test_default(self): + path = self.context._abspath("foo", owner=None, app=None) + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/services/foo") + + def test_with_owner(self): + path = self.context._abspath("foo", owner="me", app=None) + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/system/foo") + + def test_with_app(self): + path = self.context._abspath("foo", owner=None, app="MyApp") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_with_both(self): + path = self.context._abspath("foo", owner="me", app="MyApp") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/MyApp/foo") + + def test_user_sharing(self): + path = self.context._abspath("foo", owner="me", app="MyApp", sharing="user") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/MyApp/foo") + + def test_sharing_app(self): + path = 
self.context._abspath("foo", owner="me", app="MyApp", sharing="app") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_sharing_global(self): + path = self.context._abspath("foo", owner="me", app="MyApp", sharing="global") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_sharing_system(self): + path = self.context._abspath("foo bar", owner="me", app="MyApp", sharing="system") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/system/foo%20bar") + + def test_url_forbidden_characters(self): + path = self.context._abspath('/a/b c/d') + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, '/a/b%20c/d') + + def test_context_defaults(self): + context = binding.connect(**self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/services/foo") + + def test_context_with_owner(self): + context = binding.connect(owner="me", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/system/foo") + + def test_context_with_app(self): + context = binding.connect(app="MyApp", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_context_with_both(self): + context = binding.connect(owner="me", app="MyApp", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/MyApp/foo") + + def test_context_with_user_sharing(self): + context = binding.connect( + owner="me", app="MyApp", sharing="user", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me/MyApp/foo") + + def test_context_with_app_sharing(self): + context = binding.connect( + owner="me", app="MyApp", sharing="app", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_context_with_global_sharing(self): + context = binding.connect( + owner="me", app="MyApp", sharing="global", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/MyApp/foo") + + def test_context_with_system_sharing(self): + context = binding.connect( + owner="me", app="MyApp", sharing="system", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/nobody/system/foo") + + def test_context_with_owner_as_email(self): + context = binding.connect(owner="me@me.com", **self.kwargs) + path = context._abspath("foo") + self.assertTrue(isinstance(path, UrlEncoded)) + self.assertEqual(path, "/servicesNS/me%40me.com/system/foo") + self.assertEqual(path, UrlEncoded("/servicesNS/me@me.com/system/foo")) + + +# An urllib2 based HTTP request handler, used to test the binding layers +# support for pluggable request handlers. 
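+# A handler is a callable of the form handler(url, message, **kwargs), where
+# message carries the request 'method', 'headers', and 'body'; it must return
+# a dict with 'status', 'reason', 'headers', and a file-like 'body'.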
+def urllib2_handler(url, message, **kwargs):
+    method = message['method'].lower()
+    data = message.get('body', b"") if method == 'post' else None
+    headers = dict(message.get('headers', []))
+    req = Request(url, data, headers)
+    try:
+        response = urlopen(req, context=ssl._create_unverified_context())
+    except urllib.error.HTTPError as response:
+        pass  # Propagate HTTP errors via the returned response message
+    return {
+        'status': response.code,
+        'reason': response.msg,
+        'headers': dict(response.info()),
+        'body': BytesIO(response.read())
+    }
+
+
+def isatom(body):
+    """Answers if the given response body looks like an Atom feed."""
+    root = XML(body)
+    return \
+        root.tag == XNAME_FEED and \
+        root.find(XNAME_AUTHOR) is not None and \
+        root.find(XNAME_ID) is not None and \
+        root.find(XNAME_TITLE) is not None
+
+
+class TestPluggableHTTP(testlib.SDKTestCase):
+    # Verify pluggable HTTP request handlers.
+    def test_handlers(self):
+        paths = ["/services", "authentication/users",
+                 "search/jobs"]
+        handlers = [binding.handler(),  # default handler
+                    urllib2_handler]
+        for handler in handlers:
+            logging.debug("Connecting with handler %s", handler)
+            context = binding.connect(
+                handler=handler,
+                **self.opts.kwargs)
+            for path in paths:
+                body = context.get(path).body.read()
+                self.assertTrue(isatom(body))
+
+
+def urllib2_insert_cookie_handler(url, message, **kwargs):
+    method = message['method'].lower()
+    data = message.get('body', b"") if method == 'post' else None
+    headers = dict(message.get('headers', []))
+    req = Request(url, data, headers)
+    try:
+        response = urlopen(req, context=ssl._create_unverified_context())
+    except urllib.error.HTTPError as response:
+        pass  # Propagate HTTP errors via the returned response message
+
+    # Mimic the insertion of 3rd party cookies into the response.
+    # An example is "sticky session"/"insert cookie" persistence
+    # of a load balancer for a SHC.
+    header_list = list(response.info().items())
+    header_list.append(("Set-Cookie", "BIGipServer_splunk-shc-8089=1234567890.12345.0000; path=/; Httponly; Secure"))
+    header_list.append(("Set-Cookie", "home_made=yummy"))
+
+    return {
+        'status': response.code,
+        'reason': response.msg,
+        'headers': header_list,
+        'body': BytesIO(response.read())
+    }
+
+
+class TestCookiePersistence(testlib.SDKTestCase):
+    # Verify persistence of 3rd party inserted cookies.
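+    # A load balancer fronting a search head cluster, for example, may insert
+    # its own session cookies; they must be kept alongside the splunkd token.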
+    def test_3rdPartyInsertedCookiePersistence(self):
+        paths = ["/services", "authentication/users",
+                 "search/jobs"]
+        logging.debug("Connecting with urllib2_insert_cookie_handler %s", urllib2_insert_cookie_handler)
+        context = binding.connect(
+            handler=urllib2_insert_cookie_handler,
+            **self.opts.kwargs)
+
+        persisted_cookies = context.get_cookies()
+
+        splunk_token_found = False
+        for k, v in persisted_cookies.items():
+            if k[:8] == "splunkd_":
+                splunk_token_found = True
+                break
+
+        self.assertEqual(splunk_token_found, True)
+        self.assertEqual(persisted_cookies['BIGipServer_splunk-shc-8089'], "1234567890.12345.0000")
+        self.assertEqual(persisted_cookies['home_made'], "yummy")
+
+
+@pytest.mark.smoke
+class TestLogout(BindingTestCase):
+    def test_logout(self):
+        response = self.context.get("/services")
+        self.assertEqual(response.status, 200)
+        self.context.logout()
+        self.assertEqual(self.context.token, binding._NoAuthenticationToken)
+        self.assertEqual(self.context.get_cookies(), {})
+        self.assertRaises(AuthenticationError,
+                          self.context.get, "/services")
+        self.assertRaises(AuthenticationError,
+                          self.context.post, "/services")
+        self.assertRaises(AuthenticationError,
+                          self.context.delete, "/services")
+        self.context.login()
+        response = self.context.get("/services")
+        self.assertEqual(response.status, 200)
+
+
+class TestCookieAuthentication(unittest.TestCase):
+    def setUp(self):
+        self.opts = testlib.parse([], {}, ".env")
+        self.context = binding.connect(**self.opts.kwargs)
+
+        # Skip these tests if running below Splunk 6.2; cookie-based auth didn't exist before then
+        from splunklib import client
+        service = client.Service(**self.opts.kwargs)
+        # login() is needed here so that splunk_version can be read from the server
+        service.login()
+        splver = service.splunk_version
+        if splver[:2] < (6, 2):
+            self.skipTest("Skipping cookie-auth tests, running in %d.%d.%d, this feature was added in 6.2+" % splver)
+
+    if getattr(unittest.TestCase, 'assertIsNotNone', None) is None:
+
+        def assertIsNotNone(self, obj, msg=None):
+            if obj is None:
+                raise self.failureException(msg or '%r is not None' % obj)
+
+    @pytest.mark.smoke
+    def test_cookie_in_auth_headers(self):
+        self.assertIsNotNone(self.context._auth_headers)
+        self.assertNotEqual(self.context._auth_headers, [])
+        self.assertEqual(len(self.context._auth_headers), 1)
+        self.assertEqual(self.context._auth_headers[0][0], "Cookie")
+        self.assertEqual(self.context._auth_headers[0][1][:8], "splunkd_")
+
+    @pytest.mark.smoke
+    def test_got_cookie_on_connect(self):
+        self.assertIsNotNone(self.context.get_cookies())
+        self.assertNotEqual(self.context.get_cookies(), {})
+        self.assertEqual(len(self.context.get_cookies()), 1)
+        self.assertEqual(list(self.context.get_cookies().keys())[0][:8], "splunkd_")
+
+    @pytest.mark.smoke
+    def test_cookie_with_autologin(self):
+        self.context.autologin = True
+        self.assertEqual(self.context.get("/services").status, 200)
+        self.assertTrue(self.context.has_cookies())
+        self.context.logout()
+        self.assertFalse(self.context.has_cookies())
+        self.assertEqual(self.context.get("/services").status, 200)
+        self.assertTrue(self.context.has_cookies())
+
+    @pytest.mark.smoke
+    def test_cookie_without_autologin(self):
+        self.context.autologin = False
+        self.assertEqual(self.context.get("/services").status, 200)
+        self.assertTrue(self.context.has_cookies())
+        self.context.logout()
+        self.assertFalse(self.context.has_cookies())
+        self.assertRaises(AuthenticationError,
+                          
self.context.get, "/services")
+
+    @pytest.mark.smoke
+    def test_got_updated_cookie_with_get(self):
+        old_cookies = self.context.get_cookies()
+        resp = self.context.get("apps/local")
+        found = False
+        for key, value in resp.headers:
+            if key.lower() == "set-cookie":
+                found = True
+                self.assertEqual(value[:8], "splunkd_")
+
+                new_cookies = {}
+                binding._parse_cookies(value, new_cookies)
+                # We're only expecting 1 in this scenario
+                self.assertEqual(len(old_cookies), 1)
+                self.assertEqual(len(new_cookies), 1)
+                self.assertEqual(old_cookies, new_cookies)
+                self.assertEqual(list(new_cookies.values())[0], list(old_cookies.values())[0])
+        self.assertTrue(found)
+
+    @pytest.mark.smoke
+    def test_login_fails_with_bad_cookie(self):
+        # We should get an error if using a bad cookie
+        try:
+            binding.connect(**{"cookie": "bad=cookie"})
+            self.fail()
+        except AuthenticationError as ae:
+            self.assertEqual(str(ae), "Login failed.")
+
+    @pytest.mark.smoke
+    def test_login_with_multiple_cookies(self):
+        # Login should fail while only the bad cookie is present
+        new_context = binding.Context()
+        new_context.get_cookies().update({"bad": "cookie"})
+        try:
+            new_context = new_context.login()
+            self.fail()
+        except AuthenticationError as ae:
+            self.assertEqual(str(ae), "Login failed.")
+            # Bring in a valid cookie now
+            for key, value in self.context.get_cookies().items():
+                new_context.get_cookies()[key] = value
+
+            self.assertEqual(len(new_context.get_cookies()), 2)
+            self.assertTrue('bad' in list(new_context.get_cookies().keys()))
+            self.assertTrue('cookie' in list(new_context.get_cookies().values()))
+
+            for k, v in self.context.get_cookies().items():
+                self.assertEqual(new_context.get_cookies()[k], v)
+
+            self.assertEqual(new_context.get("apps/local").status, 200)
+
+    @pytest.mark.smoke
+    def test_login_fails_without_cookie_or_token(self):
+        opts = {
+            'host': self.opts.kwargs['host'],
+            'port': self.opts.kwargs['port']
+        }
+        try:
+            binding.connect(**opts)
+            self.fail()
+        except AuthenticationError as ae:
+            self.assertEqual(str(ae), "Login failed.")
+
+
+class TestNamespace(unittest.TestCase):
+    def test_namespace(self):
+        tests = [
+            ({},
+             {'sharing': None, 'owner': None, 'app': None}),
+
+            ({'owner': "Bob"},
+             {'sharing': None, 'owner': "Bob", 'app': None}),
+
+            ({'app': "search"},
+             {'sharing': None, 'owner': None, 'app': "search"}),
+
+            ({'owner': "Bob", 'app': "search"},
+             {'sharing': None, 'owner': "Bob", 'app': "search"}),
+
+            ({'sharing': "user", 'owner': "Bob@bob.com"},
+             {'sharing': "user", 'owner': "Bob@bob.com", 'app': None}),
+
+            ({'sharing': "user"},
+             {'sharing': "user", 'owner': None, 'app': None}),
+
+            ({'sharing': "user", 'owner': "Bob"},
+             {'sharing': "user", 'owner': "Bob", 'app': None}),
+
+            ({'sharing': "user", 'app': "search"},
+             {'sharing': "user", 'owner': None, 'app': "search"}),
+
+            ({'sharing': "user", 'owner': "Bob", 'app': "search"},
+             {'sharing': "user", 'owner': "Bob", 'app': "search"}),
+
+            ({'sharing': "app"},
+             {'sharing': "app", 'owner': "nobody", 'app': None}),
+
+            ({'sharing': "app", 'owner': "Bob"},
+             {'sharing': "app", 'owner': "nobody", 'app': None}),
+
+            ({'sharing': "app", 'app': "search"},
+             {'sharing': "app", 'owner': "nobody", 'app': "search"}),
+
+            ({'sharing': "app", 'owner': "Bob", 'app': "search"},
+             {'sharing': "app", 'owner': "nobody", 'app': "search"}),
+
+            ({'sharing': "global"},
+             {'sharing': "global", 'owner': "nobody", 'app': None}),
+
+            ({'sharing': "global", 'owner': "Bob"},
+             {'sharing': "global", 'owner': "nobody", 'app': None}),
+
+            
({'sharing': "global", 'app': "search"}, + {'sharing': "global", 'owner': "nobody", 'app': "search"}), + + ({'sharing': "global", 'owner': "Bob", 'app': "search"}, + {'sharing': "global", 'owner': "nobody", 'app': "search"}), + + ({'sharing': "system"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), + + ({'sharing': "system", 'owner': "Bob"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), + + ({'sharing': "system", 'app': "search"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), + + ({'sharing': "system", 'owner': "Bob", 'app': "search"}, + {'sharing': "system", 'owner': "nobody", 'app': "system"}), + + ({'sharing': 'user', 'owner': '-', 'app': '-'}, + {'sharing': 'user', 'owner': '-', 'app': '-'})] + + for kwargs, expected in tests: + namespace = binding.namespace(**kwargs) + for k, v in expected.items(): + self.assertEqual(namespace[k], v) + + def test_namespace_fails(self): + self.assertRaises(ValueError, binding.namespace, sharing="gobble") + + +@pytest.mark.smoke +class TestBasicAuthentication(unittest.TestCase): + def setUp(self): + self.opts = testlib.parse([], {}, ".env") + opts = self.opts.kwargs.copy() + opts["basic"] = True + opts["username"] = self.opts.kwargs["username"] + opts["password"] = self.opts.kwargs["password"] + + self.context = binding.connect(**opts) + from splunklib import client + service = client.Service(**opts) + + if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: + def assertIsNotNone(self, obj, msg=None): + if obj is None: + raise self.failureException(msg or '%r is not None' % obj) + + def test_basic_in_auth_headers(self): + self.assertIsNotNone(self.context._auth_headers) + self.assertNotEqual(self.context._auth_headers, []) + self.assertEqual(len(self.context._auth_headers), 1) + self.assertEqual(len(self.context._auth_headers), 1) + self.assertEqual(self.context._auth_headers[0][0], "Authorization") + self.assertEqual(self.context._auth_headers[0][1][:6], "Basic ") + self.assertEqual(self.context.get("/services").status, 200) + + +@pytest.mark.smoke +class TestTokenAuthentication(BindingTestCase): + def test_preexisting_token(self): + token = self.context.token + opts = self.opts.kwargs.copy() + opts["token"] = token + opts["username"] = "boris the mad baboon" + opts["password"] = "nothing real" + + newContext = binding.Context(**opts) + response = newContext.get("/services") + self.assertEqual(response.status, 200) + + socket = newContext.connect() + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) + socket.close() + + def test_preexisting_token_sans_splunk(self): + token = self.context.token + if token.startswith('Splunk '): + token = token.split(' ', 1)[1] + self.assertFalse(token.startswith('Splunk ')) + else: + self.fail('Token did not start with "Splunk ".') + opts = self.opts.kwargs.copy() + opts["token"] = token + opts["username"] = "boris the mad baboon" + opts["password"] = "nothing real" + + newContext = binding.Context(**opts) + response = newContext.get("/services") + self.assertEqual(response.status, 200) + + socket = newContext.connect() + socket.write((f"POST 
{self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) + socket.close() + + def test_connect_with_preexisting_token_sans_user_and_pass(self): + token = self.context.token + opts = self.opts.kwargs.copy() + del opts['username'] + del opts['password'] + opts["token"] = token + + newContext = binding.connect(**opts) + response = newContext.get('/services') + self.assertEqual(response.status, 200) + + socket = newContext.connect() + socket.write((f"POST {self.context._abspath('some/path/to/post/to')} HTTP/1.1\r\n").encode('utf-8')) + socket.write((f"Host: {self.context.host}:{self.context.port}\r\n").encode('utf-8')) + socket.write("Accept-Encoding: identity\r\n".encode('utf-8')) + socket.write((f"Authorization: {self.context.token}\r\n").encode('utf-8')) + socket.write("X-Splunk-Input-Mode: Streaming\r\n".encode('utf-8')) + socket.write("\r\n".encode('utf-8')) + socket.close() + + +class TestPostWithBodyParam(unittest.TestCase): + + def test_post(self): + def handler(url, message, **kwargs): + assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" + assert message["body"] == b"testkey=testvalue" + return splunklib.data.Record({ + "status": 200, + "headers": [], + }) + + ctx = binding.Context(handler=handler) + ctx.post("foo/bar", owner="testowner", app="testapp", body={"testkey": "testvalue"}) + + def test_post_with_params_and_body(self): + def handler(url, message, **kwargs): + assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar?extrakey=extraval" + assert message["body"] == b"testkey=testvalue" + return splunklib.data.Record({ + "status": 200, + "headers": [], + }) + + ctx = binding.Context(handler=handler) + ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp", body={"testkey": "testvalue"}) + + def test_post_with_params_and_no_body(self): + def handler(url, message, **kwargs): + assert url == "https://localhost:8089/servicesNS/testowner/testapp/foo/bar" + assert message["body"] == b"extrakey=extraval" + return splunklib.data.Record({ + "status": 200, + "headers": [], + }) + + ctx = binding.Context(handler=handler) + ctx.post("foo/bar", extrakey="extraval", owner="testowner", app="testapp") + + +def _wrap_handler(func, response_code=200, body=""): + def wrapped(handler_self): + result = func(handler_self) + if result is None: + handler_self.send_response(response_code) + handler_self.end_headers() + handler_self.wfile.write(body) + + return wrapped + + +class MockServer: + def __init__(self, port=9093, **handlers): + methods = {"do_" + k: _wrap_handler(v) for (k, v) in handlers.items()} + + def init(handler_self, socket, address, server): + BaseHTTPServer.BaseHTTPRequestHandler.__init__(handler_self, socket, address, server) + + def log(*args): # To silence server access logs + pass + + methods["__init__"] = init + methods["log_message"] = log + Handler = type("Handler", + (BaseHTTPServer.BaseHTTPRequestHandler, object), + methods) + self._svr = BaseHTTPServer.HTTPServer(("localhost", port), Handler) + + def run(): + self._svr.handle_request() + + self._thread = Thread(target=run) + self._thread.daemon = True + + def __enter__(self): + self._thread.start() + return 
self._svr + + def __exit__(self, typ, value, traceback): + self._thread.join(10) + self._svr.server_close() + + +class TestFullPost(unittest.TestCase): + + def test_post_with_body_urlencoded(self): + def check_response(handler): + length = int(handler.headers.get('content-length', 0)) + body = handler.rfile.read(length) + assert body.decode('utf-8') == "foo=bar" + + with MockServer(POST=check_response): + ctx = binding.connect(port=9093, scheme='http', token="waffle") + ctx.post("/", foo="bar") + + def test_post_with_body_string(self): + def check_response(handler): + length = int(handler.headers.get('content-length', 0)) + body = handler.rfile.read(length) + assert handler.headers['content-type'] == 'application/json' + assert json.loads(body)["baz"] == "baf" + + with MockServer(POST=check_response): + ctx = binding.connect(port=9093, scheme='http', token="waffle", + headers=[("Content-Type", "application/json")]) + ctx.post("/", foo="bar", body='{"baz": "baf"}') + + def test_post_with_body_dict(self): + def check_response(handler): + length = int(handler.headers.get('content-length', 0)) + body = handler.rfile.read(length) + assert handler.headers['content-type'] == 'application/x-www-form-urlencoded' + assert ensure_str(body) in ['baz=baf&hep=cat', 'hep=cat&baz=baf'] + + with MockServer(POST=check_response): + ctx = binding.connect(port=9093, scheme='http', token="waffle") + ctx.post("/", foo="bar", body={"baz": "baf", "hep": "cat"}) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/testlib.py b/tests/testlib.py index a92790e2..e7c7b6a7 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -1,261 +1,261 @@ -#!/usr/bin/env python -# -# Copyright © 2011-2024 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Shared unit test utilities.""" -import contextlib - -import os -import time -import logging -import sys - -# Run the test suite on the SDK without installing it. 
-sys.path.insert(0, '../') - -from time import sleep -from datetime import datetime, timedelta - -import unittest - -from utils import parse - -from splunklib import client - - - -logging.basicConfig( - filename='test.log', - level=logging.DEBUG, - format="%(asctime)s:%(levelname)s:%(message)s") - - -class NoRestartRequiredError(Exception): - pass - - -class WaitTimedOutError(Exception): - pass - - -def to_bool(x): - if x == '1': - return True - if x == '0': - return False - raise ValueError(f"Not a boolean value: {x}") - - -def tmpname(): - name = 'delete-me-' + str(os.getpid()) + str(time.time()).replace('.', '-') - return name - - -def wait(predicate, timeout=60, pause_time=0.5): - assert pause_time < timeout - start = datetime.now() - diff = timedelta(seconds=timeout) - while not predicate(): - if datetime.now() - start > diff: - logging.debug("wait timed out after %d seconds", timeout) - raise WaitTimedOutError - sleep(pause_time) - logging.debug("wait finished after %s seconds", datetime.now() - start) - - -class SDKTestCase(unittest.TestCase): - restart_already_required = False - installedApps = [] - - def assertEventuallyTrue(self, predicate, timeout=30, pause_time=0.5, - timeout_message="Operation timed out."): - assert pause_time < timeout - start = datetime.now() - diff = timedelta(seconds=timeout) - while not predicate(): - if datetime.now() - start > diff: - logging.debug("wait timed out after %d seconds", timeout) - self.fail(timeout_message) - sleep(pause_time) - logging.debug("wait finished after %s seconds", datetime.now() - start) - - def check_content(self, entity, **kwargs): - for k, v in kwargs: - self.assertEqual(entity[k], str(v)) - - def check_entity(self, entity): - assert entity is not None - self.assertTrue(entity.name is not None) - self.assertTrue(entity.path is not None) - - self.assertTrue(entity.state is not None) - self.assertTrue(entity.content is not None) - - # Verify access metadata - assert entity.access is not None - entity.access.app - entity.access.owner - entity.access.sharing - - # Verify content metadata - - # In some cases, the REST API does not return field metadata for when - # entities are intially listed by a collection, so we refresh to make - # sure the metadata is available. - entity.refresh() - - self.assertTrue(isinstance(entity.fields.required, list)) - self.assertTrue(isinstance(entity.fields.optional, list)) - self.assertTrue(isinstance(entity.fields.wildcard, list)) - - # Verify that all required fields appear in entity content - - for field in entity.fields.required: - try: - self.assertTrue(field in entity.content) - except: - # Check for known exceptions - if "configs/conf-times" in entity.path: - if field in ["is_sub_menu"]: - continue - raise - - def clear_restart_message(self): - """Tell Splunk to forget that it needs to be restarted. - - This is used mostly in cases such as deleting a temporary application. - Splunk asks to be restarted when that happens, but unless the application - contained modular input kinds or the like, it isn't necessary. 
- """ - if not self.service.restart_required: - raise ValueError("Tried to clear restart message when there was none.") - try: - self.service.delete("messages/restart_required") - except client.HTTPError as he: - if he.status != 404: - raise - - @contextlib.contextmanager - def fake_splunk_version(self, version): - original_version = self.service.splunk_version - try: - self.service._splunk_version = version - yield - finally: - self.service._splunk_version = original_version - - def install_app_from_collection(self, name): - collectionName = 'sdkappcollection' - if collectionName not in self.service.apps: - raise ValueError("sdk-test-application not installed in splunkd") - appPath = self.pathInApp(collectionName, ["build", name + ".tar"]) - kwargs = {"update": True, "name": appPath, "filename": True} - - try: - self.service.post("apps/local", **kwargs) - except client.HTTPError as he: - if he.status == 400: - raise IOError(f"App {name} not found in app collection") - if self.service.restart_required: - self.service.restart(120) - self.installedApps.append(name) - - def app_collection_installed(self): - collectionName = 'sdkappcollection' - return collectionName in self.service.apps - - def pathInApp(self, appName, pathComponents): - r"""Return a path to *pathComponents* in *appName*. - - `pathInApp` is used to refer to files in applications installed with - `install_app_from_collection`. For example, the app `file_to_upload` in - the collection contains `log.txt`. To get the path to it, call:: - - pathInApp('file_to_upload', ['log.txt']) - - The path to `setup.xml` in `has_setup_xml` would be fetched with:: - - pathInApp('has_setup_xml', ['default', 'setup.xml']) - - `pathInApp` figures out the correct separator to use (based on whether - splunkd is running on Windows or Unix) and joins the elements in - *pathComponents* into a path relative to the application specified by - *appName*. - - *pathComponents* should be a list of strings giving the components. - This function will try to figure out the correct separator (/ or \) - for the platform that splunkd is running on and construct the path - as needed. - - :return: A string giving the path. - """ - splunkHome = self.service.settings['SPLUNK_HOME'] - if "\\" in splunkHome: - # This clause must come first, since Windows machines may - # have mixed \ and / in their paths. - separator = "\\" - elif "/" in splunkHome: - separator = "/" - else: - raise ValueError("No separators in $SPLUNK_HOME. Can't determine what file separator to use.") - appPath = separator.join([splunkHome, "etc", "apps", appName] + pathComponents) - return appPath - - def uncheckedRestartSplunk(self, timeout=240): - self.service.restart(timeout) - - def restartSplunk(self, timeout=240): - if self.service.restart_required: - self.service.restart(timeout) - else: - raise NoRestartRequiredError() - - @classmethod - def setUpClass(cls): - cls.opts = parse([], {}, ".env") - cls.opts.kwargs.update({'retries': 3}) - # Before we start, make sure splunk doesn't need a restart. - service = client.connect(**cls.opts.kwargs) - if service.restart_required: - service.restart(timeout=120) - - def setUp(self): - unittest.TestCase.setUp(self) - self.opts.kwargs.update({'retries': 3}) - self.service = client.connect(**self.opts.kwargs) - # If Splunk is in a state requiring restart, go ahead - # and restart. That way we'll be sane for the rest of - # the test. 
- if self.service.restart_required: - self.restartSplunk() - logging.debug("Connected to splunkd version %s", '.'.join(str(x) for x in self.service.splunk_version)) - - def tearDown(self): - from splunklib.binding import HTTPError - - if self.service.restart_required: - self.fail("Test left Splunk in a state requiring a restart.") - - for appName in self.installedApps: - if appName in self.service.apps: - try: - self.service.apps.delete(appName) - wait(lambda: appName not in self.service.apps) - except HTTPError as error: - if not (os.name == 'nt' and error.status == 500): - raise - print(f'Ignoring failure to delete {appName} during tear down: {error}') - if self.service.restart_required: - self.clear_restart_message() +#!/usr/bin/env python +# +# Copyright © 2011-2024 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Shared unit test utilities.""" +import contextlib + +import os +import time +import logging +import sys + +# Run the test suite on the SDK without installing it. +sys.path.insert(0, '../') + +from time import sleep +from datetime import datetime, timedelta + +import unittest + +from utils import parse + +from splunklib import client + + + +logging.basicConfig( + filename='test.log', + level=logging.DEBUG, + format="%(asctime)s:%(levelname)s:%(message)s") + + +class NoRestartRequiredError(Exception): + pass + + +class WaitTimedOutError(Exception): + pass + + +def to_bool(x): + if x == '1': + return True + if x == '0': + return False + raise ValueError(f"Not a boolean value: {x}") + + +def tmpname(): + name = 'delete-me-' + str(os.getpid()) + str(time.time()).replace('.', '-') + return name + + +def wait(predicate, timeout=60, pause_time=0.5): + assert pause_time < timeout + start = datetime.now() + diff = timedelta(seconds=timeout) + while not predicate(): + if datetime.now() - start > diff: + logging.debug("wait timed out after %d seconds", timeout) + raise WaitTimedOutError + sleep(pause_time) + logging.debug("wait finished after %s seconds", datetime.now() - start) + + +class SDKTestCase(unittest.TestCase): + restart_already_required = False + installedApps = [] + + def assertEventuallyTrue(self, predicate, timeout=30, pause_time=0.5, + timeout_message="Operation timed out."): + assert pause_time < timeout + start = datetime.now() + diff = timedelta(seconds=timeout) + while not predicate(): + if datetime.now() - start > diff: + logging.debug("wait timed out after %d seconds", timeout) + self.fail(timeout_message) + sleep(pause_time) + logging.debug("wait finished after %s seconds", datetime.now() - start) + + def check_content(self, entity, **kwargs): + for k, v in kwargs: + self.assertEqual(entity[k], str(v)) + + def check_entity(self, entity): + assert entity is not None + self.assertTrue(entity.name is not None) + self.assertTrue(entity.path is not None) + + self.assertTrue(entity.state is not None) + self.assertTrue(entity.content is not None) + + # Verify access metadata + assert entity.access is not None + entity.access.app + entity.access.owner + 
+        entity.access.sharing
+
+        # Verify content metadata
+
+        # In some cases, the REST API does not return field metadata when
+        # entities are initially listed by a collection, so we refresh to
+        # make sure the metadata is available.
+        entity.refresh()
+
+        self.assertTrue(isinstance(entity.fields.required, list))
+        self.assertTrue(isinstance(entity.fields.optional, list))
+        self.assertTrue(isinstance(entity.fields.wildcard, list))
+
+        # Verify that all required fields appear in entity content
+
+        for field in entity.fields.required:
+            try:
+                self.assertTrue(field in entity.content)
+            except AssertionError:
+                # Check for known exceptions
+                if "configs/conf-times" in entity.path:
+                    if field in ["is_sub_menu"]:
+                        continue
+                raise
+
+    def clear_restart_message(self):
+        """Tell Splunk to forget that it needs to be restarted.
+
+        This is used mostly in cases such as deleting a temporary application.
+        Splunk asks to be restarted when that happens, but unless the application
+        contained modular input kinds or the like, it isn't necessary.
+        """
+        if not self.service.restart_required:
+            raise ValueError("Tried to clear restart message when there was none.")
+        try:
+            self.service.delete("messages/restart_required")
+        except client.HTTPError as he:
+            if he.status != 404:
+                raise
+
+    @contextlib.contextmanager
+    def fake_splunk_version(self, version):
+        original_version = self.service.splunk_version
+        try:
+            self.service._splunk_version = version
+            yield
+        finally:
+            self.service._splunk_version = original_version
+
+    def install_app_from_collection(self, name):
+        collectionName = 'sdkappcollection'
+        if collectionName not in self.service.apps:
+            raise ValueError("sdk-test-application not installed in splunkd")
+        appPath = self.pathInApp(collectionName, ["build", name + ".tar"])
+        kwargs = {"update": True, "name": appPath, "filename": True}
+
+        try:
+            self.service.post("apps/local", **kwargs)
+        except client.HTTPError as he:
+            if he.status == 400:
+                raise IOError(f"App {name} not found in app collection")
+        if self.service.restart_required:
+            self.service.restart(120)
+        self.installedApps.append(name)
+
+    def app_collection_installed(self):
+        collectionName = 'sdkappcollection'
+        return collectionName in self.service.apps
+
+    def pathInApp(self, appName, pathComponents):
+        r"""Return a path to *pathComponents* in *appName*.
+
+        `pathInApp` is used to refer to files in applications installed with
+        `install_app_from_collection`. For example, the app `file_to_upload` in
+        the collection contains `log.txt`. To get the path to it, call::
+
+            pathInApp('file_to_upload', ['log.txt'])
+
+        The path to `setup.xml` in `has_setup_xml` would be fetched with::
+
+            pathInApp('has_setup_xml', ['default', 'setup.xml'])
+
+        `pathInApp` figures out the correct separator to use (based on whether
+        splunkd is running on Windows or Unix) and joins the elements in
+        *pathComponents* into a path relative to the application specified by
+        *appName*.
+
+        *pathComponents* should be a list of strings giving the components.
+        This function will try to figure out the correct separator (/ or \)
+        for the platform that splunkd is running on and construct the path
+        as needed.
+
+        :return: A string giving the path.
+        """
+        splunkHome = self.service.settings['SPLUNK_HOME']
+        if "\\" in splunkHome:
+            # This clause must come first, since Windows machines may
+            # have mixed \ and / in their paths.
+            separator = "\\"
+        elif "/" in splunkHome:
+            separator = "/"
+        else:
+            raise ValueError("No separators in $SPLUNK_HOME.
Can't determine what file separator to use.")
+        appPath = separator.join([splunkHome, "etc", "apps", appName] + pathComponents)
+        return appPath
+
+    def uncheckedRestartSplunk(self, timeout=240):
+        self.service.restart(timeout)
+
+    def restartSplunk(self, timeout=240):
+        if self.service.restart_required:
+            self.service.restart(timeout)
+        else:
+            raise NoRestartRequiredError()
+
+    @classmethod
+    def setUpClass(cls):
+        cls.opts = parse([], {}, ".env")
+        cls.opts.kwargs.update({'retries': 3})
+        # Before we start, make sure splunk doesn't need a restart.
+        service = client.connect(**cls.opts.kwargs)
+        if service.restart_required:
+            service.restart(timeout=120)
+
+    def setUp(self):
+        unittest.TestCase.setUp(self)
+        self.opts.kwargs.update({'retries': 3})
+        self.service = client.connect(**self.opts.kwargs)
+        # If Splunk is in a state requiring restart, go ahead
+        # and restart. That way we'll be sane for the rest of
+        # the test.
+        if self.service.restart_required:
+            self.restartSplunk()
+        logging.debug("Connected to splunkd version %s", '.'.join(str(x) for x in self.service.splunk_version))
+
+    def tearDown(self):
+        from splunklib.binding import HTTPError
+
+        if self.service.restart_required:
+            self.fail("Test left Splunk in a state requiring a restart.")
+
+        for appName in self.installedApps:
+            if appName in self.service.apps:
+                try:
+                    self.service.apps.delete(appName)
+                    wait(lambda: appName not in self.service.apps)
+                except HTTPError as error:
+                    if not (os.name == 'nt' and error.status == 500):
+                        raise
+                    print(f'Ignoring failure to delete {appName} during tear down: {error}')
+        if self.service.restart_required:
+            self.clear_restart_message()
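
To see how these helpers fit together, a minimal sketch of a test built on SDKTestCase follows. It is illustrative and not part of the patch: the index name comes from tmpname(), the import path assumes the suite's layout, and the assertions rely only on the methods defined above.

    # A minimal sketch, assuming the suite's import layout; not part of the patch.
    from tests.testlib import SDKTestCase, tmpname


    class ExampleIndexTestCase(SDKTestCase):
        def test_index_shows_up(self):
            # setUp() has already connected self.service and handled any
            # pending restart.
            name = tmpname()  # disposable name, e.g. 'delete-me-...'
            self.service.indexes.create(name)
            try:
                # Poll until splunkd reports the index instead of sleeping
                # for a fixed interval.
                self.assertEventuallyTrue(
                    lambda: name in self.service.indexes, timeout=60)
                # Temporarily report an older splunkd version to exercise
                # version-dependent code paths.
                with self.fake_splunk_version((8, 0, 0)):
                    self.assertEqual((8, 0, 0), self.service.splunk_version)
            finally:
                self.service.indexes.delete(name)
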
From b12a4bf9d20aeb29ff980c59cfceba40f505da50 Mon Sep 17 00:00:00 2001
From: Abhi Shah
Date: Fri, 15 Mar 2024 15:12:59 +0530
Subject: [PATCH 58/60] Version updated - added bug fix information in
 CHANGELOG.md

---
 CHANGELOG.md | 9 ++++++++-
 README.md    | 2 +-
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ab20e262..e956f96d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,6 @@
 # Splunk Enterprise SDK for Python Changelog

-## Version 2.0.0-beta
+## Version 2.0.0

 ### Feature updates
 * `ensure_binary`, `ensure_str` and `assert_regex` utility methods have been migrated from `six.py` to `splunklib/utils.py`
@@ -13,6 +13,13 @@
 * Updated CI test matrix to run with Python versions - 3.7 and 3.9
 * Refactored Code throwing `deprecation` warnings
 * Refactored Code violating Pylint rules
+
+### Bug fixes
+* [#527](https://github.com/splunk/splunk-sdk-python/issues/527) Added check for user roles
+* Fix to access the metadata "finished" field in search commands using the v2 protocol.
+* Fix for error messages about ChunkedExternProcessor in splunkd.log for Custom Search Commands.
+
+
 ## Version 1.7.4

 ### Bug fixes
diff --git a/README.md b/README.md
index dd58b446..70413efc 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@

 # The Splunk Enterprise Software Development Kit for Python

-#### Version 1.7.4
+#### Version 2.0.0

 The Splunk Enterprise Software Development Kit (SDK) for Python contains library code designed to enable developers to build applications using the Splunk platform.
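
The changelog entry above points at the `splunklib.utils` helpers migrated from `six.py`. A minimal sketch of calling them, assuming they keep the six-style signatures with encoding defaulting to 'utf-8':

    # A minimal sketch, assuming six-style signatures (encoding='utf-8' default).
    from splunklib.utils import ensure_binary, ensure_str

    payload = ensure_binary("sourcetype=access_combined")  # str -> bytes
    assert payload == b"sourcetype=access_combined"

    text = ensure_str(b"host=web-01")  # bytes -> str
    assert text == "host=web-01"
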
From 9baa4554e29bc6f5c8487942961ce8bea7d7855e Mon Sep 17 00:00:00 2001
From: Patrick King <39703314+PKing70@users.noreply.github.com>
Date: Mon, 18 Mar 2024 08:51:47 -0700
Subject: [PATCH 59/60] Update README.md

Typographic edits.
---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 70413efc..2bbae4de 100644
--- a/README.md
+++ b/README.md
@@ -19,13 +19,13 @@ The Splunk developer platform enables developers to take advantage of the same t

 ## Get started with the Splunk Enterprise SDK for Python

-The Splunk Enterprise SDK for Python contains library code, and it's examples are located in the [splunk-app-examples](https://github.com/splunk/splunk-app-examples) repository, that show how to programmatically interact with the Splunk platform for a variety of scenarios including searching, saved searches, data inputs, and many more, along with building complete applications.
+The Splunk Enterprise SDK for Python contains library code, and its examples are located in the [splunk-app-examples](https://github.com/splunk/splunk-app-examples) repository. They show how to programmatically interact with the Splunk platform for a variety of scenarios including searching, saved searches, data inputs, and many more, along with building complete applications.

 ### Requirements

 Here's what you need to get going with the Splunk Enterprise SDK for Python.

-* Python 3.7 or Python 3.9.
+* Python 3.7 or Python 3.9

 The Splunk Enterprise SDK for Python is compatible with python3 and has been tested with Python v3.7 and v3.9.

From d849fda7e886df202da76cd500eb354adfe95b65 Mon Sep 17 00:00:00 2001
From: Patrick King <39703314+PKing70@users.noreply.github.com>
Date: Mon, 18 Mar 2024 09:04:19 -0700
Subject: [PATCH 60/60] Update CHANGELOG.md

Typographic edits
---
 CHANGELOG.md | 48 ++++++++++++++++++++++++------------------------
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e956f96d..0e423e67 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,25 +6,25 @@
 * `ensure_binary`, `ensure_str` and `assert_regex` utility methods have been migrated from `six.py` to `splunklib/utils.py`

 ### Major changes
-* Removed Code specific to Python2
+* Removed code specific to Python 2
 * Removed six.py dependency
 * Removed `__future__` imports
-* Refactored & Updated `splunklib` and `tests` to utilise Python3 features
+* Refactored and updated `splunklib` and `tests` to utilize Python 3 features
 * Updated CI test matrix to run with Python versions - 3.7 and 3.9
 * Refactored Code throwing `deprecation` warnings
 * Refactored Code violating Pylint rules

 ### Bug fixes
 * [#527](https://github.com/splunk/splunk-sdk-python/issues/527) Added check for user roles
-* Fix to access the metadata "finished" field in search commands using the v2 protocol.
-* Fix for error messages about ChunkedExternProcessor in splunkd.log for Custom Search Commands.
+
+* Fix to access the metadata "finished" field in search commands using the v2 protocol
+* Fix for error messages about ChunkedExternProcessor in splunkd.log for Custom Search Commands

 ## Version 1.7.4

 ### Bug fixes
-* [#532](https://github.com/splunk/splunk-sdk-python/pull/532) update encoding errors mode to 'replace' [[issue#505](https://github.com/splunk/splunk-sdk-python/issues/505)]
-* [#507](https://github.com/splunk/splunk-sdk-python/pull/507) masked sensitive data in logs [[issue#506](https://github.com/splunk/splunk-sdk-python/issues/506)]
+* [#532](https://github.com/splunk/splunk-sdk-python/pull/532) Update encoding errors mode to 'replace' [[issue#505](https://github.com/splunk/splunk-sdk-python/issues/505)]
+* [#507](https://github.com/splunk/splunk-sdk-python/pull/507) Masked sensitive data in logs [[issue#506](https://github.com/splunk/splunk-sdk-python/issues/506)]

 ### Minor changes
 * [#530](https://github.com/splunk/splunk-sdk-python/pull/530) Update GitHub CI build status in README and removed RTD(Read The Docs) reference
@@ -55,7 +55,7 @@
 * [#471](https://github.com/splunk/splunk-sdk-python/pull/471) Fixed support of Load Balancer "sticky sessions" (persistent cookies) [[issue#438](https://github.com/splunk/splunk-sdk-python/issues/438)]

 ### Minor changes
-* [#466](https://github.com/splunk/splunk-sdk-python/pull/466) tests for CSC apps
+* [#466](https://github.com/splunk/splunk-sdk-python/pull/466) Tests for CSC apps
 * [#467](https://github.com/splunk/splunk-sdk-python/pull/467) Added 'kwargs' parameter for Saved Search History function
 * [#475](https://github.com/splunk/splunk-sdk-python/pull/475) README updates

@@ -65,10 +65,10 @@
 * [#468](https://github.com/splunk/splunk-sdk-python/pull/468) SDK Support for splunkd search API changes

 ### Bug fixes
-* [#464](https://github.com/splunk/splunk-sdk-python/pull/464) updated checks for wildcards in StoragePasswords [[issue#458](https://github.com/splunk/splunk-sdk-python/issues/458)]
+* [#464](https://github.com/splunk/splunk-sdk-python/pull/464) Updated checks for wildcards in StoragePasswords [[issue#458](https://github.com/splunk/splunk-sdk-python/issues/458)]

 ### Minor changes
-* [#463](https://github.com/splunk/splunk-sdk-python/pull/463) Preserve thirdparty cookies
+* [#463](https://github.com/splunk/splunk-sdk-python/pull/463) Preserve third-party cookies

 ## Version 1.6.20

@@ -94,7 +94,7 @@
 * Pre-requisite: Query parameter 'output_mode' must be set to 'json'
 * Improves performance by approx ~80-90%
 * ResultsReader is deprecated and will be removed in future releases (NOTE: Please migrate to JSONResultsReader)
-* [#437](https://github.com/splunk/splunk-sdk-python/pull/437) added setup_logging() method in splunklib for logging
+* [#437](https://github.com/splunk/splunk-sdk-python/pull/437) Added setup_logging() method in splunklib for logging
 * [#426](https://github.com/splunk/splunk-sdk-python/pull/426) Added new github_commit modular input example
 * [#392](https://github.com/splunk/splunk-sdk-python/pull/392) Break out search argument to option parsing for v2 custom search commands
 * [#384](https://github.com/splunk/splunk-sdk-python/pull/384) Added Float parameter validator for custom search commands
@@ -110,17 +110,17 @@

 ### Minor changes
 * [#440](https://github.com/splunk/splunk-sdk-python/pull/440) Github release workflow modified to generate docs
 * [#430](https://github.com/splunk/splunk-sdk-python/pull/430) Fix indentation in README
-* [#429](https://github.com/splunk/splunk-sdk-python/pull/429) documented how to access modular input metadata
+* [#429](https://github.com/splunk/splunk-sdk-python/pull/429) Documented how to access modular input metadata
 * [#427](https://github.com/splunk/splunk-sdk-python/pull/427) Replace .splunkrc with .env file in test and examples
 * [#424](https://github.com/splunk/splunk-sdk-python/pull/424) Float validator test fix
-* [#423](https://github.com/splunk/splunk-sdk-python/pull/423) Python3 compatibility for ResponseReader.__str__()
+* [#423](https://github.com/splunk/splunk-sdk-python/pull/423) Python 3 compatibility for ResponseReader.__str__()
 * [#422](https://github.com/splunk/splunk-sdk-python/pull/422) ordereddict and all its reference removed
 * [#421](https://github.com/splunk/splunk-sdk-python/pull/421) Update README.md
 * [#387](https://github.com/splunk/splunk-sdk-python/pull/387) Update filter.py
 * [#331](https://github.com/splunk/splunk-sdk-python/pull/331) Fix a couple of warnings spotted when running python 2.7 tests
 * [#330](https://github.com/splunk/splunk-sdk-python/pull/330) client: use six.string_types instead of basestring
 * [#329](https://github.com/splunk/splunk-sdk-python/pull/329) client: remove outdated comment in Index.submit
-* [#262](https://github.com/splunk/splunk-sdk-python/pull/262) properly add parameters to request based on the method of the request
+* [#262](https://github.com/splunk/splunk-sdk-python/pull/262) Properly add parameters to request based on the method of the request
 * [#237](https://github.com/splunk/splunk-sdk-python/pull/237) Don't output close tags if you haven't written a start tag
 * [#149](https://github.com/splunk/splunk-sdk-python/pull/149) "handlers" stanza missing in examples/searchcommands_template/default/logging.conf

@@ -190,7 +190,7 @@ https://github.com/splunk/splunk-sdk-python/blob/develop/README.md#customization
 * Fixed regression in mod inputs which resulted in error ’file' object has no attribute 'readable’, by not forcing to text/bytes in mod inputs event writer any longer.

 ### Minor changes
-* Minor updates to the splunklib search commands to support Python3
+* Minor updates to the splunklib search commands to support Python 3

 ## Version 1.6.12

@@ -199,32 +199,32 @@ https://github.com/splunk/splunk-sdk-python/blob/develop/README.md#customization
 * Made modinput text consistent

 ### Bug fixes
-* Changed permissions from 755 to 644 for python files to pass appinspect checks
+* Changed permissions from 755 to 644 for Python files to pass Appinspect checks
 * Removed version check on ssl verify toggle

 ## Version 1.6.11

 ### Bug Fix

-* Fix custom search command V2 failures on Windows for Python3
+* Fix custom search command V2 failures on Windows for Python 3

 ## Version 1.6.10

 ### Bug Fix

-* Fix long type gets wrong values on windows for python 2
+* Fix long type gets wrong values on Windows for Python 2

 ## Version 1.6.9

 ### Bug Fix

-* Fix buffered input in python 3
+* Fix buffered input in Python 3

 ## Version 1.6.8

 ### Bug Fix

-* Fix custom search command on python 3 on windows
+* Fix custom search command on Python 3 on Windows

 ## Version 1.6.7

@@ -298,7 +298,7 @@

 ### Minor changes

-* Use relative imports throughout the the SDK.
+* Use relative imports throughout the SDK.

 * Performance improvement when constructing `Input` entity paths.

@@ -437,7 +437,7 @@

 * Added a script (GenerateHelloCommand) to the searchcommand_app to generate a custom search command.

-* Added a human readable argument titles to modular input examples.
+* Added human-readable argument titles to modular input examples.

 * Renamed the searchcommand `csv` module to `splunk_csv`.

@@ -445,7 +445,7 @@

 * Now entities that contain slashes in their name can be created, accessed and deleted correctly.

-* Fixed a perfomance issue with connecting to Splunk on Windows.
+* Fixed a performance issue with connecting to Splunk on Windows.

 * Improved the `service.restart()` function.

@@ -539,7 +539,7 @@ The following bugs have been fixed:

 ### Bug fixes

 * When running `setup.py dist` without running `setup.py build`, there is no
-  longer an `No such file or directory` error on the command line, and the
+  longer a `No such file or directory` error on the command line, and the
   command behaves as expected.

 * When setting the sourcetype of a modular input event, events are indexed
@@ -561,7 +561,7 @@

 ### Bug fix

 * When running `setup.py dist` without running `setup.py build`, there is no
-  longer an `No such file or directory` error on the command line, and the
+  longer a `No such file or directory` error on the command line, and the
   command behaves as expected.

 * When setting the sourcetype of a modular input event, events are indexed properly.
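
The changelog above also recommends migrating from the deprecated ResultsReader to JSONResultsReader, with output_mode set to 'json'. A minimal sketch of that migration, with placeholder host and credentials:

    # A minimal sketch of the JSONResultsReader migration; host/credentials
    # are placeholders, and output_mode='json' is required per the changelog.
    from splunklib import client, results

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")
    stream = service.jobs.oneshot("search index=_internal | head 5",
                                  output_mode="json")
    for item in results.JSONResultsReader(stream):
        if isinstance(item, results.Message):
            print(f"{item.type}: {item.message}")  # diagnostic messages
        else:
            print(item)  # each result is a dict of field -> value
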