diff --git a/requirements.in b/requirements.in index fa3bebda..5bfb7530 100644 --- a/requirements.in +++ b/requirements.in @@ -62,7 +62,7 @@ uvicorn gunicorn cryptography -# SOURCE: https://github.com/esnme/ultrajson +# SOURCE: https://github.com/ultrajson/ultrajson # Ultra fast JSON decoder and encoder written in C with Python binding ujson @@ -126,3 +126,5 @@ semver eventlet ipaddr jsonpath_rw +gitpython +lockfile diff --git a/requirements.txt b/requirements.txt index 213604bb..0839545d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -44,6 +44,8 @@ eventlet==0.25.2 # via -r requirements.in factory-boy==2.12.0 # via -r requirements.in faker==4.0.0 # via -r requirements.in, factory-boy fastapi==0.48.0 # via -r requirements.in +gitdb==4.0.5 # via gitpython +gitpython==3.1.3 # via -r requirements.in greenlet==0.4.16 # via eventlet gunicorn==20.0.4 # via -r requirements.in h11==0.9.0 # via uvicorn @@ -57,6 +59,7 @@ jinja2-cli[yaml]==0.7.0 # via -r requirements.in jinja2==2.11.1 # via aiohttp-jinja2, jinja2-cli jsonpath-rw==1.4.0 # via -r requirements.in kombu==4.6.7 # via celery +lockfile==0.12.2 # via -r requirements.in loggerfactory==0.0.5 # via -r requirements.in logging-tree==1.8.1 # via -r requirements.in lxml==4.5.0 # via emails, premailer @@ -96,6 +99,7 @@ ruamel.yaml==0.16.9 # via -r requirements.in, ruamel.yaml.cmd, ruamel.yaml semver==2.10.2 # via -r requirements.in simplejson==3.17.0 # via pytool six==1.14.0 # via bcrypt, configobj, cryptography, ecdsa, eventlet, jsonpath-rw, pyconfig, python-dateutil, python-jose, python-multipart, pytool, sqlalchemy-utils, tenacity +smmap==3.0.4 # via gitdb sqlalchemy-utils==0.36.1 # via -r requirements.in sqlalchemy==1.3.13 # via -r requirements.in, alembic, databases, sqlalchemy-utils starlette-prometheus==0.5.0 # via -r requirements.in diff --git a/tasks/ci.py b/tasks/ci.py index 48a9b7f5..7a6d478a 100644 --- a/tasks/ci.py +++ b/tasks/ci.py @@ -330,7 +330,7 @@ def pytest( if pdb: _cmd += r" --pdb " - _cmd += r" 
--cov-config=setup.cfg --verbose --cov-append --cov-report=term-missing --cov-report=xml:cov.xml --cov-report=html:htmlcov --cov-report=annotate:cov_annotate --mypy --showlocals --tb=short --cov=ultron8 tests" + _cmd += r" --cov-config=setup.cfg -vv --cov-append --cov-report=term-missing --cov-report=xml:cov.xml --cov-report=html:htmlcov --cov-report=annotate:cov_annotate --mypy --showlocals --tb=short --cov=ultron8 tests" ctx.run(_cmd) @@ -673,6 +673,7 @@ def clean_pyi(ctx, loc="local", verbose=0, dry_run=False): # call(pytest, loc="local", mockedfs=True), # call(pytest, loc="local", clionly=True), # call(pytest, loc="local", usersonly=True), + # call(pytest, loc="local", utilsonly=True), # call(pytest, loc="local", convertingtotestclientstarlette=True), # call(pytest, loc="local", loggeronly=True), call(pytest, loc="local"), diff --git a/tests/content/__init__.py b/tests/content/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/content/test_content_loader.py b/tests/content/test_content_loader.py new file mode 100644 index 00000000..6f86b295 --- /dev/null +++ b/tests/content/test_content_loader.py @@ -0,0 +1,77 @@ +# st2 +import os + +import pytest + +from ultron8.content.loader import LOG, ContentPackLoader + +CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) +RESOURCES_DIR = os.path.abspath(os.path.join(CURRENT_DIR, "../resources")) + + +class ContentLoaderTest: + def test_get_sensors(self): + packs_base_path = os.path.join(RESOURCES_DIR, "packs/") + loader = ContentPackLoader() + pack_sensors = loader.get_content( + base_dirs=[packs_base_path], content_type="sensors" + ) + assert pack_sensors.get("pack1", None) is not None + + def test_get_sensors_pack_missing_sensors(self): + loader = ContentPackLoader() + fail_pack_path = os.path.join(RESOURCES_DIR, "packs/pack2") + assert os.path.exists(fail_pack_path) + assert loader._get_sensors(fail_pack_path) == None + + def test_invalid_content_type(self): + packs_base_path = 
os.path.join(RESOURCES_DIR, "packs/") + loader = ContentPackLoader() + with pytest.raises(ValueError): + loader.get_content(base_dirs=[packs_base_path], content_type="stuff") + + def test_get_content_multiple_directories(self, mocker): + packs_base_path_1 = os.path.join(RESOURCES_DIR, "packs/") + packs_base_path_2 = os.path.join(RESOURCES_DIR, "packs2/") + base_dirs = [packs_base_path_1, packs_base_path_2] + + LOG.warning = mocker.Mock() + + loader = ContentPackLoader() + sensors = loader.get_content(base_dirs=base_dirs, content_type="sensors") + assert "pack1" in sensors # from packs/ + assert "pack3" in sensors # from packs2/ + + # Assert that a warning is emitted when a duplicated pack is found + expected_msg = ( + 'Pack "pack1" already found in ' + '"%s/packs/", ignoring content from ' + '"%s/packs2/"' % (RESOURCES_DIR, RESOURCES_DIR) + ) + LOG.warning.assert_called_once_with(expected_msg) + + def test_get_content_from_pack_success(self): + loader = ContentPackLoader() + pack_path = os.path.join(RESOURCES_DIR, "packs/pack1") + + sensors = loader.get_content_from_pack( + pack_dir=pack_path, content_type="sensors" + ) + assert sensors.endswith("packs/pack1/sensors") + + def test_get_content_from_pack_directory_doesnt_exist(self): + loader = ContentPackLoader() + pack_path = os.path.join(RESOURCES_DIR, "packs/pack100") + + message_regex = "Directory .*? 
doesn't exist" + with pytest.raises(ValueError, match=message_regex): + loader.get_content_from_pack(pack_dir=pack_path, content_type="sensors") + + def test_get_content_from_pack_no_sensors(self): + loader = ContentPackLoader() + pack_path = os.path.join(RESOURCES_DIR, "packs/pack2") + + result = loader.get_content_from_pack( + pack_dir=pack_path, content_type="sensors" + ) + assert result == None diff --git a/tests/content/test_content_utils.py b/tests/content/test_content_utils.py new file mode 100644 index 00000000..9a3baf6d --- /dev/null +++ b/tests/content/test_content_utils.py @@ -0,0 +1,231 @@ +# # st2common +# # from __future__ import absolute_import +# import os +# import os.path + +# # import unittest2 +# # from oslo_config import cfg + +# from st2common.constants.action import LIBS_DIR as ACTION_LIBS_DIR +# from st2common.content.utils import get_pack_base_path +# from st2common.content.utils import get_packs_base_paths +# from st2common.content.utils import get_aliases_base_paths +# from st2common.content.utils import get_pack_resource_file_abs_path +# from st2common.content.utils import get_pack_file_abs_path +# from st2common.content.utils import get_entry_point_abs_path +# from st2common.content.utils import get_action_libs_abs_path +# from st2common.content.utils import get_relative_path_to_pack_file +# from st2tests import config as tests_config +# from st2tests.fixturesloader import get_fixtures_packs_base_path + +# # TODO: We need to create fixures for everything inside of st2tests.config + + +# class ContentUtilsTestCase(unittest2.TestCase): +# @classmethod +# def setUpClass(cls): +# tests_config.parse_args() + +# def test_get_pack_base_paths(self): +# cfg.CONF.content.system_packs_base_path = '' +# cfg.CONF.content.packs_base_paths = '/opt/path1' +# result = get_packs_base_paths() +# self.assertEqual(result, ['/opt/path1']) + +# # Multiple paths, no trailing colon +# cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2' +# result = 
get_packs_base_paths() +# self.assertEqual(result, ['/opt/path1', '/opt/path2']) + +# # Multiple paths, trailing colon +# cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:' +# result = get_packs_base_paths() +# self.assertEqual(result, ['/opt/path1', '/opt/path2']) + +# # Multiple same paths +# cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:/opt/path1:/opt/path2' +# result = get_packs_base_paths() +# self.assertEqual(result, ['/opt/path1', '/opt/path2']) + +# # Assert system path is always first +# cfg.CONF.content.system_packs_base_path = '/opt/system' +# cfg.CONF.content.packs_base_paths = '/opt/path2:/opt/path1' +# result = get_packs_base_paths() +# self.assertEqual(result, ['/opt/system', '/opt/path2', '/opt/path1']) + +# # More scenarios +# orig_path = cfg.CONF.content.system_packs_base_path +# cfg.CONF.content.system_packs_base_path = '/tests/packs' + +# names = [ +# 'test_pack_1', +# 'test_pack_2', +# 'ma_pack' +# ] + +# for name in names: +# actual = get_pack_base_path(pack_name=name) +# expected = os.path.join(cfg.CONF.content.system_packs_base_path, +# name) +# self.assertEqual(actual, expected) + +# cfg.CONF.content.system_packs_base_path = orig_path + +# def test_get_aliases_base_paths(self): +# cfg.CONF.content.aliases_base_paths = '/opt/path1' +# result = get_aliases_base_paths() +# self.assertEqual(result, ['/opt/path1']) + +# # Multiple paths, no trailing colon +# cfg.CONF.content.aliases_base_paths = '/opt/path1:/opt/path2' +# result = get_aliases_base_paths() +# self.assertEqual(result, ['/opt/path1', '/opt/path2']) + +# # Multiple paths, trailing colon +# cfg.CONF.content.aliases_base_paths = '/opt/path1:/opt/path2:' +# result = get_aliases_base_paths() +# self.assertEqual(result, ['/opt/path1', '/opt/path2']) + +# # Multiple same paths +# cfg.CONF.content.aliases_base_paths = '/opt/path1:/opt/path2:/opt/path1:/opt/path2' +# result = get_aliases_base_paths() +# self.assertEqual(result, ['/opt/path1', '/opt/path2']) + +# def 
test_get_pack_resource_file_abs_path(self): +# # Mock the packs path to point to the fixtures directory +# cfg.CONF.content.packs_base_paths = get_fixtures_packs_base_path() + +# # Invalid resource type +# expected_msg = 'Invalid resource type: fooo' +# self.assertRaisesRegexp(ValueError, expected_msg, get_pack_resource_file_abs_path, +# pack_ref='dummy_pack_1', +# resource_type='fooo', +# file_path='test.py') + +# # Invalid paths (directory traversal and absolute paths) +# file_paths = ['/tmp/foo.py', '../foo.py', '/etc/passwd', '../../foo.py', +# '/opt/stackstorm/packs/invalid_pack/actions/my_action.py', +# '../../foo.py'] +# for file_path in file_paths: +# # action resource_type +# expected_msg = (r'Invalid file path: ".*%s"\. File path needs to be relative to the ' +# r'pack actions directory (.*). For example "my_action.py"\.' % +# (file_path)) +# self.assertRaisesRegexp(ValueError, expected_msg, get_pack_resource_file_abs_path, +# pack_ref='dummy_pack_1', +# resource_type='action', +# file_path=file_path) + +# # sensor resource_type +# expected_msg = (r'Invalid file path: ".*%s"\. File path needs to be relative to the ' +# r'pack sensors directory (.*). For example "my_sensor.py"\.' % +# (file_path)) +# self.assertRaisesRegexp(ValueError, expected_msg, get_pack_resource_file_abs_path, +# pack_ref='dummy_pack_1', +# resource_type='sensor', +# file_path=file_path) + +# # no resource type +# expected_msg = (r'Invalid file path: ".*%s"\. File path needs to be relative to the ' +# r'pack directory (.*). For example "my_action.py"\.' 
% +# (file_path)) +# self.assertRaisesRegexp(ValueError, expected_msg, get_pack_file_abs_path, +# pack_ref='dummy_pack_1', +# file_path=file_path) + +# # Valid paths +# file_paths = ['foo.py', 'a/foo.py', 'a/b/foo.py'] +# for file_path in file_paths: +# expected = os.path.join(get_fixtures_packs_base_path(), +# 'dummy_pack_1/actions', file_path) +# result = get_pack_resource_file_abs_path(pack_ref='dummy_pack_1', +# resource_type='action', +# file_path=file_path) +# self.assertEqual(result, expected) + +# def test_get_entry_point_absolute_path(self): +# orig_path = cfg.CONF.content.system_packs_base_path +# cfg.CONF.content.system_packs_base_path = '/tests/packs' +# acutal_path = get_entry_point_abs_path( +# pack='foo', +# entry_point='/tests/packs/foo/bar.py') +# self.assertEqual(acutal_path, '/tests/packs/foo/bar.py', 'Entry point path doesn\'t match.') +# cfg.CONF.content.system_packs_base_path = orig_path + +# def test_get_entry_point_absolute_path_empty(self): +# orig_path = cfg.CONF.content.system_packs_base_path +# cfg.CONF.content.system_packs_base_path = '/tests/packs' +# acutal_path = get_entry_point_abs_path(pack='foo', entry_point=None) +# self.assertEqual(acutal_path, None, 'Entry point path doesn\'t match.') +# acutal_path = get_entry_point_abs_path(pack='foo', entry_point='') +# self.assertEqual(acutal_path, None, 'Entry point path doesn\'t match.') +# cfg.CONF.content.system_packs_base_path = orig_path + +# def test_get_entry_point_relative_path(self): +# orig_path = cfg.CONF.content.system_packs_base_path +# cfg.CONF.content.system_packs_base_path = '/tests/packs' +# acutal_path = get_entry_point_abs_path(pack='foo', entry_point='foo/bar.py') +# expected_path = os.path.join(cfg.CONF.content.system_packs_base_path, 'foo', 'actions', +# 'foo/bar.py') +# self.assertEqual(acutal_path, expected_path, 'Entry point path doesn\'t match.') +# cfg.CONF.content.system_packs_base_path = orig_path + +# def test_get_action_libs_abs_path(self): +# orig_path = 
cfg.CONF.content.system_packs_base_path +# cfg.CONF.content.system_packs_base_path = '/tests/packs' + +# # entry point relative. +# acutal_path = get_action_libs_abs_path(pack='foo', entry_point='foo/bar.py') +# expected_path = os.path.join(cfg.CONF.content.system_packs_base_path, 'foo', 'actions', +# os.path.join('foo', ACTION_LIBS_DIR)) +# self.assertEqual(acutal_path, expected_path, 'Action libs path doesn\'t match.') + +# # entry point absolute. +# acutal_path = get_action_libs_abs_path( +# pack='foo', +# entry_point='/tests/packs/foo/tmp/foo.py') +# expected_path = os.path.join('/tests/packs/foo/tmp', ACTION_LIBS_DIR) +# self.assertEqual(acutal_path, expected_path, 'Action libs path doesn\'t match.') +# cfg.CONF.content.system_packs_base_path = orig_path + +# def test_get_relative_path_to_pack_file(self): +# packs_base_paths = get_fixtures_packs_base_path() + +# pack_ref = 'dummy_pack_1' + +# # 1. Valid paths +# file_path = os.path.join(packs_base_paths, 'dummy_pack_1/pack.yaml') +# result = get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=file_path) +# self.assertEqual(result, 'pack.yaml') + +# file_path = os.path.join(packs_base_paths, 'dummy_pack_1/actions/action.meta.yaml') +# result = get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=file_path) +# self.assertEqual(result, 'actions/action.meta.yaml') + +# file_path = os.path.join(packs_base_paths, 'dummy_pack_1/actions/lib/foo.py') +# result = get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=file_path) +# self.assertEqual(result, 'actions/lib/foo.py') + +# # Already relative +# file_path = 'actions/lib/foo2.py' +# result = get_relative_path_to_pack_file(pack_ref=pack_ref, file_path=file_path) +# self.assertEqual(result, 'actions/lib/foo2.py') + +# # 2. Invalid path - outside pack directory +# expected_msg = r'file_path (.*?) 
is not located inside the pack directory (.*?)' + +# file_path = os.path.join(packs_base_paths, 'dummy_pack_2/actions/lib/foo.py') +# self.assertRaisesRegexp(ValueError, expected_msg, get_relative_path_to_pack_file, +# pack_ref=pack_ref, file_path=file_path) + +# file_path = '/tmp/foo/bar.py' +# self.assertRaisesRegexp(ValueError, expected_msg, get_relative_path_to_pack_file, +# pack_ref=pack_ref, file_path=file_path) + +# file_path = os.path.join(packs_base_paths, '../dummy_pack_1/pack.yaml') +# self.assertRaisesRegexp(ValueError, expected_msg, get_relative_path_to_pack_file, +# pack_ref=pack_ref, file_path=file_path) + +# file_path = os.path.join(packs_base_paths, '../../dummy_pack_1/pack.yaml') +# self.assertRaisesRegexp(ValueError, expected_msg, get_relative_path_to_pack_file, +# pack_ref=pack_ref, file_path=file_path) diff --git a/tests/utils/test_casts.py b/tests/utils/test_casts.py index 1b53c9ef..0e1bd3ed 100644 --- a/tests/utils/test_casts.py +++ b/tests/utils/test_casts.py @@ -1,9 +1,10 @@ -# st2 +# st2common import json -from ultron8.utils.casts import get_cast import pytest +from ultron8.utils.casts import get_cast + class CastsTestCase: def test_cast_string(self): diff --git a/tests/utils/test_crypto_utils.py b/tests/utils/test_crypto_utils.py index 17887863..db457322 100644 --- a/tests/utils/test_crypto_utils.py +++ b/tests/utils/test_crypto_utils.py @@ -1,27 +1,28 @@ -# st2 +# st2common +import binascii +import json import os +from cryptography.exceptions import InvalidSignature +import pytest import six -import json -import binascii - from six.moves import range -from cryptography.exceptions import InvalidSignature -from ultron8.utils.crypto import KEYCZAR_HEADER_SIZE -from ultron8.utils.crypto import AESKey -from ultron8.utils.crypto import read_crypto_key -from ultron8.utils.crypto import symmetric_encrypt -from ultron8.utils.crypto import symmetric_decrypt -from ultron8.utils.crypto import cryptography_symmetric_encrypt -from ultron8.utils.crypto 
import cryptography_symmetric_decrypt +from ultron8.utils.crypto import ( + KEYCZAR_HEADER_SIZE, + AESKey, + cryptography_symmetric_decrypt, + cryptography_symmetric_encrypt, + read_crypto_key, + symmetric_decrypt, + symmetric_encrypt, +) -import pytest +from tests.conftest import fixtures_path # from st2tests.fixturesloader import get_fixtures_base_path -from tests.conftest import fixtures_path __all__ = ["CryptoUtilsTestCase", "CryptoUtilsKeyczarCompatibilityTestCase"] diff --git a/tests/utils/test_ip_utils.py b/tests/utils/test_ip_utils.py index e6f6502d..05c24c81 100644 --- a/tests/utils/test_ip_utils.py +++ b/tests/utils/test_ip_utils.py @@ -1,9 +1,9 @@ import pytest -from ultron8.utils.ip_utils import split_host_port +from ultron8.utils.ip_utils import split_host_port, is_ipv6, is_ipv4 -class IPUtilsTests: +class TestIPUtilsTests: def test_host_port_split(self): # Simple IPv4 @@ -77,3 +77,23 @@ def test_host_port_split(self): # self.assertRaises(Exception, split_host_port, host_str) with pytest.raises(Exception): split_host_port(host_str) + + # Validate ipv6 str + host_str = "fd9a:49c7:f8dd:5156:0:0:0:0" + res = is_ipv6(host_str) + assert res + + # invalid ipv6 str + host_str = "fd9a:49c7:f8dd:5156:0:0:0:0:0" + res = is_ipv6(host_str) + assert not res + + # Validate ipv4 str + host_str = "127.0.0.1" + res = is_ipv4(host_str) + assert res + + # invalid ipv4 str + host_str = "127.0.0.1.1" + res = is_ipv4(host_str) + assert not res diff --git a/tests/utils/test_jinja_render_crypto_filters.py b/tests/utils/test_jinja_render_crypto_filters.py index 96d3c705..57bae6e4 100644 --- a/tests/utils/test_jinja_render_crypto_filters.py +++ b/tests/utils/test_jinja_render_crypto_filters.py @@ -1,4 +1,4 @@ -# # st2 +# # st2common # from oslo_config import cfg # from st2tests.base import CleanDbTestCase diff --git a/tests/utils/test_jinja_render_json_escape_filters.py b/tests/utils/test_jinja_render_json_escape_filters.py index 35636205..3f284577 100644 --- 
a/tests/utils/test_jinja_render_json_escape_filters.py +++ b/tests/utils/test_jinja_render_json_escape_filters.py @@ -1,9 +1,9 @@ -# st2 +# st2common from ultron8.utils import jinja as jinja_utils -class JinjaUtilsJsonEscapeTestCase: +class TestJinjaUtilsJsonEscapeTestCase: def test_doublequotes(self): env = jinja_utils.get_jinja_environment() template = "{{ test_str | json_escape }}" diff --git a/tests/utils/test_jinja_render_jsonpath_query_filters.py b/tests/utils/test_jinja_render_jsonpath_query_filters.py index 1b9e19b1..1308bc89 100644 --- a/tests/utils/test_jinja_render_jsonpath_query_filters.py +++ b/tests/utils/test_jinja_render_jsonpath_query_filters.py @@ -1,9 +1,9 @@ -# st2 +# st2common from ultron8.utils import jinja as jinja_utils -class JinjaUtilsJsonpathQueryTestCase: +class TestJinjaUtilsJsonpathQueryTestCase: def test_jsonpath_query_static(self): env = jinja_utils.get_jinja_environment() obj = { diff --git a/tests/utils/test_jinja_render_path_filters.py b/tests/utils/test_jinja_render_path_filters.py index cca5a692..cdd04d4a 100644 --- a/tests/utils/test_jinja_render_path_filters.py +++ b/tests/utils/test_jinja_render_path_filters.py @@ -1,9 +1,9 @@ -# st2 +# st2common from ultron8.utils import jinja as jinja_utils -class JinjaUtilsPathFilterTestCase: +class TestJinjaUtilsPathFilterTestCase: def test_basename(self): env = jinja_utils.get_jinja_environment() diff --git a/tests/utils/test_jinja_render_regex_filters.py b/tests/utils/test_jinja_render_regex_filters.py index 308e06b8..c2b175fb 100644 --- a/tests/utils/test_jinja_render_regex_filters.py +++ b/tests/utils/test_jinja_render_regex_filters.py @@ -1,10 +1,11 @@ -# st2 +# st2common -from ultron8.utils import jinja as jinja_utils import pytest +from ultron8.utils import jinja as jinja_utils + -class JinjaUtilsRegexFilterTestCase: +class TestJinjaUtilsRegexFilterTestCase: def test_filters_regex_match(self): env = jinja_utils.get_jinja_environment() diff --git 
a/tests/utils/test_jinja_render_time_filters.py b/tests/utils/test_jinja_render_time_filters.py index 3f02235f..ee97969c 100644 --- a/tests/utils/test_jinja_render_time_filters.py +++ b/tests/utils/test_jinja_render_time_filters.py @@ -1,10 +1,11 @@ -# st2 +# st2common -from ultron8.utils import jinja as jinja_utils import pytest +from ultron8.utils import jinja as jinja_utils + -class JinjaUtilsTimeFilterTestCase: +class TestJinjaUtilsTimeFilterTestCase: def test_to_human_time_filter(self): env = jinja_utils.get_jinja_environment() diff --git a/tests/utils/test_jinja_render_version_filters.py b/tests/utils/test_jinja_render_version_filters.py index efca9baa..5a62ac7d 100644 --- a/tests/utils/test_jinja_render_version_filters.py +++ b/tests/utils/test_jinja_render_version_filters.py @@ -1,9 +1,9 @@ -# st2 +# st2common from ultron8.utils import jinja as jinja_utils -class JinjaUtilsVersionsFilterTestCase: +class TestJinjaUtilsVersionsFilterTestCase: def test_version_compare(self): env = jinja_utils.get_jinja_environment() diff --git a/tests/utils/test_misc_utils.py b/tests/utils/test_misc_utils.py new file mode 100644 index 00000000..34263be0 --- /dev/null +++ b/tests/utils/test_misc_utils.py @@ -0,0 +1,101 @@ +# st2 + +from ultron8.utils.misc import ( + lowercase_value, + rstrip_last_char, + sanitize_output, + strip_shell_chars, +) +from ultron8.utils.ujson import fast_deepcopy + +__all__ = ["MiscUtilTestCase"] + + +class TestMiscUtilTestCase: + def test_rstrip_last_char(self): + assert rstrip_last_char(None, "\n") == None + assert rstrip_last_char("stuff", None) == "stuff" + assert rstrip_last_char("", "\n") == "" + assert rstrip_last_char("foo", "\n") == "foo" + assert rstrip_last_char("foo\n", "\n") == "foo" + assert rstrip_last_char("foo\n\n", "\n") == "foo\n" + assert rstrip_last_char("foo\r", "\r") == "foo" + assert rstrip_last_char("foo\r\r", "\r") == "foo\r" + assert rstrip_last_char("foo\r\n", "\r\n") == "foo" + assert rstrip_last_char("foo\r\r\n", "\r\n") 
== "foo\r" + assert rstrip_last_char("foo\n\r", "\r\n") == "foo\n\r" + + def test_strip_shell_chars(self): + assert strip_shell_chars(None) == None + assert strip_shell_chars("foo") == "foo" + assert strip_shell_chars("foo\r") == "foo" + assert strip_shell_chars("fo\ro\r") == "fo\ro" + assert strip_shell_chars("foo\n") == "foo" + assert strip_shell_chars("fo\no\n") == "fo\no" + assert strip_shell_chars("foo\r\n") == "foo" + assert strip_shell_chars("fo\no\r\n") == "fo\no" + assert strip_shell_chars("foo\r\n\r\n") == "foo\r\n" + + def test_lowercase_value(self): + value = "TEST" + expected_value = "test" + assert expected_value == lowercase_value(value=value) + + value = ["testA", "TESTb", "TESTC"] + expected_value = ["testa", "testb", "testc"] + assert expected_value == lowercase_value(value=value) + + value = {"testA": "testB", "testC": "TESTD", "TESTE": "TESTE"} + expected_value = {"testa": "testb", "testc": "testd", "teste": "teste"} + assert expected_value == lowercase_value(value=value) + + def test_fast_deepcopy_success(self): + values = [ + "a", + u"٩(̾●̮̮̃̾•̃̾)۶", + 1, + [1, 2, "3", "b"], + {"a": 1, "b": "3333", "c": "d"}, + ] + expected_values = [ + "a", + u"٩(̾●̮̮̃̾•̃̾)۶", + 1, + [1, 2, "3", "b"], + {"a": 1, "b": "3333", "c": "d"}, + ] + + for value, expected_value in zip(values, expected_values): + result = fast_deepcopy(value) + assert result == value + assert result == expected_value + + def test_sanitize_output_use_pyt_false(self): + # pty is not used, \r\n shouldn't be replaced with \n + input_strs = [ + "foo", + "foo\n", + "foo\r\n", + "foo\nbar\nbaz\n", + "foo\r\nbar\r\nbaz\r\n", + ] + expected = ["foo", "foo", "foo", "foo\nbar\nbaz", "foo\r\nbar\r\nbaz"] + + for input_str, expected_output in zip(input_strs, expected): + output = sanitize_output(input_str, uses_pty=False) + assert expected_output == output + + def test_sanitize_output_use_pyt_true(self): + # pty is used, \r\n should be replaced with \n + input_strs = [ + "foo", + "foo\n", + 
"foo\r\n", + "foo\nbar\nbaz\n", + "foo\r\nbar\r\nbaz\r\n", + ] + expected = ["foo", "foo", "foo", "foo\nbar\nbaz", "foo\nbar\nbaz"] + + for input_str, expected_output in zip(input_strs, expected): + output = sanitize_output(input_str, uses_pty=True) + assert expected_output == output diff --git a/tests/utils/test_queue_utils.py b/tests/utils/test_queue_utils.py new file mode 100644 index 00000000..25de8926 --- /dev/null +++ b/tests/utils/test_queue_utils.py @@ -0,0 +1,40 @@ +# st2common +import re + +import pytest + +import ultron8.utils.queues as queue_utils + + +class TestQueueUtils: + def test_get_queue_name(self): + with pytest.raises(ValueError): + queue_utils.get_queue_name(queue_name_base=None, queue_name_suffix=None) + with pytest.raises(ValueError): + queue_utils.get_queue_name(queue_name_base="", queue_name_suffix=None) + assert ( + queue_utils.get_queue_name( + queue_name_base="u8.test.watch", queue_name_suffix=None + ) + == "u8.test.watch" + ) + assert ( + queue_utils.get_queue_name( + queue_name_base="u8.test.watch", queue_name_suffix="" + ) + == "u8.test.watch" + ) + queue_name = queue_utils.get_queue_name( + queue_name_base="u8.test.watch", + queue_name_suffix="foo", + add_random_uuid_to_suffix=True, + ) + pattern = re.compile(r"u8.test.watch.foo-\w") + assert re.match(pattern, queue_name) + + queue_name = queue_utils.get_queue_name( + queue_name_base="u8.test.watch", + queue_name_suffix="foo", + add_random_uuid_to_suffix=False, + ) + assert queue_name == "u8.test.watch.foo" diff --git a/tests/utils/test_time_jinja_filters.py b/tests/utils/test_time_jinja_filters.py index a771274a..0a591d59 100644 --- a/tests/utils/test_time_jinja_filters.py +++ b/tests/utils/test_time_jinja_filters.py @@ -1,8 +1,9 @@ -# st2 +# st2common -from ultron8.expressions.functions import time import pytest +from ultron8.expressions.functions import time + class TestTimeJinjaFilters: def test_to_human_time_from_seconds(self): diff --git a/tests/utils/test_util_file_system.py 
b/tests/utils/test_util_file_system.py index 5240855e..1ceb145d 100644 --- a/tests/utils/test_util_file_system.py +++ b/tests/utils/test_util_file_system.py @@ -6,12 +6,12 @@ from ultron8.utils.file_system import get_file_list CURRENT_DIR = os.path.dirname(__file__) -EXAMPLES_DIR = os.path.join(CURRENT_DIR, "../../../examples/packs/chatops_tutorial") +EXAMPLES_DIR = os.path.join(CURRENT_DIR, "../../examples/packs/chatops_tutorial") @pytest.mark.utilsonly @pytest.mark.unittest -class FileSystemUtilsTestCase: +class TestFileSystemUtilsTestCase: def test_get_file_list(self): # Standard exclude pattern directory = os.path.join(EXAMPLES_DIR, "actions") @@ -25,7 +25,13 @@ def test_get_file_list(self): "workflows/fail_on_odd_silent.yaml", ] result = get_file_list(directory=directory, exclude_patterns=["*.pyc"]) - assert expected == result + assert expected[0] in result + assert expected[1] in result + assert expected[2] in result + assert expected[3] in result + assert expected[4] in result + assert expected[5] in result + assert expected[6] in result # Custom exclude pattern expected = ["pizza.py"] diff --git a/tests/utils/test_utils_jinja.py b/tests/utils/test_utils_jinja.py index a8761410..3dd668b4 100644 --- a/tests/utils/test_utils_jinja.py +++ b/tests/utils/test_utils_jinja.py @@ -1,7 +1,7 @@ from ultron8.utils import jinja as jinja_utils -class JinjaUtilsRenderTestCase: +class TestJinjaUtilsRenderTestCase: def test_render_values(self): actual = jinja_utils.render_values( mapping={"k1": "{{a}}", "k2": "{{b}}"}, context={"a": "v1", "b": "v2"} diff --git a/tests/utils/test_utils_shell.py b/tests/utils/test_utils_shell.py index db6e72e5..fdae13b8 100644 --- a/tests/utils/test_utils_shell.py +++ b/tests/utils/test_utils_shell.py @@ -12,7 +12,7 @@ @pytest.mark.utilsonly @pytest.mark.unittest -class ShellUtilsTestCase: +class TestShellUtilsTestCase: def test_quote_unix(self): arguments = ["foo", "foo bar", "foo1 bar1", '"foo"', '"foo" "bar"', "'foo bar'"] expected_values 
= [ diff --git a/ultron8/api/models/system/common.py b/ultron8/api/models/system/common.py index 7dd32121..039c18f8 100644 --- a/ultron8/api/models/system/common.py +++ b/ultron8/api/models/system/common.py @@ -1,16 +1,4 @@ -# Copyright 2019 Extreme Networks, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# st2common __all__ = [ "InvalidReferenceError", diff --git a/ultron8/api/services/pack.py b/ultron8/api/services/pack.py new file mode 100644 index 00000000..eaca72a1 --- /dev/null +++ b/ultron8/api/services/pack.py @@ -0,0 +1,198 @@ +# # st2common + +# import itertools +# import json + +# import requests +# import six + +# # from oslo_config import cfg + +# import logging + +# # from st2common import log as logging +# from st2common.persistence.pack import Pack +# from st2common.util.misc import lowercase_value + + +# __all__ = [ +# "get_pack_by_ref", +# "fetch_pack_index", +# "get_pack_from_index", +# "search_pack_index", +# ] + +# EXCLUDE_FIELDS = ["repo_url", "email"] + +# SEARCH_PRIORITY = ["name", "keywords"] + +# LOG = logging.getLogger(__name__) + + +# def _build_index_list(index_url): +# if not index_url: +# # Reversing the indexes list from config so that the indexes have +# # descending (left-to-right) priority. +# # When multiple indexes have a pack with a given name, the index +# # that comes first in the list will be used. 
+# index_urls = cfg.CONF.content.index_url[::-1] +# elif isinstance(index_url, str): +# index_urls = [index_url] +# elif hasattr(index_url, "__iter__"): +# index_urls = index_url +# else: +# raise TypeError('"index_url" should either be a string or an iterable object.') +# return index_urls + + +# def _fetch_and_compile_index(index_urls, logger=None, proxy_config=None): +# """ +# Go through the index list and compile results into a single object. +# """ +# status = [] +# index = {} + +# proxies_dict = {} +# verify = True + +# if proxy_config: +# https_proxy = proxy_config.get("https_proxy", None) +# http_proxy = proxy_config.get("http_proxy", None) +# ca_bundle_path = proxy_config.get("proxy_ca_bundle_path", None) + +# if https_proxy: +# proxies_dict["https"] = https_proxy +# verify = ca_bundle_path or True + +# if http_proxy: +# proxies_dict["http"] = http_proxy + +# for index_url in index_urls: +# index_status = {"url": index_url, "packs": 0, "message": None, "error": None} +# index_json = None + +# try: +# request = requests.get(index_url, proxies=proxies_dict, verify=verify) +# request.raise_for_status() +# index_json = request.json() +# except ValueError as e: +# index_status["error"] = "malformed" +# index_status["message"] = repr(e) +# except requests.exceptions.RequestException as e: +# index_status["error"] = "unresponsive" +# index_status["message"] = repr(e) +# except Exception as e: +# index_status["error"] = "other errors" +# index_status["message"] = repr(e) + +# if index_json == {}: +# index_status["error"] = "empty" +# index_status["message"] = "The index URL returned an empty object." +# elif type(index_json) is list: +# index_status["error"] = "malformed" +# index_status["message"] = "Expected an index object, got a list instead." +# elif index_json and "packs" not in index_json: +# index_status["error"] = "malformed" +# index_status["message"] = 'Index object is missing "packs" attribute.' 
+ +# if index_status["error"]: +# logger.error("Index parsing error: %s" % json.dumps(index_status, indent=4)) +# else: +# # TODO: Notify on a duplicate pack aka pack being overwritten from a different index +# packs_data = index_json["packs"] +# index_status["message"] = "Success." +# index_status["packs"] = len(packs_data) +# index.update(packs_data) + +# status.append(index_status) + +# return index, status + + +# def get_pack_by_ref(pack_ref): +# """ +# Retrieve PackDB by the provided reference. +# """ +# pack_db = Pack.get_by_ref(pack_ref) +# return pack_db + + +# def fetch_pack_index(index_url=None, logger=None, allow_empty=False, proxy_config=None): +# """ +# Fetch the pack indexes (either from the config or provided as an argument) +# and return the object. +# """ +# logger = logger or LOG + +# index_urls = _build_index_list(index_url) +# index, status = _fetch_and_compile_index( +# index_urls=index_urls, logger=logger, proxy_config=proxy_config +# ) + +# # If one of the indexes on the list is unresponsive, we do not throw +# # immediately. The only case where an exception is raised is when no +# # results could be obtained from all listed indexes. +# # This behavior allows for mirrors / backups and handling connection +# # or network issues in one of the indexes. +# if not index and not allow_empty: +# raise ValueError( +# "No results from the %s: tried %s.\nStatus: %s" +# % ( +# ("index" if len(index_urls) == 1 else "indexes"), +# ", ".join(index_urls), +# json.dumps(status, indent=4), +# ) +# ) +# return (index, status) + + +# def get_pack_from_index(pack, proxy_config=None): +# """ +# Search index by pack name. +# Returns a pack. +# """ +# if not pack: +# raise ValueError("Pack name must be specified.") + +# index, _ = fetch_pack_index(proxy_config=proxy_config) + +# return index.get(pack) + + +# def search_pack_index( +# query, exclude=None, priority=None, case_sensitive=True, proxy_config=None +# ): +# """ +# Search the pack index by query. 
+# Returns a list of matches for a query. +# """ +# if not query: +# raise ValueError("Query must be specified.") + +# if not exclude: +# exclude = EXCLUDE_FIELDS +# if not priority: +# priority = SEARCH_PRIORITY + +# if not case_sensitive: +# query = str(query).lower() + +# index, _ = fetch_pack_index(proxy_config=proxy_config) + +# matches = [[] for i in range(len(priority) + 1)] +# for pack in six.itervalues(index): +# for key, value in six.iteritems(pack): +# if not hasattr(value, "__contains__"): +# value = str(value) + +# if not case_sensitive: +# value = lowercase_value(value=value) + +# if key not in exclude and query in value: +# if key in priority: +# matches[priority.index(key)].append(pack) +# else: +# matches[-1].append(pack) +# break + +# return list(itertools.chain.from_iterable(matches)) diff --git a/ultron8/api/settings.py b/ultron8/api/settings.py index 59079766..e2ada023 100644 --- a/ultron8/api/settings.py +++ b/ultron8/api/settings.py @@ -2,6 +2,7 @@ from datetime import timedelta import logging import os +import platform import uuid from pydantic import EmailStr @@ -23,6 +24,13 @@ "CRITICAL": logging.CRITICAL, } +PLATFORM_ULTRON_SYSTEM_BASE_PATH_MAP = { + "Darwin": "/usr/local/opt/ultron8", + "Linux": "/opt/ultron8", +} + +CURRENT_PLATFORM = platform.system() + def getenv_boolean(var_name: str, default_value: bool = False) -> bool: result = default_value @@ -129,6 +137,68 @@ def getenv_boolean(var_name: str, default_value: bool = False) -> bool: SCOPES = {"read": "Read", "write": "Write"} + +# ~~~~~ SYSTEM CONFIG OPTIONS ~~~~~ +SYSTEM_BASE_PATH = os.getenv( + "ULTRON_SYSTEM_BASE_PATH", PLATFORM_ULTRON_SYSTEM_BASE_PATH_MAP[CURRENT_PLATFORM] +) +SYSTEM_VALIDATE_TRIGGER_PARAMETERS = getenv_boolean( + "ULTRON_SYSTEM_VALIDATE_TRIGGER_PARAMETERS", default_value=False +) +SYSTEM_VALIDATE_TRIGGER_PAYLOAD = getenv_boolean( + "ULTRON_SYSTEM_VALIDATE_TRIGGER_PAYLOAD", default_value=False +) +SYSTEM_VALIDATE_OUTPUT_SCHEMA = getenv_boolean( + 
"ULTRON_SYSTEM_VALIDATE_OUTPUT_SCHEMA", default_value=False +) +SYSTEM_PACKS_BASE_PATH = os.path.join(SYSTEM_BASE_PATH, "packs") +SYSTEM_RUNNERS_BASE_PATH = os.path.join(SYSTEM_BASE_PATH, "runners") + +# ~~~~~ SYSTEM CONFIG OPTIONS ~~~~~ +CONTENT_PACK_GROUP = os.getenv("ULTRON_CONTENT_PACK_GROUP", "u8packs") +CONTENT_SYSTEM_PACKS_BASE_PATH = os.getenv( + "ULTRON_CONTENT_SYSTEM_PACKS_BASE_PATH", SYSTEM_PACKS_BASE_PATH +) +CONTENT_SYSTEM_RUNNERS_BASE_PATH = os.getenv( + "ULTRON_CONTENT_SYSTEM_RUNNERS_BASE_PATH", SYSTEM_RUNNERS_BASE_PATH +) +# Paths which will be searched for integration packs. +CONTENT_PACKS_BASE_PATHS = os.getenv("ULTRON_CONTENT_PACKS_BASE_PATHS", None) +# Paths which will be searched for runners. NOTE: This option has been deprecated and it's unused since Ultron8 v3.0.0 +CONTENT_RUNNERS_BASE_PATHS = os.getenv("ULTRON_CONTENT_RUNNERS_BASE_PATHS", None) +# A URL pointing to the pack index. StackStorm Exchange is used by default. +# Use a comma-separated list for multiple indexes if you want to get other packs discovered with "st2 pack search". +CONTENT_INDEX_URL = [f"{SERVER_HOST}/v1/index.json"] + +# ~~~~~ ACTIONRUNNER CONFIG OPTIONS ~~~~~ +# import sys;print(sys.executable)" + +# default_python_bin_path = sys.executable +# default_python3_bin_path = find_executable('python3') +# base_dir = os.path.dirname(os.path.realpath(default_python_bin_path)) +# default_virtualenv_bin_path = os.path.join(base_dir, 'virtualenv') + +# Python binary which will be used by Python actions. +# PYTHON_BINARY = +# Python 3 binary which will be used by Python actions for packs which use Python 3 virtual environment. +# PYTHON3_BINARY = +# Prefix for Python 3 installation (e.g. /opt/python3.6). If not specified, it tries to find Python 3 libraries in /usr/lib and /usr/local/lib. +# PYTHON3_PREFIX = +# Virtualenv binary which should be used to create pack virtualenvs. 
+# VIRTUALENV_BINARY = +# PYTHON_RUNNER_LOG_LEVEL = +# VIRTUALENV_OPTS = +# List of pip options to be passed to "pip install" command when installing pack dependencies into pack virtual environment. +# PIP_OPTS = +# True to store and stream action output (stdout and stderr) in real-time. +# STREAM_OUTPUT = +# +# Buffer size to use for real time action output streaming. 0 means unbuffered +# 1 means line buffered, -1 means system default, which usually means fully +# buffered and any other positive value means use a buffer of (approximately) +# that size +# STREAM_OUTPUT_BUFFER_SIZE = + # @dataclass # class SettingsConfigProxy: # api_v1_str: str diff --git a/ultron8/constants/action.py b/ultron8/constants/action.py new file mode 100644 index 00000000..1e5d5a8c --- /dev/null +++ b/ultron8/constants/action.py @@ -0,0 +1,121 @@ +# st2common + +__all__ = [ + "ACTION_NAME", + "ACTION_ID", + "LIBS_DIR", + "LIVEACTION_STATUS_REQUESTED", + "LIVEACTION_STATUS_SCHEDULED", + "LIVEACTION_STATUS_DELAYED", + "LIVEACTION_STATUS_RUNNING", + "LIVEACTION_STATUS_SUCCEEDED", + "LIVEACTION_STATUS_FAILED", + "LIVEACTION_STATUS_TIMED_OUT", + "LIVEACTION_STATUS_CANCELING", + "LIVEACTION_STATUS_CANCELED", + "LIVEACTION_STATUS_PENDING", + "LIVEACTION_STATUS_PAUSING", + "LIVEACTION_STATUS_PAUSED", + "LIVEACTION_STATUS_RESUMING", + "LIVEACTION_STATUSES", + "LIVEACTION_RUNNABLE_STATES", + "LIVEACTION_DELAYED_STATES", + "LIVEACTION_CANCELABLE_STATES", + "LIVEACTION_FAILED_STATES", + "LIVEACTION_COMPLETED_STATES", + "ACTION_OUTPUT_RESULT_DELIMITER", + "ACTION_CONTEXT_KV_PREFIX", + "ACTION_PARAMETERS_KV_PREFIX", + "ACTION_RESULTS_KV_PREFIX", + "WORKFLOW_RUNNER_TYPES", +] + + +ACTION_NAME = "name" +ACTION_ID = "id" +ACTION_PACK = "pack" + +LIBS_DIR = "lib" + +LIVEACTION_STATUS_REQUESTED = "requested" +LIVEACTION_STATUS_SCHEDULED = "scheduled" +LIVEACTION_STATUS_DELAYED = "delayed" +LIVEACTION_STATUS_RUNNING = "running" +LIVEACTION_STATUS_SUCCEEDED = "succeeded" +LIVEACTION_STATUS_FAILED = "failed" 
+LIVEACTION_STATUS_TIMED_OUT = "timeout" +LIVEACTION_STATUS_ABANDONED = "abandoned" +LIVEACTION_STATUS_CANCELING = "canceling" +LIVEACTION_STATUS_CANCELED = "canceled" +LIVEACTION_STATUS_PENDING = "pending" +LIVEACTION_STATUS_PAUSING = "pausing" +LIVEACTION_STATUS_PAUSED = "paused" +LIVEACTION_STATUS_RESUMING = "resuming" + +LIVEACTION_STATUSES = [ + LIVEACTION_STATUS_REQUESTED, + LIVEACTION_STATUS_SCHEDULED, + LIVEACTION_STATUS_DELAYED, + LIVEACTION_STATUS_RUNNING, + LIVEACTION_STATUS_SUCCEEDED, + LIVEACTION_STATUS_FAILED, + LIVEACTION_STATUS_TIMED_OUT, + LIVEACTION_STATUS_ABANDONED, + LIVEACTION_STATUS_CANCELING, + LIVEACTION_STATUS_CANCELED, + LIVEACTION_STATUS_PENDING, + LIVEACTION_STATUS_PAUSING, + LIVEACTION_STATUS_PAUSED, + LIVEACTION_STATUS_RESUMING, +] + +ACTION_OUTPUT_RESULT_DELIMITER = "%%%%%~=~=~=************=~=~=~%%%%" +ACTION_CONTEXT_KV_PREFIX = "action_context" +ACTION_PARAMETERS_KV_PREFIX = "action_parameters" +ACTION_RESULTS_KV_PREFIX = "action_results" + +LIVEACTION_RUNNABLE_STATES = [ + LIVEACTION_STATUS_REQUESTED, + LIVEACTION_STATUS_SCHEDULED, + LIVEACTION_STATUS_PAUSING, + LIVEACTION_STATUS_PAUSED, + LIVEACTION_STATUS_RESUMING, +] + +LIVEACTION_DELAYED_STATES = [LIVEACTION_STATUS_DELAYED] + +LIVEACTION_CANCELABLE_STATES = [ + LIVEACTION_STATUS_REQUESTED, + LIVEACTION_STATUS_SCHEDULED, + LIVEACTION_STATUS_DELAYED, + LIVEACTION_STATUS_RUNNING, + LIVEACTION_STATUS_PAUSING, + LIVEACTION_STATUS_PAUSED, + LIVEACTION_STATUS_RESUMING, +] + +LIVEACTION_COMPLETED_STATES = [ + LIVEACTION_STATUS_SUCCEEDED, + LIVEACTION_STATUS_FAILED, + LIVEACTION_STATUS_TIMED_OUT, + LIVEACTION_STATUS_CANCELED, + LIVEACTION_STATUS_ABANDONED, +] + +LIVEACTION_FAILED_STATES = [ + LIVEACTION_STATUS_FAILED, + LIVEACTION_STATUS_TIMED_OUT, + LIVEACTION_STATUS_ABANDONED, +] + +LIVEACTION_PAUSE_STATES = [LIVEACTION_STATUS_PAUSING, LIVEACTION_STATUS_PAUSED] + +LIVEACTION_CANCEL_STATES = [LIVEACTION_STATUS_CANCELING, LIVEACTION_STATUS_CANCELED] + +WORKFLOW_RUNNER_TYPES = 
["action-chain", "mistral-v2", "orquesta"] + +# Linux's limit for param size +_LINUX_PARAM_LIMIT = 131072 +# Overhead for `--parameters=` + 2 to grow on. +_ST2_PARAM_BUFFER = 15 +MAX_PARAM_LENGTH = _LINUX_PARAM_LIMIT - _ST2_PARAM_BUFFER diff --git a/ultron8/constants/error_messages.py b/ultron8/constants/error_messages.py new file mode 100644 index 00000000..bc244d84 --- /dev/null +++ b/ultron8/constants/error_messages.py @@ -0,0 +1,18 @@ +# st2common + +__all__ = ["PACK_VIRTUALENV_DOESNT_EXIST", "PACK_VIRTUALENV_USES_PYTHON3"] + +PACK_VIRTUALENV_DOESNT_EXIST = """ +The virtual environment (%(virtualenv_path)s) for pack "%(pack)s" does not exist. Normally this is +created when you install a pack using "st2 pack install". If you installed your pack by some other +means, you can create a new virtual environment using the command: +"st2 run packs.setup_virtualenv packs=%(pack)s" +""" + +PACK_VIRTUALENV_USES_PYTHON3 = """ +Virtual environment (%(virtualenv_path)s) for pack "%(pack)s" is using Python 3. +Using Python 3 virtual environments in mixed deployments is only supported for Python runner +actions and not sensors. 
If you want to run this sensor, please re-create the +virtual environment with python2 binary: +"st2 run packs.setup_virtualenv packs=%(pack)s python3=false" +""" diff --git a/ultron8/constants/exit_codes.py b/ultron8/constants/exit_codes.py new file mode 100644 index 00000000..7254b53c --- /dev/null +++ b/ultron8/constants/exit_codes.py @@ -0,0 +1,13 @@ +# st2common + +__all__ = [ + "SUCCESS_EXIT_CODE", + "FAILURE_EXIT_CODE", + "SIGKILL_EXIT_CODE", + "SIGTERM_EXIT_CODE", +] + +SUCCESS_EXIT_CODE = 0 +FAILURE_EXIT_CODE = 1 +SIGKILL_EXIT_CODE = 9 +SIGTERM_EXIT_CODE = 15 diff --git a/ultron8/constants/garbage_collection.py b/ultron8/constants/garbage_collection.py new file mode 100644 index 00000000..7470b2a2 --- /dev/null +++ b/ultron8/constants/garbage_collection.py @@ -0,0 +1,21 @@ +# st2common + +__all__ = [ + "DEFAULT_COLLECTION_INTERVAL", + "DEFAULT_SLEEP_DELAY", + "MINIMUM_TTL_DAYS", + "MINIMUM_TTL_DAYS_EXECUTION_OUTPUT", +] + + +# Default garbage collection interval (in seconds) +DEFAULT_COLLECTION_INTERVAL = 600 + +# How long to wait / sleep between collection of different object types (in seconds) +DEFAULT_SLEEP_DELAY = 2 + +# Minimum value for the TTL. If user supplies value lower than this, we will throw. +MINIMUM_TTL_DAYS = 7 + +# Minimum TTL in days for action execution output objects. +MINIMUM_TTL_DAYS_EXECUTION_OUTPUT = 1 diff --git a/ultron8/constants/keyvalue.py b/ultron8/constants/keyvalue.py index 6a14a7fd..aef6c119 100644 --- a/ultron8/constants/keyvalue.py +++ b/ultron8/constants/keyvalue.py @@ -1,4 +1,4 @@ -# st2 +# st2common __all__ = [ "ALLOWED_SCOPES", @@ -18,8 +18,8 @@ # Parent namespace for all items in key-value store DATASTORE_PARENT_SCOPE = "u8kv" DATASTORE_SCOPE_SEPARATOR = ( - "." -) # To separate scope from datastore namespace. E.g. u8kv.system + "." # To separate scope from datastore namespace. E.g. u8kv.system +) # Namespace to contain all system/global scoped variables in key-value store. 
SYSTEM_SCOPE = "system" diff --git a/ultron8/constants/meta.py b/ultron8/constants/meta.py new file mode 100644 index 00000000..603fffd0 --- /dev/null +++ b/ultron8/constants/meta.py @@ -0,0 +1,7 @@ +# st2common +import yaml + +__all__ = ["ALLOWED_EXTS", "PARSER_FUNCS"] + +ALLOWED_EXTS = [".yaml", ".yml"] +PARSER_FUNCS = {".yml": yaml.safe_load, ".yaml": yaml.safe_load} diff --git a/ultron8/constants/packs.py b/ultron8/constants/packs.py index 2c9813fc..5e4988c0 100644 --- a/ultron8/constants/packs.py +++ b/ultron8/constants/packs.py @@ -1,4 +1,4 @@ -# st2 +# st2common __all__ = [ "PACKS_PACK_NAME", diff --git a/ultron8/constants/secrets.py b/ultron8/constants/secrets.py new file mode 100644 index 00000000..c705a07c --- /dev/null +++ b/ultron8/constants/secrets.py @@ -0,0 +1,18 @@ +# st2common + +__all__ = ["MASKED_ATTRIBUTES_BLACKLIST", "MASKED_ATTRIBUTE_VALUE"] + +# A blacklist of attributes which should be masked in the log messages by default. +# Note: If an attribute is an object or a dict, we try to recursively process it and mask the +# values. +MASKED_ATTRIBUTES_BLACKLIST = [ + "password", + "auth_token", + "token", + "secret", + "credentials", + "st2_auth_token", +] + +# Value with which the masked attribute values are replaced +MASKED_ATTRIBUTE_VALUE = "********" diff --git a/ultron8/constants/timer.py b/ultron8/constants/timer.py new file mode 100644 index 00000000..21b344cf --- /dev/null +++ b/ultron8/constants/timer.py @@ -0,0 +1,8 @@ +# st2common + +__all__ = ["TIMER_ENABLED_LOG_LINE", "TIMER_DISABLED_LOG_LINE"] + + +# Integration tests look for these loglines to validate timer enable/disable +TIMER_ENABLED_LOG_LINE = "Timer is enabled." +TIMER_DISABLED_LOG_LINE = "Timer is disabled." 
diff --git a/ultron8/constants/trace.py b/ultron8/constants/trace.py new file mode 100644 index 00000000..23dce112 --- /dev/null +++ b/ultron8/constants/trace.py @@ -0,0 +1,7 @@ +# st2common + +__all__ = ["TRACE_CONTEXT", "TRACE_ID"] + + +TRACE_CONTEXT = "trace_context" +TRACE_ID = "trace_tag" diff --git a/ultron8/constants/types.py b/ultron8/constants/types.py new file mode 100644 index 00000000..c98d1d2c --- /dev/null +++ b/ultron8/constants/types.py @@ -0,0 +1,45 @@ +# st2common +from enum import Enum + +__all__ = ["ResourceType"] + + +class ResourceType(str, Enum): + """ + Enum representing a valid resource type in a system. + """ + + # System resources + RUNNER_TYPE = "runner_type" + + # Pack resources + PACK = "pack" + ACTION = "action" + ACTION_ALIAS = "action_alias" + SENSOR_TYPE = "sensor_type" + TRIGGER_TYPE = "trigger_type" + TRIGGER = "trigger" + TRIGGER_INSTANCE = "trigger_instance" + RULE = "rule" + RULE_ENFORCEMENT = "rule_enforcement" + + # Note: Policy type is a global resource and policies belong to a pack + POLICY_TYPE = "policy_type" + POLICY = "policy" + + # Other resources + EXECUTION = "execution" + EXECUTION_REQUEST = "execution_request" + KEY_VALUE_PAIR = "key_value_pair" + + WEBHOOK = "webhook" + TIMER = "timer" + API_KEY = "api_key" + TRACE = "trace" + + # Special resource type for stream related stuff + STREAM = "stream" + + INQUIRY = "inquiry" + + UNKNOWN = "unknown" diff --git a/ultron8/content/__init__.py b/ultron8/content/__init__.py new file mode 100644 index 00000000..d0cd0a5c --- /dev/null +++ b/ultron8/content/__init__.py @@ -0,0 +1,3 @@ +"""Modules that deal with interacting w/ the local filesystem.""" + +# st2common diff --git a/ultron8/content/bootstrap.py b/ultron8/content/bootstrap.py new file mode 100644 index 00000000..4be1e53f --- /dev/null +++ b/ultron8/content/bootstrap.py @@ -0,0 +1,399 @@ +# # st2common +# import os +# import sys +# import logging + +# from oslo_config import cfg + +# import st2common 
+# from st2common import config +# from st2common.script_setup import setup as common_setup +# from st2common.script_setup import teardown as common_teardown +# from st2common.bootstrap.base import ResourceRegistrar +# import st2common.bootstrap.triggersregistrar as triggers_registrar +# import st2common.bootstrap.sensorsregistrar as sensors_registrar +# import st2common.bootstrap.actionsregistrar as actions_registrar +# import st2common.bootstrap.aliasesregistrar as aliases_registrar +# import st2common.bootstrap.policiesregistrar as policies_registrar +# import st2common.bootstrap.runnersregistrar as runners_registrar +# import st2common.bootstrap.rulesregistrar as rules_registrar +# import st2common.bootstrap.ruletypesregistrar as rule_types_registrar +# import st2common.bootstrap.configsregistrar as configs_registrar +# import st2common.content.utils as content_utils +# from st2common.metrics.base import Timer +# from st2common.util.virtualenvs import setup_pack_virtualenv + +# __all__ = [ +# 'main' +# ] + +# LOG = logging.getLogger('st2common.content.bootstrap') + +# cfg.CONF.register_cli_opt(cfg.BoolOpt('experimental', default=False)) + + +# def register_opts(): +# content_opts = [ +# cfg.BoolOpt('all', default=False, help='Register sensors, actions and rules.'), +# cfg.BoolOpt('triggers', default=False, help='Register triggers.'), +# cfg.BoolOpt('sensors', default=False, help='Register sensors.'), +# cfg.BoolOpt('actions', default=False, help='Register actions.'), +# cfg.BoolOpt('runners', default=False, help='Register runners.'), +# cfg.BoolOpt('rules', default=False, help='Register rules.'), +# cfg.BoolOpt('aliases', default=False, help='Register aliases.'), +# cfg.BoolOpt('policies', default=False, help='Register policies.'), +# cfg.BoolOpt('configs', default=False, help='Register and load pack configs.'), + +# cfg.StrOpt('pack', default=None, help='Directory to the pack to register content from.'), +# cfg.StrOpt('runner-dir', default=None, 
help='Directory to load runners from.'), +# cfg.BoolOpt('setup-virtualenvs', default=False, help=('Setup Python virtual environments ' +# 'all the Python runner actions.')), + +# # General options +# # Note: This value should default to False since we want fail on failure behavior by +# # default. +# cfg.BoolOpt('no-fail-on-failure', default=False, +# help=('Don\'t exit with non-zero if some resource registration fails.')), +# # Note: Fail on failure is now a default behavior. This flag is only left here for backward +# # compatibility reasons, but it's not actually used. +# cfg.BoolOpt('fail-on-failure', default=True, +# help=('Exit with non-zero if some resource registration fails.')) +# ] +# try: +# cfg.CONF.register_cli_opts(content_opts, group='register') +# except: +# sys.stderr.write('Failed registering opts.\n') + + +# register_opts() + + +# def setup_virtualenvs(): +# """ +# Setup Python virtual environments for all the registered or the provided pack. +# """ + +# LOG.info('=========================================================') +# LOG.info('########### Setting up virtual environments #############') +# LOG.info('=========================================================') +# pack_dir = cfg.CONF.register.pack +# fail_on_failure = not cfg.CONF.register.no_fail_on_failure + +# registrar = ResourceRegistrar() + +# if pack_dir: +# pack_name = os.path.basename(pack_dir) +# pack_names = [pack_name] + +# # 1. Register pack +# registrar.register_pack(pack_name=pack_name, pack_dir=pack_dir) +# else: +# # 1. Register pack +# base_dirs = content_utils.get_packs_base_paths() +# registrar.register_packs(base_dirs=base_dirs) + +# # 2. 
Retrieve available packs (aka packs which have been registered) +# pack_names = registrar.get_registered_packs() + +# setup_count = 0 +# for pack_name in pack_names: +# try: +# setup_pack_virtualenv(pack_name=pack_name, update=True, logger=LOG) +# except Exception as e: +# exc_info = not fail_on_failure +# LOG.warning('Failed to setup virtualenv for pack "%s": %s', pack_name, e, +# exc_info=exc_info) + +# if fail_on_failure: +# raise e +# else: +# setup_count += 1 + +# LOG.info('Setup virtualenv for %s pack(s).' % (setup_count)) + + +# def register_triggers(): +# pack_dir = cfg.CONF.register.pack +# fail_on_failure = not cfg.CONF.register.no_fail_on_failure + +# registered_count = 0 + +# try: +# LOG.info('=========================================================') +# LOG.info('############## Registering triggers #####################') +# LOG.info('=========================================================') +# with Timer(key='st2.register.triggers'): +# registered_count = triggers_registrar.register_triggers( +# pack_dir=pack_dir, +# fail_on_failure=fail_on_failure +# ) +# except Exception as e: +# exc_info = not fail_on_failure +# LOG.warning('Failed to register sensors: %s', e, exc_info=exc_info) + +# if fail_on_failure: +# raise e + +# LOG.info('Registered %s triggers.' 
% (registered_count)) + + +# def register_sensors(): +# pack_dir = cfg.CONF.register.pack +# fail_on_failure = not cfg.CONF.register.no_fail_on_failure + +# registered_count = 0 + +# try: +# LOG.info('=========================================================') +# LOG.info('############## Registering sensors ######################') +# LOG.info('=========================================================') +# with Timer(key='st2.register.sensors'): +# registered_count = sensors_registrar.register_sensors( +# pack_dir=pack_dir, +# fail_on_failure=fail_on_failure +# ) +# except Exception as e: +# exc_info = not fail_on_failure +# LOG.warning('Failed to register sensors: %s', e, exc_info=exc_info) + +# if fail_on_failure: +# raise e + +# LOG.info('Registered %s sensors.' % (registered_count)) + + +# def register_runners(): +# # Register runners +# registered_count = 0 +# fail_on_failure = cfg.CONF.register.fail_on_failure + +# # 1. Register runner types +# try: +# LOG.info('=========================================================') +# LOG.info('############## Registering runners ######################') +# LOG.info('=========================================================') +# with Timer(key='st2.register.runners'): +# registered_count = runners_registrar.register_runners( +# fail_on_failure=fail_on_failure, +# experimental=False +# ) +# except Exception as error: +# exc_info = not fail_on_failure + +# # TODO: Narrow exception window +# LOG.warning('Failed to register runners: %s', error, exc_info=exc_info) + +# if fail_on_failure: +# raise error + +# LOG.info('Registered %s runners.', registered_count) + + +# def register_actions(): +# # Register runnertypes and actions. The order is important because actions require action +# # types to be present in the system. 
+# pack_dir = cfg.CONF.register.pack +# fail_on_failure = not cfg.CONF.register.no_fail_on_failure + +# registered_count = 0 + +# try: +# LOG.info('=========================================================') +# LOG.info('############## Registering actions ######################') +# LOG.info('=========================================================') +# with Timer(key='st2.register.actions'): +# registered_count = actions_registrar.register_actions( +# pack_dir=pack_dir, +# fail_on_failure=fail_on_failure +# ) +# except Exception as e: +# exc_info = not fail_on_failure +# LOG.warning('Failed to register actions: %s', e, exc_info=exc_info) + +# if fail_on_failure: +# raise e + +# LOG.info('Registered %s actions.' % (registered_count)) + + +# def register_rules(): +# # Register ruletypes and rules. +# pack_dir = cfg.CONF.register.pack +# fail_on_failure = not cfg.CONF.register.no_fail_on_failure + +# registered_count = 0 + +# try: +# LOG.info('=========================================================') +# LOG.info('############## Registering rules ########################') +# LOG.info('=========================================================') +# rule_types_registrar.register_rule_types() +# except Exception as e: +# LOG.warning('Failed to register rule types: %s', e, exc_info=True) +# return + +# try: +# with Timer(key='st2.register.rules'): +# registered_count = rules_registrar.register_rules( +# pack_dir=pack_dir, +# fail_on_failure=fail_on_failure +# ) +# except Exception as e: +# exc_info = not fail_on_failure +# LOG.warning('Failed to register rules: %s', e, exc_info=exc_info) + +# if fail_on_failure: +# raise e + +# LOG.info('Registered %s rules.', registered_count) + + +# def register_aliases(): +# pack_dir = cfg.CONF.register.pack +# fail_on_failure = not cfg.CONF.register.no_fail_on_failure + +# registered_count = 0 + +# try: +# LOG.info('=========================================================') +# LOG.info('############## Registering aliases 
######################') +# LOG.info('=========================================================') +# with Timer(key='st2.register.aliases'): +# registered_count = aliases_registrar.register_aliases( +# pack_dir=pack_dir, +# fail_on_failure=fail_on_failure +# ) +# except Exception as e: +# if fail_on_failure: +# raise e + +# LOG.warning('Failed to register aliases.', exc_info=True) + +# LOG.info('Registered %s aliases.', registered_count) + + +# def register_policies(): +# # Register policy types and policies. +# pack_dir = cfg.CONF.register.pack +# fail_on_failure = not cfg.CONF.register.no_fail_on_failure + +# registered_type_count = 0 + +# try: +# LOG.info('=========================================================') +# LOG.info('############## Registering policy types #################') +# LOG.info('=========================================================') +# with Timer(key='st2.register.policies'): +# registered_type_count = policies_registrar.register_policy_types(st2common) +# except Exception: +# LOG.warning('Failed to register policy types.', exc_info=True) + +# LOG.info('Registered %s policy types.', registered_type_count) + +# registered_count = 0 +# try: +# LOG.info('=========================================================') +# LOG.info('############## Registering policies #####################') +# LOG.info('=========================================================') +# registered_count = policies_registrar.register_policies(pack_dir=pack_dir, +# fail_on_failure=fail_on_failure) +# except Exception as e: +# exc_info = not fail_on_failure +# LOG.warning('Failed to register policies: %s', e, exc_info=exc_info) + +# if fail_on_failure: +# raise e + +# LOG.info('Registered %s policies.', registered_count) + + +# def register_configs(): +# pack_dir = cfg.CONF.register.pack +# fail_on_failure = not cfg.CONF.register.no_fail_on_failure + +# registered_count = 0 + +# try: +# LOG.info('=========================================================') +# 
LOG.info('############## Registering configs ######################') +# LOG.info('=========================================================') +# with Timer(key='st2.register.configs'): +# registered_count = configs_registrar.register_configs( +# pack_dir=pack_dir, +# fail_on_failure=fail_on_failure, +# validate_configs=True +# ) +# except Exception as e: +# exc_info = not fail_on_failure +# LOG.warning('Failed to register configs: %s', e, exc_info=exc_info) + +# if fail_on_failure: +# raise e + +# LOG.info('Registered %s configs.' % (registered_count)) + + +# def register_content(): +# register_all = cfg.CONF.register.all + +# if register_all: +# register_triggers() +# register_sensors() +# register_runners() +# register_actions() +# register_rules() +# register_aliases() +# register_policies() +# register_configs() + +# if cfg.CONF.register.triggers and not register_all: +# register_triggers() + +# if cfg.CONF.register.sensors and not register_all: +# register_sensors() + +# if cfg.CONF.register.runners and not register_all: +# register_runners() + +# if cfg.CONF.register.actions and not register_all: +# # If --register-runners is passed, registering runners again would be duplicate. +# # If it's not passed, we still want to register runners. Otherwise, actions will complain +# # about runners not being registered. 
+# if not cfg.CONF.register.runners: +# register_runners() +# register_actions() + +# if cfg.CONF.register.rules and not register_all: +# register_rules() + +# if cfg.CONF.register.aliases and not register_all: +# register_aliases() + +# if cfg.CONF.register.policies and not register_all: +# register_policies() + +# if cfg.CONF.register.configs and not register_all: +# register_configs() + +# if cfg.CONF.register.setup_virtualenvs: +# setup_virtualenvs() + + +# def setup(argv): +# common_setup(config=config, setup_db=True, register_mq_exchanges=True, +# register_internal_trigger_types=True) + + +# def teardown(): +# common_teardown() + + +# def main(argv): +# setup(argv) +# register_content() +# teardown() + + +# # This script registers actions and rules from content-packs. +# if __name__ == '__main__': +# main(sys.argv[1:]) diff --git a/ultron8/content/loader.py b/ultron8/content/loader.py new file mode 100644 index 00000000..1ccb5c22 --- /dev/null +++ b/ultron8/content/loader.py @@ -0,0 +1,244 @@ +# st2common + +import logging +import os + +import six +from yaml.parser import ParserError + +# from st2common import log as logging +from ultron8.constants.meta import ALLOWED_EXTS, PARSER_FUNCS +from ultron8.constants.packs import MANIFEST_FILE_NAME + +__all__ = ["ContentPackLoader", "MetaLoader"] + +LOG = logging.getLogger(__name__) + + +class ContentPackLoader(object): + """ + Class for loading pack and pack content information from directories on disk. + """ + + # TODO: Rename "get_content" methods since they don't actually return + # content - they just return a path + + ALLOWED_CONTENT_TYPES = [ + "triggers", + "sensors", + "actions", + "rules", + "aliases", + "policies", + ] + + def get_packs(self, base_dirs): + """ + Retrieve a list of packs in the provided directories. + + :return: Dictionary where the key is pack name and the value is full path to the pack + directory. 
+ :rtype: ``dict`` + """ + assert isinstance(base_dirs, list) + + result = {} + for base_dir in base_dirs: + if not os.path.isdir(base_dir): + raise ValueError('Directory "%s" doesn\'t exist' % (base_dir)) + + packs_in_dir = self._get_packs_from_dir(base_dir=base_dir) + result.update(packs_in_dir) + + return result + + def get_content(self, base_dirs, content_type): + """ + Retrieve content from the provided directories. + + Provided directories are searched from left to right. If a pack with the same name exists + in multiple directories, first pack which is found wins. + + :param base_dirs: Directories to look into. + :type base_dirs: ``list`` + + :param content_type: Content type to look for (sensors, actions, rules). + :type content_type: ``str`` + + :rtype: ``dict`` + """ + assert isinstance(base_dirs, list) + + if content_type not in self.ALLOWED_CONTENT_TYPES: + raise ValueError("Unsupported content_type: %s" % (content_type)) + + content = {} + pack_to_dir_map = {} + for base_dir in base_dirs: + if not os.path.isdir(base_dir): + raise ValueError('Directory "%s" doesn\'t exist' % (base_dir)) + + dir_content = self._get_content_from_dir( + base_dir=base_dir, content_type=content_type + ) + + # Check for duplicate packs + for pack_name, pack_content in six.iteritems(dir_content): + if pack_name in content: + pack_dir = pack_to_dir_map[pack_name] + LOG.warning( + 'Pack "%s" already found in "%s", ignoring content from "%s"' + % (pack_name, pack_dir, base_dir) + ) + else: + content[pack_name] = pack_content + pack_to_dir_map[pack_name] = base_dir + + return content + + def get_content_from_pack(self, pack_dir, content_type): + """ + Retrieve content from the provided pack directory. + + :param pack_dir: Path to the pack directory. + :type pack_dir: ``str`` + + :param content_type: Content type to look for (sensors, actions, rules). 
+ :type content_type: ``str`` + + :rtype: ``str`` + """ + if content_type not in self.ALLOWED_CONTENT_TYPES: + raise ValueError("Unsupported content_type: %s" % (content_type)) + + if not os.path.isdir(pack_dir): + raise ValueError('Directory "%s" doesn\'t exist' % (pack_dir)) + + content = self._get_content_from_pack_dir( + pack_dir=pack_dir, content_type=content_type + ) + return content + + def _get_packs_from_dir(self, base_dir): + result = {} + for pack_name in os.listdir(base_dir): + pack_dir = os.path.join(base_dir, pack_name) + pack_manifest_file = os.path.join(pack_dir, MANIFEST_FILE_NAME) + + if os.path.isdir(pack_dir) and os.path.isfile(pack_manifest_file): + result[pack_name] = pack_dir + + return result + + def _get_content_from_dir(self, base_dir, content_type): + content = {} + for pack in os.listdir(base_dir): + # TODO: Use function from util which escapes the name + pack_dir = os.path.join(base_dir, pack) + + # Ignore missing or non directories + try: + pack_content = self._get_content_from_pack_dir( + pack_dir=pack_dir, content_type=content_type + ) + except ValueError: + continue + else: + content[pack] = pack_content + + return content + + def _get_content_from_pack_dir(self, pack_dir, content_type): + content_types = dict( + triggers=self._get_triggers, + sensors=self._get_sensors, + actions=self._get_actions, + rules=self._get_rules, + aliases=self._get_aliases, + policies=self._get_policies, + ) + + get_func = content_types.get(content_type) + + if get_func is None: + raise ValueError("Invalid content_type: %s" % (content_type)) + + if not os.path.isdir(pack_dir): + raise ValueError('Directory "%s" doesn\'t exist' % (pack_dir)) + + pack_content = get_func(pack_dir=pack_dir) + return pack_content + + def _get_triggers(self, pack_dir): + return self._get_folder(pack_dir=pack_dir, content_type="triggers") + + def _get_sensors(self, pack_dir): + return self._get_folder(pack_dir=pack_dir, content_type="sensors") + + def _get_actions(self, 
pack_dir): + return self._get_folder(pack_dir=pack_dir, content_type="actions") + + def _get_rules(self, pack_dir): + return self._get_folder(pack_dir=pack_dir, content_type="rules") + + def _get_aliases(self, pack_dir): + return self._get_folder(pack_dir=pack_dir, content_type="aliases") + + def _get_policies(self, pack_dir): + return self._get_folder(pack_dir=pack_dir, content_type="policies") + + def _get_folder(self, pack_dir, content_type): + path = os.path.join(pack_dir, content_type) + if not os.path.isdir(path): + return None + return path + + +class MetaLoader(object): + """ + Class for loading and parsing pack and resource metadata files. + """ + + def load(self, file_path, expected_type=None): + """ + Loads content from file_path if file_path's extension + is one of allowed ones (See ALLOWED_EXTS). + + Throws UnsupportedMetaException on disallowed filetypes. + Throws ValueError on malformed meta. + + :param file_path: Absolute path to the file to load content from. + :type file_path: ``str`` + + :param expected_type: Expected type for the loaded and parsed content (optional). + :type expected_type: ``object`` + + :rtype: ``dict`` + """ + file_name, file_ext = os.path.splitext(file_path) + + if file_ext not in ALLOWED_EXTS: + raise Exception( + "Unsupported meta type %s, file %s. 
Allowed: %s" + % (file_ext, file_path, ALLOWED_EXTS) + ) + + result = self._load(PARSER_FUNCS[file_ext], file_path) + + if expected_type and not isinstance(result, expected_type): + actual_type = type(result).__name__ + error = 'Expected "%s", got "%s"' % (expected_type.__name__, actual_type) + raise ValueError(error) + + return result + + def _load(self, parser_func, file_path): + with open(file_path, "r") as fd: + try: + return parser_func(fd) + except ValueError: + LOG.exception("Failed loading content from %s.", file_path) + raise + except ParserError: + LOG.exception("Failed loading content from %s.", file_path) + raise diff --git a/ultron8/content/utils.py b/ultron8/content/utils.py new file mode 100644 index 00000000..1d6aa00e --- /dev/null +++ b/ultron8/content/utils.py @@ -0,0 +1,413 @@ +# st2common + +import os +import os.path + +from ultron8.api.settings import ( + CONTENT_PACK_GROUP, + CONTENT_PACKS_BASE_PATHS, + CONTENT_SYSTEM_PACKS_BASE_PATH, +) +from ultron8.constants.action import LIBS_DIR as ACTION_LIBS_DIR +from ultron8.utils.shell import quote_unix +from ultron8.utils.types import OrderedSet + +# from oslo_config import cfg + + +__all__ = [ + "get_pack_group", + "get_system_packs_base_path", + "get_packs_base_paths", + "get_pack_base_path", + "get_pack_directory", + "get_pack_file_abs_path", + "get_pack_resource_file_abs_path", + "get_relative_path_to_pack_file", + "check_pack_directory_exists", + "check_pack_content_directory_exists", +] + +INVALID_FILE_PATH_ERROR = """ +Invalid file path: "%s". File path needs to be relative to the pack%sdirectory (%s). +For example "my_%s.py". +""".strip().replace( + "\n", " " +) + +# Cache which stores pack name -> pack base path mappings +PACK_NAME_TO_BASE_PATH_CACHE = {} + + +def get_pack_group(): + """ + Return a name of the group with write permissions to pack directory. 
+ + :rtype: ``str`` + """ + return CONTENT_PACK_GROUP + + +def get_system_packs_base_path(): + """ + Return a path to the directory where system packs are stored. + + :rtype: ``str`` + """ + return CONTENT_SYSTEM_PACKS_BASE_PATH + + +def get_packs_base_paths(): + """ + Return a list of base paths which are searched for integration packs. + + :rtype: ``list`` + """ + system_packs_base_path = get_system_packs_base_path() + packs_base_paths = CONTENT_PACKS_BASE_PATHS or "" + + # Remove trailing colon (if present) + if packs_base_paths.endswith(":"): + packs_base_paths = packs_base_paths[:-1] + + result = [] + # System path is always first + if system_packs_base_path: + result.append(system_packs_base_path) + + packs_base_paths = packs_base_paths.split(":") + + result = result + packs_base_paths + result = [path for path in result if path] + result = list(OrderedSet(result)) + return result + + +def check_pack_directory_exists(pack): + """ + Check if a provided pack exists in one of the pack paths. + + :param pack: Pack name. + :type pack: ``str`` + + :rtype: ``bool`` + """ + packs_base_paths = get_packs_base_paths() + + for base_dir in packs_base_paths: + pack_path = os.path.join(base_dir, pack) + if os.path.exists(pack_path): + return True + + return False + + +def check_pack_content_directory_exists(pack, content_type): + """ + Check if a provided pack exists in one of the pack paths. + + :param pack: Pack name. + :type pack: ``str`` + + :param content_type: Content type (actions, sensors, rules). + :type content_type: ``str`` + + :rtype: ``bool`` + """ + packs_base_paths = get_packs_base_paths() + + for base_dir in packs_base_paths: + pack_content_pack = os.path.join(base_dir, pack, content_type) + if os.path.exists(pack_content_pack): + return True + + return False + + +def get_pack_base_path(pack_name, include_trailing_slash=False, use_pack_cache=False): + """ + Return full absolute base path to the content pack directory. 
+ + Note: This function looks for a pack in all the load paths and return path to the first pack + which matched the provided name. + + If a pack is not found, we return a pack which points to the first packs directory (this is + here for backward compatibility reasons). + + :param pack_name: Content pack name. + :type pack_name: ``str`` + + :param include_trailing_slash: True to include trailing slash. + :type include_trailing_slash: ``bool`` + + :param use_pack_cache: True to cache base paths on per-pack basis. This help in situations + where this method is called multiple times with the same pack name. + :type use_pack_cache`` ``bool`` + + :rtype: ``str`` + """ + if not pack_name: + return None + + if use_pack_cache and pack_name in PACK_NAME_TO_BASE_PATH_CACHE: + return PACK_NAME_TO_BASE_PATH_CACHE[pack_name] + + packs_base_paths = get_packs_base_paths() + for packs_base_path in packs_base_paths: + pack_base_path = os.path.join(packs_base_path, quote_unix(pack_name)) + pack_base_path = os.path.abspath(pack_base_path) + + if os.path.isdir(pack_base_path): + if include_trailing_slash and not pack_base_path.endswith(os.path.sep): + pack_base_path += os.path.sep + + PACK_NAME_TO_BASE_PATH_CACHE[pack_name] = pack_base_path + return pack_base_path + + # Path with the provided name not found + pack_base_path = os.path.join(packs_base_paths[0], quote_unix(pack_name)) + pack_base_path = os.path.abspath(pack_base_path) + + if include_trailing_slash and not pack_base_path.endswith(os.path.sep): + pack_base_path += os.path.sep + + PACK_NAME_TO_BASE_PATH_CACHE[pack_name] = pack_base_path + return pack_base_path + + +def get_pack_directory(pack_name): + """ + Retrieve a directory for the provided pack. + + If a directory for the provided pack doesn't exist in any of the search paths, None + is returned instead. + + Note: If same pack exists in multiple search path, path to the first one is returned. + + :param pack_name: Pack name. 
+ :type pack_name: ``str`` + + :return: Pack to the pack directory. + :rtype: ``str`` or ``None`` + """ + packs_base_paths = get_packs_base_paths() + for packs_base_path in packs_base_paths: + pack_base_path = os.path.join(packs_base_path, quote_unix(pack_name)) + pack_base_path = os.path.abspath(pack_base_path) + + if os.path.isdir(pack_base_path): + return pack_base_path + + return None + + +def get_entry_point_abs_path(pack=None, entry_point=None, use_pack_cache=False): + """ + Return full absolute path of an action entry point in a pack. + + :param pack: Content pack reference. + :type pack: ``str`` + + :param entry_point: Action entry point. + :type entry_point: ``str`` + + :rtype: ``str`` + """ + if not entry_point: + return None + + if os.path.isabs(entry_point): + pack_base_path = get_pack_base_path( + pack_name=pack, use_pack_cache=use_pack_cache + ) + common_prefix = os.path.commonprefix([pack_base_path, entry_point]) + + if common_prefix != pack_base_path: + raise ValueError( + 'Entry point file "%s" is located outside of the pack directory' + % (entry_point) + ) + + return entry_point + + entry_point_abs_path = get_pack_resource_file_abs_path( + pack_ref=pack, resource_type="action", file_path=entry_point + ) + return entry_point_abs_path + + +def get_pack_file_abs_path( + pack_ref, file_path, resource_type=None, use_pack_cache=False +): + """ + Retrieve full absolute path to the pack file. + + Note: This function also takes care of sanitizing ``file_name`` argument + preventing directory traversal and similar attacks. + + :param pack_ref: Pack reference (needs to be the same as directory on disk). + :type pack_ref: ``str`` + + :pack file_path: Resource file path relative to the pack directory (e.g. my_file.py or + actions/directory/my_file.py) + :type file_path: ``str`` + + param: resource_type: Optional resource type. If provided, more user-friendly exception + is thrown on error. 
+ :type resource_type: ``str`` + + :rtype: ``str`` + """ + pack_base_path = get_pack_base_path( + pack_name=pack_ref, use_pack_cache=use_pack_cache + ) + + if resource_type: + resource_type_plural = " %ss " % (resource_type) + resource_base_path = os.path.join(pack_base_path, "%ss/" % (resource_type)) + else: + resource_type_plural = " " + resource_base_path = pack_base_path + + path_components = [] + path_components.append(pack_base_path) + + # Normalize the path to prevent directory traversal + normalized_file_path = os.path.normpath("/" + file_path).lstrip("/") + + if normalized_file_path != file_path: + msg = INVALID_FILE_PATH_ERROR % ( + file_path, + resource_type_plural, + resource_base_path, + resource_type or "action", + ) + raise ValueError(msg) + + path_components.append(normalized_file_path) + result = os.path.join(*path_components) + + assert normalized_file_path in result + + # Final safety check for common prefix to avoid traversal attack + common_prefix = os.path.commonprefix([pack_base_path, result]) + if common_prefix != pack_base_path: + msg = INVALID_FILE_PATH_ERROR % ( + file_path, + resource_type_plural, + resource_base_path, + resource_type or "action", + ) + raise ValueError(msg) + + return result + + +def get_pack_resource_file_abs_path(pack_ref, resource_type, file_path): + """ + Retrieve full absolute path to the pack resource file. + + Note: This function also takes care of sanitizing ``file_name`` argument + preventing directory traversal and similar attacks. + + :param pack_ref: Pack reference (needs to be the same as directory on disk). + :type pack_ref: ``str`` + + :param resource_type: Pack resource type (e.g. action, sensor, etc.). + :type resource_type: ``str`` + + :pack file_path: Resource file path relative to the pack directory (e.g. 
my_file.py or + directory/my_file.py) + :type file_path: ``str`` + + :rtype: ``str`` + """ + path_components = [] + if resource_type == "action": + path_components.append("actions/") + elif resource_type == "sensor": + path_components.append("sensors/") + elif resource_type == "rule": + path_components.append("rules/") + else: + raise ValueError("Invalid resource type: %s" % (resource_type)) + + path_components.append(file_path) + file_path = os.path.join(*path_components) + result = get_pack_file_abs_path( + pack_ref=pack_ref, file_path=file_path, resource_type=resource_type + ) + return result + + +def get_relative_path_to_pack_file(pack_ref, file_path, use_pack_cache=False): + """ + Retrieve a file path which is relative to the provided pack directory. + + :param pack_ref: Pack reference. + :type pack_ref: ``str`` + + :param file_path: Full absolute path to a pack file. + :type file_path: ``str`` + + :rtype: ``str`` + """ + pack_base_path = get_pack_base_path( + pack_name=pack_ref, use_pack_cache=use_pack_cache + ) + + if not os.path.isabs(file_path): + return file_path + + file_path = os.path.abspath(file_path) + + common_prefix = os.path.commonprefix([pack_base_path, file_path]) + if common_prefix != pack_base_path: + raise ValueError( + "file_path (%s) is not located inside the pack directory (%s)" + % (file_path, pack_base_path) + ) + + relative_path = os.path.relpath(file_path, common_prefix) + return relative_path + + +def get_action_libs_abs_path(pack=None, entry_point=None): + """ + Return full absolute path of libs for an action. + + :param pack_name: Content pack name. + :type pack_name: ``str`` + :param entry_point: Action entry point. 
+ :type entry_point: ``str`` + + :rtype: ``str`` + """ + entry_point_abs_path = get_entry_point_abs_path(pack=pack, entry_point=entry_point) + if entry_point_abs_path is not None: + return os.path.join(os.path.dirname(entry_point_abs_path), ACTION_LIBS_DIR) + else: + return None + + +def get_aliases_base_paths(): + """ + Return a list of base paths which are searched for action aliases. + + :rtype: ``list`` + """ + # aliases_base_paths = cfg.CONF.content.aliases_base_paths or '' + aliases_base_paths = "" + + # Remove trailing colon (if present) + if aliases_base_paths.endswith(":"): + aliases_base_paths = aliases_base_paths[:-1] + + result = [] + + aliases_base_paths = aliases_base_paths.split(":") + + result = aliases_base_paths + result = [path for path in result if path] + result = list(OrderedSet(result)) + return result diff --git a/ultron8/content/validators.py b/ultron8/content/validators.py new file mode 100644 index 00000000..95b1363d --- /dev/null +++ b/ultron8/content/validators.py @@ -0,0 +1,50 @@ +# # st2common + +# from __future__ import absolute_import +# import os +# from pkg_resources import get_distribution + +# from st2common.constants.pack import USER_PACK_NAME_BLACKLIST + +# __all__ = [ +# 'RequirementsValidator', +# 'validate_pack_name' +# ] + + +# class RequirementsValidator(object): + +# @staticmethod +# def validate(requirements_file): +# if not os.path.exists(requirements_file): +# raise Exception('Requirements file %s not found.' % requirements_file) +# missing = [] +# with open(requirements_file, 'r') as f: +# for line in f: +# rqmnt = line.strip() +# try: +# get_distribution(rqmnt) +# except: +# missing.append(rqmnt) +# return missing + + +# def validate_pack_name(name): +# """ +# Validate the content pack name. + +# Throws Exception on invalid name. + +# :param name: Content pack name to validate. 
+# :type name: ``str`` + +# :rtype: ``str`` +# """ +# if not name: +# raise ValueError('Content pack name cannot be empty') + +# if name.lower() in USER_PACK_NAME_BLACKLIST: +# raise ValueError('Name "%s" is blacklisted and can\'t be used' % +# (name.lower())) + +# return name diff --git a/ultron8/exceptions/db.py b/ultron8/exceptions/db.py index 89eaa6b5..da5cbd8c 100644 --- a/ultron8/exceptions/db.py +++ b/ultron8/exceptions/db.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from ultron8.exceptions import UltronBaseException diff --git a/ultron8/expressions/functions/data.py b/ultron8/expressions/functions/data.py index 5dcc31ac..2f8e1e37 100644 --- a/ultron8/expressions/functions/data.py +++ b/ultron8/expressions/functions/data.py @@ -12,9 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import absolute_import - import json + import jsonpath_rw import six import yaml diff --git a/ultron8/expressions/functions/datastore.py b/ultron8/expressions/functions/datastore.py index b5068998..1e0bb365 100644 --- a/ultron8/expressions/functions/datastore.py +++ b/ultron8/expressions/functions/datastore.py @@ -1,4 +1,4 @@ -# # st2 +# # st2common # # # TODO: Fix me, need to add services, and crypto util diff --git a/ultron8/expressions/functions/path.py b/ultron8/expressions/functions/path.py index 046774e8..30dfde5f 100644 --- a/ultron8/expressions/functions/path.py +++ b/ultron8/expressions/functions/path.py @@ -1,4 +1,4 @@ -# st2 +# st2common import os __all__ = ["basename", "dirname"] diff --git a/ultron8/expressions/functions/regex.py b/ultron8/expressions/functions/regex.py index 6eb53704..cf5861eb 100644 --- a/ultron8/expressions/functions/regex.py +++ b/ultron8/expressions/functions/regex.py @@ -1,5 +1,6 @@ -# st2 +# st2common import re + import six __all__ = ["regex_match", "regex_replace", "regex_search", "regex_substring"] diff --git a/ultron8/expressions/functions/time.py 
b/ultron8/expressions/functions/time.py index db988995..045e5900 100644 --- a/ultron8/expressions/functions/time.py +++ b/ultron8/expressions/functions/time.py @@ -1,9 +1,9 @@ -# st2 - -import six +# st2common import datetime +import six + __all__ = ["to_human_time_from_seconds"] long_int = int diff --git a/ultron8/expressions/functions/version.py b/ultron8/expressions/functions/version.py index b455a78a..41dfcbe4 100644 --- a/ultron8/expressions/functions/version.py +++ b/ultron8/expressions/functions/version.py @@ -1,4 +1,4 @@ -# st2 +# st2common import semver __all__ = [ diff --git a/ultron8/persistence/__init__.py b/ultron8/persistence/__init__.py new file mode 100644 index 00000000..805ce642 --- /dev/null +++ b/ultron8/persistence/__init__.py @@ -0,0 +1 @@ +"""Module that deals with writing things to disk.""" diff --git a/ultron8/persistence/base.py b/ultron8/persistence/base.py new file mode 100644 index 00000000..962842bb --- /dev/null +++ b/ultron8/persistence/base.py @@ -0,0 +1,374 @@ +# # Copyright 2019 Extreme Networks, Inc. +# # +# # Licensed under the Apache License, Version 2.0 (the "License"); +# # you may not use this file except in compliance with the License. +# # You may obtain a copy of the License at +# # +# # http://www.apache.org/licenses/LICENSE-2.0 +# # +# # Unless required by applicable law or agreed to in writing, software +# # distributed under the License is distributed on an "AS IS" BASIS, +# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# # See the License for the specific language governing permissions and +# # limitations under the License. 
+ +# from __future__ import absolute_import +# import abc + +# import six + +# from st2common import log as logging +# from st2common.exceptions.db import StackStormDBObjectConflictError +# from st2common.models.system.common import ResourceReference + + +# __all__ = [ +# 'Access', + +# 'ContentPackResource', +# 'StatusBasedResource' +# ] + +# LOG = logging.getLogger(__name__) + + +# @six.add_metaclass(abc.ABCMeta) +# class Access(object): +# impl = None +# publisher = None +# dispatcher = None + +# # ModelAPI class for this resource +# api_model_cls = None + +# # A list of operations for which we should dispatch a trigger +# dispatch_trigger_for_operations = [] + +# # Maps model operation name (e.g. create, update, delete) to the trigger reference which is +# # used when dispatching a trigger +# operation_to_trigger_ref_map = {} + +# @classmethod +# @abc.abstractmethod +# def _get_impl(cls): +# pass + +# @classmethod +# @abc.abstractmethod +# def _get_publisher(cls): +# return None + +# @classmethod +# def _get_dispatcher(cls): +# """ +# Return a dispatcher class which is used for dispatching triggers. 
+# """ +# # Late import to avoid very expensive in-direct jsonschema import (~1 second) when this +# # function is not called / used +# from st2common.transport.reactor import TriggerDispatcher + +# if not cls.dispatcher: +# cls.dispatcher = TriggerDispatcher(LOG) + +# return cls.dispatcher + +# @classmethod +# @abc.abstractmethod +# def _get_by_object(cls, object): +# return None + +# @classmethod +# def get_by_name(cls, value): +# return cls._get_impl().get_by_name(value) + +# @classmethod +# def get_by_id(cls, value): +# return cls._get_impl().get_by_id(value) + +# @classmethod +# def get_by_uid(cls, value): +# return cls._get_impl().get_by_uid(value) + +# @classmethod +# def get_by_ref(cls, value): +# return cls._get_impl().get_by_ref(value) + +# @classmethod +# def get_by_pack(cls, value): +# return cls._get_impl().get_by_pack(value) + +# @classmethod +# def get(cls, *args, **kwargs): +# return cls._get_impl().get(*args, **kwargs) + +# @classmethod +# def get_all(cls, *args, **kwargs): +# return cls._get_impl().get_all(*args, **kwargs) + +# @classmethod +# def count(cls, *args, **kwargs): +# return cls._get_impl().count(*args, **kwargs) + +# @classmethod +# def query(cls, *args, **kwargs): +# return cls._get_impl().query(*args, **kwargs) + +# @classmethod +# def distinct(cls, *args, **kwargs): +# return cls._get_impl().distinct(*args, **kwargs) + +# @classmethod +# def aggregate(cls, *args, **kwargs): +# return cls._get_impl().aggregate(*args, **kwargs) + +# @classmethod +# def insert(cls, model_object, publish=True, dispatch_trigger=True, +# log_not_unique_error_as_debug=False): +# # Late import to avoid very expensive in-direct import (~1 second) when this function +# # is not called / used +# from mongoengine import NotUniqueError + +# if model_object.id: +# raise ValueError('id for object %s was unexpected.' 
% model_object) +# try: +# model_object = cls._get_impl().insert(model_object) +# except NotUniqueError as e: +# if log_not_unique_error_as_debug: +# LOG.debug('Conflict while trying to save in DB: %s.', six.text_type(e)) +# else: +# LOG.exception('Conflict while trying to save in DB.') +# # On a conflict determine the conflicting object and return its id in +# # the raised exception. +# conflict_object = cls._get_by_object(model_object) +# conflict_id = str(conflict_object.id) if conflict_object else None +# message = six.text_type(e) +# raise StackStormDBObjectConflictError(message=message, conflict_id=conflict_id, +# model_object=model_object) + +# # Publish internal event on the message bus +# if publish: +# try: +# cls.publish_create(model_object) +# except: +# LOG.exception('Publish failed.') + +# # Dispatch trigger +# if dispatch_trigger: +# try: +# cls.dispatch_create_trigger(model_object) +# except: +# LOG.exception('Trigger dispatch failed.') + +# return model_object + +# @classmethod +# def add_or_update(cls, model_object, publish=True, dispatch_trigger=True, validate=True, +# log_not_unique_error_as_debug=False): +# # Late import to avoid very expensive in-direct import (~1 second) when this function +# # is not called / used +# from mongoengine import NotUniqueError + +# pre_persist_id = model_object.id +# try: +# model_object = cls._get_impl().add_or_update(model_object, validate=True) +# except NotUniqueError as e: +# if log_not_unique_error_as_debug: +# LOG.debug('Conflict while trying to save in DB: %s.', six.text_type(e)) +# else: +# LOG.exception('Conflict while trying to save in DB.') +# # On a conflict determine the conflicting object and return its id in +# # the raised exception. 
+# conflict_object = cls._get_by_object(model_object) +# conflict_id = str(conflict_object.id) if conflict_object else None +# message = six.text_type(e) +# raise StackStormDBObjectConflictError(message=message, conflict_id=conflict_id, +# model_object=model_object) + +# is_update = str(pre_persist_id) == str(model_object.id) + +# # Publish internal event on the message bus +# if publish: +# try: +# if is_update: +# cls.publish_update(model_object) +# else: +# cls.publish_create(model_object) +# except: +# LOG.exception('Publish failed.') + +# # Dispatch trigger +# if dispatch_trigger: +# try: +# if is_update: +# cls.dispatch_update_trigger(model_object) +# else: +# cls.dispatch_create_trigger(model_object) +# except: +# LOG.exception('Trigger dispatch failed.') + +# return model_object + +# @classmethod +# def update(cls, model_object, publish=True, dispatch_trigger=True, **kwargs): +# """ +# Use this method when - +# * upsert=False is desired +# * special operators like push, push_all are to be used. +# """ +# cls._get_impl().update(model_object, **kwargs) +# # update does not return the object but a flag; likely success/fail but docs +# # are not very good on this one so ignoring. Explicitly get the object from +# # DB abd return. 
+# model_object = cls.get_by_id(model_object.id) + +# # Publish internal event on the message bus +# if publish: +# try: +# cls.publish_update(model_object) +# except: +# LOG.exception('Publish failed.') + +# # Dispatch trigger +# if dispatch_trigger: +# try: +# cls.dispatch_update_trigger(model_object) +# except: +# LOG.exception('Trigger dispatch failed.') + +# return model_object + +# @classmethod +# def delete(cls, model_object, publish=True, dispatch_trigger=True): +# persisted_object = cls._get_impl().delete(model_object) + +# # Publish internal event on the message bus +# if publish: +# try: +# cls.publish_delete(model_object) +# except Exception: +# LOG.exception('Publish failed.') + +# # Dispatch trigger +# if dispatch_trigger: +# try: +# cls.dispatch_delete_trigger(model_object) +# except Exception: +# LOG.exception('Trigger dispatch failed.') + +# return persisted_object + +# #################################################### +# # Internal event bus message publish related methods +# #################################################### + +# @classmethod +# def publish_create(cls, model_object): +# publisher = cls._get_publisher() +# if publisher: +# publisher.publish_create(model_object) + +# @classmethod +# def publish_update(cls, model_object): +# publisher = cls._get_publisher() +# if publisher: +# publisher.publish_update(model_object) + +# @classmethod +# def publish_delete(cls, model_object): +# publisher = cls._get_publisher() +# if publisher: +# publisher.publish_delete(model_object) + +# ############################################ +# # Internal trigger dispatch related methods +# ########################################### + +# @classmethod +# def dispatch_create_trigger(cls, model_object): +# """ +# Dispatch a resource-specific trigger which indicates a new resource has been created. 
+# """ +# return cls._dispatch_operation_trigger(operation='create', model_object=model_object) + +# @classmethod +# def dispatch_update_trigger(cls, model_object): +# """ +# Dispatch a resource-specific trigger which indicates an existing resource has been updated. +# """ +# return cls._dispatch_operation_trigger(operation='update', model_object=model_object) + +# @classmethod +# def dispatch_delete_trigger(cls, model_object): +# """ +# Dispatch a resource-specific trigger which indicates an existing resource has been +# deleted. +# """ +# return cls._dispatch_operation_trigger(operation='delete', model_object=model_object) + +# @classmethod +# def _get_trigger_ref_for_operation(cls, operation): +# trigger_ref = cls.operation_to_trigger_ref_map.get(operation, None) + +# if not trigger_ref: +# raise ValueError('Trigger ref not specified for operation: %s' % (operation)) + +# return trigger_ref + +# @classmethod +# def _dispatch_operation_trigger(cls, operation, model_object): +# if operation not in cls.dispatch_trigger_for_operations: +# return + +# trigger = cls._get_trigger_ref_for_operation(operation=operation) + +# object_payload = cls.api_model_cls.from_model(model_object, mask_secrets=True).__json__() +# payload = { +# 'object': object_payload +# } +# return cls._dispatch_trigger(operation=operation, trigger=trigger, payload=payload) + +# @classmethod +# def _dispatch_trigger(cls, operation, trigger, payload): +# if operation not in cls.dispatch_trigger_for_operations: +# return + +# dispatcher = cls._get_dispatcher() +# return dispatcher.dispatch(trigger=trigger, payload=payload) + + +# class ContentPackResource(Access): + +# @classmethod +# def get_by_ref(cls, ref): +# if not ref: +# return None + +# ref_obj = ResourceReference.from_string_reference(ref=ref) +# result = cls.query(name=ref_obj.name, +# pack=ref_obj.pack).first() +# return result + +# @classmethod +# def _get_by_object(cls, object): +# # For an object with a resourcepack pack.name is unique. 
+# name = getattr(object, 'name', '') +# pack = getattr(object, 'pack', '') +# return cls.get_by_ref(ResourceReference.to_string_reference(pack=pack, name=name)) + + +# class StatusBasedResource(Access): +# """Persistence layer for models that needs to publish status to the message queue.""" + +# @classmethod +# def publish_status(cls, model_object): +# """Publish the object status to the message queue. + +# Publish the instance of the model as payload with the status +# as routing key to the message queue via the StatePublisher. + +# :param model_object: An instance of the model. +# :type model_object: ``object`` +# """ +# publisher = cls._get_publisher() +# if publisher: +# publisher.publish_state(model_object, getattr(model_object, 'status', None)) diff --git a/ultron8/persistence/pack.py b/ultron8/persistence/pack.py new file mode 100644 index 00000000..a2a98d2c --- /dev/null +++ b/ultron8/persistence/pack.py @@ -0,0 +1,49 @@ +# # Copyright 2019 Extreme Networks, Inc. +# # +# # Licensed under the Apache License, Version 2.0 (the "License"); +# # you may not use this file except in compliance with the License. +# # You may obtain a copy of the License at +# # +# # http://www.apache.org/licenses/LICENSE-2.0 +# # +# # Unless required by applicable law or agreed to in writing, software +# # distributed under the License is distributed on an "AS IS" BASIS, +# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# # See the License for the specific language governing permissions and +# # limitations under the License. 
+ +# from __future__ import absolute_import +# from st2common.persistence import base +# from st2common.models.db.pack import pack_access +# from st2common.models.db.pack import config_schema_access +# from st2common.models.db.pack import config_access + +# __all__ = [ +# 'Pack', +# 'ConfigSchema', +# 'Config' +# ] + + +# class Pack(base.Access): +# impl = pack_access + +# @classmethod +# def _get_impl(cls): +# return cls.impl + + +# class ConfigSchema(base.Access): +# impl = config_schema_access + +# @classmethod +# def _get_impl(cls): +# return cls.impl + + +# class Config(base.Access): +# impl = config_access + +# @classmethod +# def _get_impl(cls): +# return cls.impl diff --git a/ultron8/persistence/trigger.py b/ultron8/persistence/trigger.py new file mode 100644 index 00000000..478b67f4 --- /dev/null +++ b/ultron8/persistence/trigger.py @@ -0,0 +1,96 @@ +# # Copyright 2019 Extreme Networks, Inc. +# # +# # Licensed under the Apache License, Version 2.0 (the "License"); +# # you may not use this file except in compliance with the License. +# # You may obtain a copy of the License at +# # +# # http://www.apache.org/licenses/LICENSE-2.0 +# # +# # Unless required by applicable law or agreed to in writing, software +# # distributed under the License is distributed on an "AS IS" BASIS, +# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# # See the License for the specific language governing permissions and +# # limitations under the License. 
+ +# from __future__ import absolute_import + +# from st2common import log as logging +# from st2common import transport +# from st2common.exceptions.db import StackStormDBObjectNotFoundError +# from st2common.models.db.trigger import triggertype_access, trigger_access, triggerinstance_access +# from st2common.persistence.base import (Access, ContentPackResource) + +# __all__ = [ +# 'TriggerType', +# 'Trigger', +# 'TriggerInstance' +# ] + +# LOG = logging.getLogger(__name__) + + +# class TriggerType(ContentPackResource): +# impl = triggertype_access + +# @classmethod +# def _get_impl(cls): +# return cls.impl + + +# class Trigger(ContentPackResource): +# impl = trigger_access +# publisher = None + +# @classmethod +# def _get_impl(cls): +# return cls.impl + +# @classmethod +# def _get_publisher(cls): +# if not cls.publisher: +# cls.publisher = transport.reactor.TriggerCUDPublisher() +# return cls.publisher + +# @classmethod +# def delete_if_unreferenced(cls, model_object, publish=True, dispatch_trigger=True): +# # Found in the innards of mongoengine. +# # e.g. {'pk': ObjectId('5609e91832ed356d04a93cc0')} +# delete_query = model_object._object_key +# delete_query['ref_count__lte'] = 0 +# cls._get_impl().delete_by_query(**delete_query) + +# # Since delete_by_query cannot tell if the delete actually happened check with a get call +# # if the trigger was deleted. Unfortunately, this opens up to races on delete.
+# confirmed_delete = False +# try: +# cls.get_by_id(model_object.id) +# except (StackStormDBObjectNotFoundError, ValueError): +# confirmed_delete = True + +# # Publish internal event on the message bus +# if confirmed_delete and publish: +# try: +# cls.publish_delete(model_object) +# except Exception: +# LOG.exception('Publish failed.') + +# # Dispatch trigger +# if confirmed_delete and dispatch_trigger: +# try: +# cls.dispatch_delete_trigger(model_object) +# except Exception: +# LOG.exception('Trigger dispatch failed.') + +# return model_object + + +# class TriggerInstance(Access): +# impl = triggerinstance_access + +# @classmethod +# def _get_impl(cls): +# return cls.impl + +# @classmethod +# def delete_by_query(cls, *args, **query): +# return cls._get_impl().delete_by_query(*args, **query) diff --git a/ultron8/utils/casts.py b/ultron8/utils/casts.py index 5988fe7f..61bbbb9f 100644 --- a/ultron8/utils/casts.py +++ b/ultron8/utils/casts.py @@ -1,4 +1,4 @@ -# st2 +# st2common import ast import json diff --git a/ultron8/utils/crypto.py b/ultron8/utils/crypto.py index 6466d69e..a300a126 100644 --- a/ultron8/utils/crypto.py +++ b/ultron8/utils/crypto.py @@ -1,4 +1,4 @@ -# st2 +# st2common """ Module for handling symmetric encryption and decryption of short text values (mostly used for @@ -19,21 +19,16 @@ instead of SHA1, etc.). 
""" -import os -import json -import binascii import base64 - +import binascii from hashlib import sha1 +import json +import os -import six - -from cryptography.hazmat.primitives.ciphers import Cipher -from cryptography.hazmat.primitives.ciphers import algorithms -from cryptography.hazmat.primitives.ciphers import modes -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives import hmac from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, hmac +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +import six __all__ = [ "KEYCZAR_HEADER_SIZE", diff --git a/ultron8/utils/jinja.py b/ultron8/utils/jinja.py index 4b3f689a..3eab0313 100644 --- a/ultron8/utils/jinja.py +++ b/ultron8/utils/jinja.py @@ -1,11 +1,10 @@ -# st2 +# st2common import json -import re -import six - import logging +import re +import six __all__ = ["get_jinja_environment", "render_values", "is_jinja_expression"] diff --git a/ultron8/utils/misc.py b/ultron8/utils/misc.py new file mode 100644 index 00000000..8b6d9b36 --- /dev/null +++ b/ultron8/utils/misc.py @@ -0,0 +1,212 @@ +# st2 + +import collections +import functools +import logging +import os +import re +import sys + +import six + +__all__ = [ + "prefix_dict_keys", + "compare_path_file_name", + "get_field_name_from_mongoengine_error", + "sanitize_output", + "strip_shell_chars", + "rstrip_last_char", + "lowercase_value", +] + + +def prefix_dict_keys(dictionary, prefix="_"): + """ + Prefix dictionary keys with a provided prefix. + + :param dictionary: Dictionary whose keys to prefix. + :type dictionary: ``dict`` + + :param prefix: Key prefix. 
+ :type prefix: ``str`` + + :rtype: ``dict``: + """ + result = {} + + for key, value in six.iteritems(dictionary): + result["%s%s" % (prefix, key)] = value + + return result + + +def compare_path_file_name(file_path_a, file_path_b): + """ + Custom compare function which compares full absolute file paths just using + the file name. + + This function can be used with ``sorted`` or ``list.sort`` function. + """ + file_name_a = os.path.basename(file_path_a) + file_name_b = os.path.basename(file_path_b) + + return (file_name_a > file_name_b) - (file_name_a < file_name_b) + + +def sanitize_output(input_str, uses_pty=False): + """ + Function which sanitizes paramiko output (stdout / stderr). + + It strips trailing carriage return and new line characters and if pty is used, it also replaces + all occurrences of \r\n with \n. + + By default when pty is used, all \n characters are converted to \r\n and that's not desired + in our remote runner action output. + + :param input_str: Input string to be sanitized. + :type input_str: ``str`` + + :rtype: ``str`` + + """ + output = strip_shell_chars(input_str) + + if uses_pty: + output = output.replace("\r\n", "\n") + + return output + + +def strip_shell_chars(input_str): + """ + Strips the last '\r' or '\n' or '\r\n' string at the end of + the input string. This is typically used to strip ``stdout`` + and ``stderr`` streams of those characters. + + :param input_str: Input string to be stripped. + :type input_str: ``str`` + + :rtype: ``str`` + """ + stripped_str = rstrip_last_char(input_str, "\n") + stripped_str = rstrip_last_char(stripped_str, "\r") + return stripped_str + + +def rstrip_last_char(input_str, char_to_strip): + """ + Strips the last `char_to_strip` from input_str if + input_str ends with `char_to_strip`. + + :param input_str: Input string to be stripped.
+ :type input_str: ``str`` + + :rtype: ``str`` + """ + if not input_str: + return input_str + + if not char_to_strip: + return input_str + + if input_str.endswith(char_to_strip): + return input_str[: -len(char_to_strip)] + + return input_str + + +def deep_update(d, u): + """ + Perform deep merge / update of the target dict. + """ + + for k, v in six.iteritems(u): + if isinstance(v, collections.Mapping): + r = deep_update(d.get(k, {}), v) + d[k] = r + else: + d[k] = u[k] + + return d + + +def get_normalized_file_path(file_path): + """ + Return a full normalized file path for the provided path string. + + :rtype: ``str`` + """ + if hasattr(sys, "frozen"): # support for py2exe + file_path = "logging%s__init__%s" % (os.sep, file_path[-4:]) + elif file_path[-4:].lower() in [".pyc", ".pyo"]: + file_path = file_path[:-4] + ".py" + else: + file_path = file_path + + file_path = os.path.normcase(file_path) + return file_path + + +def lowercase_value(value): + """ + Lowercase the provided value. + + In case of a list, all the string item values are lowercases and in case of a dictionary, all + of the string keys and values are lowercased. + """ + if isinstance(value, six.string_types): + result = value.lower() + elif isinstance(value, (list, tuple)): + result = [str(item).lower() for item in value] + elif isinstance(value, dict): + result = {} + for key, value in six.iteritems(value): + result[key.lower()] = str(value).lower() + else: + result = value + + return result + + +def get_field_name_from_mongoengine_error(exc): + """ + Try to extract field name from mongoengine error. + + If field name is unable to be extracted, original exception is returned instead. 
+ """ + msg = str(exc) + + match = re.match('Cannot resolve field "(.+?)"', msg) + + if match: + return match.groups()[0] + + return msg + + +def ignore_and_log_exception( + exc_classes=(Exception,), logger=None, level=logging.WARNING +): + """ + Decorator which catches the provided exception classes and logs them instead of letting them + bubble all the way up. + """ + exc_classes = tuple(exc_classes) + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except exc_classes as e: + if len(args) >= 1 and getattr(args[0], "__class__", None): + func_name = "%s.%s" % (args[0].__class__.__name__, func.__name__) + else: + func_name = func.__name__ + + message = 'Exception in fuction "%s": %s' % (func_name, str(e)) + logger.log(level, message) + + return wrapper + + return decorator diff --git a/ultron8/utils/pack_management.py b/ultron8/utils/pack_management.py index 9e557262..0d4ab76d 100644 --- a/ultron8/utils/pack_management.py +++ b/ultron8/utils/pack_management.py @@ -1,8 +1,6 @@ -""" -Module containing pack management related functions. -""" - -# from __future__ import absolute_import +# """ +# Module containing pack management related functions. 
+# """ # import os # import shutil @@ -11,24 +9,25 @@ # import re # import six -# from oslo_config import cfg +# # from oslo_config import cfg # from git.repo import Repo # from gitdb.exc import BadName, BadObject # from lockfile import LockFile # from distutils.spawn import find_executable -# from st2common import log as logging -# from st2common.content import utils -# from st2common.constants.pack import MANIFEST_FILE_NAME -# from st2common.constants.pack import PACK_RESERVED_CHARACTERS -# from st2common.constants.pack import PACK_VERSION_SEPARATOR -# from st2common.constants.pack import PACK_VERSION_REGEX -# from st2common.services.packs import get_pack_from_index +# import logging +# # from st2common import log as logging +# from ultron8.content import utils +# from ultron8.constants.packs import MANIFEST_FILE_NAME +# from ultron8.constants.packs import PACK_RESERVED_CHARACTERS +# from ultron8.constants.packs import PACK_VERSION_SEPARATOR +# from ultron8.constants.packs import PACK_VERSION_REGEX +# from ultron8.api.services.pack import get_pack_from_index # from ultron8.utils.pack import get_pack_metadata # from ultron8.utils.pack import get_pack_ref_from_metadata # from ultron8.utils.green import shell # from ultron8.utils.versioning import complex_semver_match -# from ultron8.utils.versioning import get_stackstorm_version +# from ultron8.utils.versioning import get_ultron_version # from ultron8.utils.versioning import get_python_version # __all__ = [ @@ -46,7 +45,7 @@ # LOG = logging.getLogger(__name__) # CONFIG_FILE = 'config.yaml' -# CURRENT_STACKSTORM_VERSION = get_stackstorm_version() +# CURRENT_STACKSTORM_VERSION = get_ultron_version() # CURRENT_PYTHON_VERSION = get_python_version() # SUDO_BINARY = find_executable('sudo') diff --git a/ultron8/utils/queues.py b/ultron8/utils/queues.py new file mode 100644 index 00000000..b1553e2b --- /dev/null +++ b/ultron8/utils/queues.py @@ -0,0 +1,38 @@ +# st2common +import uuid + + +def 
get_queue_name(queue_name_base, queue_name_suffix, add_random_uuid_to_suffix=True): + """ + Get a queue name based on base name and suffix. You can also specify if you need a random + UUID at the end of the final name generated. Format returned is + ``queue_name_base.queue_.queue_name_suffix-UUID``. + + :param queue_name_base: Base name for the queue. + :type queue_name_base: ``str`` + + :param queue_name_suffix: Base string for the suffix. + :type queue_name_suffix: ``str`` + + :param add_random_uuid_to_suffix: A boolean to indicate a UUID suffix to be + added to name or not. + :type add_random_uuid_to_suffix: ``boolean`` + + :rtype: ``str`` + """ + if not queue_name_base: + raise ValueError("Queue name base cannot be empty.") + + if not queue_name_suffix: + return queue_name_base + + queue_suffix = queue_name_suffix + if add_random_uuid_to_suffix: + # Pick last 10 digits of uuid. Arbitrary but unique enough. Long queue names + # might cause issues in RabbitMQ. + u_hex = uuid.uuid4().hex + uuid_suffix = uuid.uuid4().hex[len(u_hex) - 10 :] + queue_suffix = "%s-%s" % (queue_name_suffix, uuid_suffix) + + queue_name = "%s.%s" % (queue_name_base, queue_suffix) + return queue_name diff --git a/ultron8/utils/reference.py b/ultron8/utils/reference.py new file mode 100644 index 00000000..c0b20ae4 --- /dev/null +++ b/ultron8/utils/reference.py @@ -0,0 +1,74 @@ +# st2common +from ultron8.api.models.system.common import ResourceReference +from ultron8.exceptions import db + + +def get_ref_from_model(model): + if model is None: + raise ValueError("Model has None value.") + model_id = getattr(model, "id", None) + if model_id is None: + raise db.UltronDBObjectMalformedError("model %s must contain id." 
% str(model)) + reference = {"id": str(model_id), "name": getattr(model, "name", None)} + return reference + + +def get_model_from_ref(db_api, reference): + if reference is None: + raise db.UltronDBObjectNotFoundError("No reference supplied.") + model_id = reference.get("id", None) + if model_id is not None: + return db_api.get_by_id(model_id) + model_name = reference.get("name", None) + if model_name is None: + raise db.UltronDBObjectNotFoundError("Both name and id are None.") + return db_api.get_by_name(model_name) + + +def get_model_by_resource_ref(db_api, ref): + """ + Retrieve a DB model based on the resource reference. + + :param db_api: Class of the object to retrieve. + :type db_api: ``object`` + + :param ref: Resource reference. + :type ref: ``str`` + + :return: Retrieved object. + """ + ref_obj = ResourceReference.from_string_reference(ref=ref) + result = db_api.query(name=ref_obj.name, pack=ref_obj.pack).first() + return result + + +def get_resource_ref_from_model(model): + """ + Return a ResourceReference given db_model. + + :param model: DB model that contains name and pack. + :type model: ``object`` + + :return: ResourceReference. + """ + try: + name = model.name + pack = model.pack + except AttributeError: + raise Exception( + "Cannot build ResourceReference for model: %s. Name or pack missing." + % model + ) + return ResourceReference(name=name, pack=pack) + + +def get_str_resource_ref_from_model(model): + """ + Return a resource reference as string given db_model. + + :param model: DB model that contains name and pack. + :type model: ``object`` + + :return: String representation of ResourceReference. + """ + return get_resource_ref_from_model(model).ref diff --git a/ultron8/utils/schema/__init__.py b/ultron8/utils/schema/__init__.py new file mode 100644 index 00000000..c86ce028 --- /dev/null +++ b/ultron8/utils/schema/__init__.py @@ -0,0 +1,393 @@ +# TODO: Turn this into pydantic. This is a prereq to getting utils/packs working. 
+# # Copyright 2019 Extreme Networks, Inc. +# # +# # Licensed under the Apache License, Version 2.0 (the "License"); +# # you may not use this file except in compliance with the License. +# # You may obtain a copy of the License at +# # +# # http://www.apache.org/licenses/LICENSE-2.0 +# # +# # Unless required by applicable law or agreed to in writing, software +# # distributed under the License is distributed on an "AS IS" BASIS, +# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# # See the License for the specific language governing permissions and +# # limitations under the License. + +# from __future__ import absolute_import +# import os +# import copy + +# import six +# import jsonschema +# from jsonschema import _validators +# from jsonschema.validators import create + +# from st2common.exceptions.action import InvalidActionParameterException +# from st2common.util import jsonify +# from st2common.util.misc import deep_update + +# __all__ = [ +# 'get_validator', +# 'get_draft_schema', +# 'get_action_parameters_schema', +# 'get_schema_for_action_parameters', +# 'get_schema_for_resource_parameters', +# 'is_property_type_single', +# 'is_property_type_list', +# 'is_property_type_anyof', +# 'is_property_type_oneof', +# 'is_property_nullable', +# 'is_attribute_type_array', +# 'is_attribute_type_object', +# 'validate' +# ] + +# # https://github.com/json-schema/json-schema/blob/master/draft-04/schema +# # The source material is licensed under the AFL or BSD license. +# # Both draft 4 and custom schema has additionalProperties set to false by default. +# # The custom schema differs from draft 4 with the extension of position, immutable, +# # and draft 3 version of required. 
+# PATH = os.path.join(os.path.dirname(os.path.realpath(__file__))) +# SCHEMAS = { +# 'draft4': jsonify.load_file(os.path.join(PATH, 'draft4.json')), +# 'custom': jsonify.load_file(os.path.join(PATH, 'custom.json')), + +# # Custom schema for action params which doesn't allow parameter "type" attribute to be array +# 'action_params': jsonify.load_file(os.path.join(PATH, 'action_params.json')), +# 'action_output_schema': jsonify.load_file(os.path.join(PATH, 'action_output_schema.json')) +# } + +# SCHEMA_ANY_TYPE = { +# "anyOf": [ +# {"type": "array"}, +# {"type": "boolean"}, +# {"type": "integer"}, +# {"type": "number"}, +# {"type": "object"}, +# {"type": "string"} +# ] +# } + +# RUNNER_PARAM_OVERRIDABLE_ATTRS = [ +# 'default', +# 'description', +# 'enum', +# 'immutable', +# 'required' +# ] + + +# def get_draft_schema(version='custom', additional_properties=False): +# schema = copy.deepcopy(SCHEMAS[version]) +# if additional_properties and 'additionalProperties' in schema: +# del schema['additionalProperties'] +# return schema + + +# def get_action_output_schema(additional_properties=True): +# """ +# Return a generic schema which is used for validating action output. +# """ +# return get_draft_schema( +# version='action_output_schema', +# additional_properties=additional_properties +# ) + + +# def get_action_parameters_schema(additional_properties=False): +# """ +# Return a generic schema which is used for validating action parameters definition. 
+# """ +# return get_draft_schema(version='action_params', additional_properties=additional_properties) + + +# CustomValidator = create( +# meta_schema=get_draft_schema(version='custom', additional_properties=True), +# validators={ +# u"$ref": _validators.ref, +# u"additionalItems": _validators.additionalItems, +# u"additionalProperties": _validators.additionalProperties, +# u"allOf": _validators.allOf_draft4, +# u"anyOf": _validators.anyOf_draft4, +# u"dependencies": _validators.dependencies, +# u"enum": _validators.enum, +# u"format": _validators.format, +# u"items": _validators.items, +# u"maxItems": _validators.maxItems, +# u"maxLength": _validators.maxLength, +# u"maxProperties": _validators.maxProperties_draft4, +# u"maximum": _validators.maximum, +# u"minItems": _validators.minItems, +# u"minLength": _validators.minLength, +# u"minProperties": _validators.minProperties_draft4, +# u"minimum": _validators.minimum, +# u"multipleOf": _validators.multipleOf, +# u"not": _validators.not_draft4, +# u"oneOf": _validators.oneOf_draft4, +# u"pattern": _validators.pattern, +# u"patternProperties": _validators.patternProperties, +# u"properties": _validators.properties_draft3, +# u"type": _validators.type_draft4, +# u"uniqueItems": _validators.uniqueItems, +# }, +# version="custom_validator", +# ) + + +# def is_property_type_single(property_schema): +# return (isinstance(property_schema, dict) and +# 'anyOf' not in list(property_schema.keys()) and +# 'oneOf' not in list(property_schema.keys()) and +# not isinstance(property_schema.get('type', 'string'), list)) + + +# def is_property_type_list(property_schema): +# return (isinstance(property_schema, dict) and +# isinstance(property_schema.get('type', 'string'), list)) + + +# def is_property_type_anyof(property_schema): +# return isinstance(property_schema, dict) and 'anyOf' in list(property_schema.keys()) + + +# def is_property_type_oneof(property_schema): +# return isinstance(property_schema, dict) and 'oneOf' in 
list(property_schema.keys()) + + +# def is_property_nullable(property_type_schema): +# # For anyOf and oneOf, the property_schema is a list of types. +# if isinstance(property_type_schema, list): +# return len([t for t in property_type_schema +# if ((isinstance(t, six.string_types) and t == 'null') or +# (isinstance(t, dict) and t.get('type', 'string') == 'null'))]) > 0 + +# return (isinstance(property_type_schema, dict) and +# property_type_schema.get('type', 'string') == 'null') + + +# def is_attribute_type_array(attribute_type): +# return (attribute_type == 'array' or +# (isinstance(attribute_type, list) and 'array' in attribute_type)) + + +# def is_attribute_type_object(attribute_type): +# return (attribute_type == 'object' or +# (isinstance(attribute_type, list) and 'object' in attribute_type)) + + +# def assign_default_values(instance, schema): +# """ +# Assign default values on the provided instance based on the schema default specification. +# """ +# instance = copy.deepcopy(instance) +# instance_is_dict = isinstance(instance, dict) +# instance_is_array = isinstance(instance, list) + +# if not instance_is_dict and not instance_is_array: +# return instance + +# properties = schema.get('properties', {}) + +# for property_name, property_data in six.iteritems(properties): +# has_default_value = 'default' in property_data +# default_value = property_data.get('default', None) + +# # Assign default value on the instance so the validation doesn't fail if requires is true +# # but the value is not provided +# if has_default_value: +# if instance_is_dict and instance.get(property_name, None) is None: +# instance[property_name] = default_value +# elif instance_is_array: +# for index, _ in enumerate(instance): +# if instance[index].get(property_name, None) is None: +# instance[index][property_name] = default_value + +# # Support for nested properties (array and object) +# attribute_type = property_data.get('type', None) +# schema_items = property_data.get('items', {}) 
+ +# # Array +# if (is_attribute_type_array(attribute_type) and +# schema_items and schema_items.get('properties', {})): +# array_instance = instance.get(property_name, None) +# array_schema = schema['properties'][property_name]['items'] + +# if array_instance is not None: +# # Note: We don't perform subschema assignment if no value is provided +# instance[property_name] = assign_default_values(instance=array_instance, +# schema=array_schema) + +# # Object +# if is_attribute_type_object(attribute_type) and property_data.get('properties', {}): +# object_instance = instance.get(property_name, None) +# object_schema = schema['properties'][property_name] + +# if object_instance is not None: +# # Note: We don't perform subschema assignment if no value is provided +# instance[property_name] = assign_default_values(instance=object_instance, +# schema=object_schema) + +# return instance + + +# def modify_schema_allow_default_none(schema): +# """ +# Manipulate the provided schema so None is also an allowed value for each attribute which +# defines a default value of None. +# """ +# schema = copy.deepcopy(schema) +# properties = schema.get('properties', {}) + +# for property_name, property_data in six.iteritems(properties): +# is_optional = not property_data.get('required', False) +# has_default_value = 'default' in property_data +# default_value = property_data.get('default', None) +# property_schema = schema['properties'][property_name] + +# if (has_default_value or is_optional) and default_value is None: +# # If property is anyOf and oneOf then it has to be process differently. 
+# if (is_property_type_anyof(property_schema) and +# not is_property_nullable(property_schema['anyOf'])): +# property_schema['anyOf'].append({'type': 'null'}) +# elif (is_property_type_oneof(property_schema) and +# not is_property_nullable(property_schema['oneOf'])): +# property_schema['oneOf'].append({'type': 'null'}) +# elif (is_property_type_list(property_schema) and +# not is_property_nullable(property_schema.get('type'))): +# property_schema['type'].append('null') +# elif (is_property_type_single(property_schema) and +# not is_property_nullable(property_schema.get('type'))): +# property_schema['type'] = [property_schema.get('type', 'string'), 'null'] + +# # Support for nested properties (array and object) +# attribute_type = property_data.get('type', None) +# schema_items = property_data.get('items', {}) + +# # Array +# if (is_attribute_type_array(attribute_type) and +# schema_items and schema_items.get('properties', {})): +# array_schema = schema_items +# array_schema = modify_schema_allow_default_none(schema=array_schema) +# schema['properties'][property_name]['items'] = array_schema + +# # Object +# if is_attribute_type_object(attribute_type) and property_data.get('properties', {}): +# object_schema = property_data +# object_schema = modify_schema_allow_default_none(schema=object_schema) +# schema['properties'][property_name] = object_schema + +# return schema + + +# def validate(instance, schema, cls=None, use_default=True, allow_default_none=False, *args, +# **kwargs): +# """ +# Custom validate function which supports default arguments combined with the "required" +# property. + +# Note: This function returns cleaned instance with default values assigned. + +# :param use_default: True to support the use of the optional "default" property. 
+# :type use_default: ``bool`` +# """ + +# instance = copy.deepcopy(instance) +# schema_type = schema.get('type', None) +# instance_is_dict = isinstance(instance, dict) + +# if use_default and allow_default_none: +# schema = modify_schema_allow_default_none(schema=schema) + +# if use_default and schema_type == 'object' and instance_is_dict: +# instance = assign_default_values(instance=instance, schema=schema) + +# # pylint: disable=assignment-from-no-return +# jsonschema.validate(instance=instance, schema=schema, cls=cls, *args, **kwargs) + +# return instance + + +# VALIDATORS = { +# 'draft4': jsonschema.Draft4Validator, +# 'custom': CustomValidator +# } + + +# def get_validator(version='custom'): +# validator = VALIDATORS[version] +# return validator + + +# def validate_runner_parameter_attribute_override(action_ref, param_name, attr_name, +# runner_param_attr_value, action_param_attr_value): +# """ +# Validate that the provided parameter from the action schema can override the +# runner parameter. +# """ +# param_values_are_the_same = action_param_attr_value == runner_param_attr_value +# if (attr_name not in RUNNER_PARAM_OVERRIDABLE_ATTRS and not param_values_are_the_same): +# raise InvalidActionParameterException( +# 'The attribute "%s" for the runner parameter "%s" in action "%s" ' +# 'cannot be overridden.' % (attr_name, param_name, action_ref)) + +# return True + + +# def get_schema_for_action_parameters(action_db, runnertype_db=None): +# """ +# Dynamically construct JSON schema for the provided action from the parameters metadata. + +# Note: This schema is used to validate parameters which are passed to the action. +# """ +# if not runnertype_db: +# from st2common.util.action_db import get_runnertype_by_name +# runnertype_db = get_runnertype_by_name(action_db.runner_type['name']) + +# # Note: We need to perform a deep merge because user can only specify a single parameter +# # attribute when overriding it in an action metadata. 
+# parameters_schema = {} +# deep_update(parameters_schema, runnertype_db.runner_parameters) +# deep_update(parameters_schema, action_db.parameters) + +# # Perform validation, make sure user is not providing parameters which can't +# # be overriden +# runner_parameter_names = list(runnertype_db.runner_parameters.keys()) + +# for name, schema in six.iteritems(action_db.parameters): +# if name not in runner_parameter_names: +# continue + +# for attribute, value in six.iteritems(schema): +# runner_param_value = runnertype_db.runner_parameters[name].get(attribute) +# validate_runner_parameter_attribute_override(action_ref=action_db.ref, +# param_name=name, +# attr_name=attribute, +# runner_param_attr_value=runner_param_value, +# action_param_attr_value=value) + +# schema = get_schema_for_resource_parameters(parameters_schema=parameters_schema) + +# if parameters_schema: +# schema['title'] = action_db.name +# if action_db.description: +# schema['description'] = action_db.description + +# return schema + + +# def get_schema_for_resource_parameters(parameters_schema, allow_additional_properties=False): +# """ +# Dynamically construct JSON schema for the provided resource from the parameters metadata. 
+# """ +# def normalize(x): +# return {k: v if v else SCHEMA_ANY_TYPE for k, v in six.iteritems(x)} + +# schema = {} +# properties = {} +# properties.update(normalize(parameters_schema)) +# if properties: +# schema['type'] = 'object' +# schema['properties'] = properties +# schema['additionalProperties'] = allow_additional_properties + +# return schema diff --git a/ultron8/utils/schema/action_output_schema.json b/ultron8/utils/schema/action_output_schema.json new file mode 100644 index 00000000..3e92536c --- /dev/null +++ b/ultron8/utils/schema/action_output_schema.json @@ -0,0 +1,160 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uri" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + 
"additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { + "type": "boolean", + "default": false + }, + "secret": { + "type": "boolean", + "default": false + }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" } + ] + }, + "position": { + "type": "number", + "minimum": 0 + }, + "immutable": { + "type": "boolean", + "default": false + }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {}, + "additionalProperties": false +} diff --git a/ultron8/utils/schema/action_params.json b/ultron8/utils/schema/action_params.json new file mode 100644 index 00000000..3e92536c --- 
/dev/null +++ b/ultron8/utils/schema/action_params.json @@ -0,0 +1,160 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uri" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": 
"#/definitions/positiveIntegerDefault0" }, + "required": { + "type": "boolean", + "default": false + }, + "secret": { + "type": "boolean", + "default": false + }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" } + ] + }, + "position": { + "type": "number", + "minimum": 0 + }, + "immutable": { + "type": "boolean", + "default": false + }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {}, + "additionalProperties": false +} diff --git a/ultron8/utils/schema/custom.json b/ultron8/utils/schema/custom.json new file mode 100644 index 00000000..05c38d76 --- /dev/null +++ b/ultron8/utils/schema/custom.json @@ -0,0 +1,166 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": 
{ + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uri" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { + "type": "boolean", + "default": false + }, + "secret": { + "type": "boolean", + "default": false + }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { 
"$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "position": { + "type": "number", + "minimum": 0 + }, + "immutable": { + "type": "boolean", + "default": false + }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {}, + "additionalProperties": false +} diff --git a/ultron8/utils/schema/draft4.json b/ultron8/utils/schema/draft4.json new file mode 100644 index 00000000..c0c1fedc --- /dev/null +++ b/ultron8/utils/schema/draft4.json @@ -0,0 +1,151 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uri" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" 
+ }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "allOf": { "$ref": 
"#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {}, + "additionalProperties": false +} diff --git a/ultron8/utils/types.py b/ultron8/utils/types.py new file mode 100644 index 00000000..7e53df12 --- /dev/null +++ b/ultron8/utils/types.py @@ -0,0 +1,81 @@ +# Copyright 2019 Extreme Networks, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +Based on http://code.activestate.com/recipes/576694/ (MIT license) +""" + +from __future__ import absolute_import + +import collections + +__all__ = ["OrderedSet"] + + +class OrderedSet(collections.MutableSet): + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.map = {} # key --> [key, prev, next] + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[1] + curr[2] = end[1] = self.map[key] = [key, curr, end] + + def discard(self, key): + if key in self.map: + key, prev, next = self.map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def pop(self, last=True): + if not self: + raise KeyError("set is empty") + key = self.end[1][0] if last else self.end[2][0] + self.discard(key) + return key + + def __repr__(self): + if not self: + return "%s()" % (self.__class__.__name__,) + return "%s(%r)" % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) diff --git a/ultron8/utils/ujson.py b/ultron8/utils/ujson.py new file mode 100644 index 00000000..f9fab49f --- /dev/null +++ b/ultron8/utils/ujson.py @@ -0,0 +1,30 @@ +# st2common + +import copy + +import ujson + +__all__ = ["fast_deepcopy"] + + +def fast_deepcopy(value, fall_back_to_deepcopy=True): + """ + Perform a fast deepcopy of the provided value. + + :param fall_back_to_deepcopy: True to fall back to copy.deepcopy() in case ujson throws an + exception. 
+ :type fall_back_to_deepcopy: ``bool`` + """ + # NOTE: ujson round-trip is up to 10 times faster on smaller and larger dicts compared + # to copy.deepcopy(), but it has some edge cases with non-simple types such as datetimes - + try: + value = ujson.loads(ujson.dumps(value)) + except (OverflowError, ValueError) as e: + # NOTE: ujson doesn't support 5 or 6 bytes utf-8 sequences which we use + # in our tests so we fall back to deep copy + if not fall_back_to_deepcopy: + raise e + + value = copy.deepcopy(value) + + return value diff --git a/ultron8/utils/versioning.py b/ultron8/utils/versioning.py index 26089f4c..3cbda13d 100644 --- a/ultron8/utils/versioning.py +++ b/ultron8/utils/versioning.py @@ -32,10 +32,6 @@ def get_ultron_version(): """ Return a valid semver version string for the currently running StackStorm version. """ - # Special handling for dev versions which are not valid semver identifiers - if "dev" in ultron_version and ultron_version.count(".") == 1: - version = ultron_version.replace("dev", ".0") - return version return ultron_version