From 6d689c193d8425d6662b390b61995cab1d9668ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Melero=20Fern=C3=A1ndez?= Date: Wed, 25 Sep 2019 08:55:00 +0100 Subject: [PATCH] Public release (#2) --- .gitignore | 79 ++++ .travis.yml | 26 ++ AUTHORS.md | 21 + CHANGELOG.md | 5 + CODE_OF_CONDUCT.md | 76 ++++ CONTRIBUTING.md | 41 ++ LICENCE | 21 + MANIFEST.in | 12 + Makefile | 91 ++++ README.md | 61 ++- docker-compose.yml | 22 + docs/CODE_OF_CONDUCT.md | 1 + docs/CONTRIBUTING.md | 1 + docs/Makefile | 20 + docs/README.md | 1 + docs/conf.py | 153 +++++++ .../a_walk_around_the_settings.rst | 80 ++++ docs/explanations/main.rst | 8 + docs/explanations/microservice_base_class.rst | 48 ++ .../microservice_cookiecutter_template.rst | 54 +++ docs/explanations/microservice_lifecycle.rst | 132 ++++++ docs/how_to_guides/install_tamarco.rst | 8 + docs/how_to_guides/main.rst | 13 + docs/how_to_guides/make_the_docs.rst | 9 + .../how_to_guides/setup_a_logging_backend.rst | 109 +++++ .../how_to_guides/setup_a_metrics_backend.rst | 109 +++++ .../how_to_guides/setup_a_setting_backend.rst | 120 +++++ .../setup_settings_for_a_resource.rst | 49 ++ ...p_settings_for_a_specific_microservice.rst | 32 ++ docs/how_to_guides/use_logging_resource.rst | 103 +++++ docs/how_to_guides/use_metrics_resource.rst | 114 +++++ docs/index.rst | 20 + docs/reference/main.rst | 40 ++ docs/tutorials/main.rst | 6 + docs/tutorials/quick_start.rst | 28 ++ .../write_your_first_microservice.rst | 169 +++++++ examples/http_resource/microservice.py | 62 +++ examples/metrics_resource/microservice.py | 58 +++ .../metrics_resource/prometheus_example.py | 54 +++ examples/settings_watcher/microservice.py | 48 ++ examples/status_resource/microservice.py | 57 +++ pyproject.toml | 18 + requirements_dev.txt | 42 ++ setup.cfg | 89 ++++ setup.py | 74 ++++ sonar-project.properties | 4 + tamarco/__init__.py | 3 + tamarco/codecs/__init__.py | 0 tamarco/codecs/interface.py | 10 + tamarco/codecs/json.py | 17 + tamarco/codecs/pickle.py | 17 + tamarco/codecs/yaml.py | 15 + tamarco/core/__init__.py | 0 tamarco/core/dependency_resolver.py | 37 ++ tamarco/core/logging/__init__.py | 0 tamarco/core/logging/formatters/__init__.py | 0 tamarco/core/logging/formatters/colored.py | 73 +++ tamarco/core/logging/formatters/logstash.py | 235 ++++++++++ tamarco/core/logging/formatters/syslog.py | 54 +++ tamarco/core/logging/handlers/__init__.py | 0 tamarco/core/logging/handlers/asyncronous.py | 64 +++ .../core/logging/handlers/elasticsearch.py | 83 ++++ tamarco/core/logging/handlers/http.py | 108 +++++ tamarco/core/logging/handlers/logstash.py | 63 +++ tamarco/core/logging/handlers/redis.py | 110 +++++ tamarco/core/logging/logging.py | 256 +++++++++++ tamarco/core/microservice.py | 418 ++++++++++++++++++ tamarco/core/patterns/__init__.py | 5 + tamarco/core/patterns/flyweight.py | 126 ++++++ tamarco/core/patterns/proxy.py | 210 +++++++++ tamarco/core/patterns/singleton.py | 21 + tamarco/core/settings/__init__.py | 0 tamarco/core/settings/backends/__init__.py | 11 + tamarco/core/settings/backends/dictionary.py | 86 ++++ tamarco/core/settings/backends/etcd.py | 220 +++++++++ tamarco/core/settings/backends/file_based.py | 34 ++ tamarco/core/settings/backends/interface.py | 52 +++ tamarco/core/settings/setting_proxy.py | 74 ++++ tamarco/core/settings/settings.py | 327 ++++++++++++++ tamarco/core/settings/utils/__init__.py | 2 + tamarco/core/settings/utils/etcd_tool.py | 107 +++++ tamarco/core/settings/utils/utils.py | 70 +++ tamarco/core/signals.py | 85 ++++ 
tamarco/core/tasks.py | 243 ++++++++++ tamarco/core/utils.py | 150 +++++++ tamarco/resources/__init__.py | 0 tamarco/resources/bases.py | 188 ++++++++ tamarco/resources/basic/__init__.py | 0 tamarco/resources/basic/metrics/__init__.py | 3 + tamarco/resources/basic/metrics/collector.py | 95 ++++ tamarco/resources/basic/metrics/manager.py | 48 ++ .../basic/metrics/meters/__init__.py | 6 + .../resources/basic/metrics/meters/base.py | 203 +++++++++ .../resources/basic/metrics/meters/counter.py | 176 ++++++++ .../resources/basic/metrics/meters/gauge.py | 103 +++++ .../resources/basic/metrics/meters/summary.py | 108 +++++ .../basic/metrics/reporters/__init__.py | 5 + .../resources/basic/metrics/reporters/base.py | 55 +++ .../basic/metrics/reporters/carbon.py | 42 ++ .../resources/basic/metrics/reporters/file.py | 30 ++ .../basic/metrics/reporters/prometheus.py | 140 ++++++ .../basic/metrics/reporters/stdout.py | 14 + tamarco/resources/basic/metrics/resource.py | 174 ++++++++ tamarco/resources/basic/metrics/settings.py | 8 + tamarco/resources/basic/registry/__init__.py | 0 tamarco/resources/basic/registry/resource.py | 69 +++ tamarco/resources/basic/registry/settings.py | 4 + tamarco/resources/basic/status/__init__.py | 0 tamarco/resources/basic/status/resource.py | 127 ++++++ tamarco/resources/basic/status/settings.py | 1 + .../resources/basic/status/status_codes.py | 10 + tamarco/resources/debug/__init__.py | 0 tamarco/resources/debug/config.py | 1 + tamarco/resources/debug/memory_analizer.py | 48 ++ tamarco/resources/debug/pdb.py | 18 + tamarco/resources/debug/profiler.py | 82 ++++ tamarco/resources/io/__init__.py | 0 tamarco/resources/io/http/__init__.py | 0 tamarco/resources/io/http/resource.py | 194 ++++++++ tamarco/tamarco | 22 + tamarco/tools/__init__.py | 0 tamarco/tools/ci.py | 123 ++++++ tamarco/tools/etcd.py | 52 +++ .../project_template/cookiecutter.json | 8 + .../.coveragerc | 35 ++ .../.gitignore | 70 +++ .../Dockerfile | 15 + .../HISTORY.md | 6 + .../{{ cookiecutter.project_name }}/README.md | 12 + .../{{ cookiecutter.project_name }}/app.py | 5 + .../docs/Makefile | 177 ++++++++ .../docs/authors.rst | 13 + .../docs/conf.py | 275 ++++++++++++ .../docs/history.rst | 1 + .../docs/index.rst | 20 + .../docs/installation.rst | 14 + .../docs/make.bat | 242 ++++++++++ .../docs/mkdsupport.py | 43 ++ .../docs/readme.rst | 1 + .../docs/usage.rst | 7 + .../requirements.txt | 1 + .../{{ cookiecutter.project_name }}/setup.cfg | 26 ++ .../tests/__init__.py | 0 .../tests/conftest.py | 0 .../tests/custom_settings/settings.yml | 0 .../tests/functional/__init__.py | 0 .../tests/unit/__init__.py | 0 .../__init__.py | 1 + .../logic/__init__.py | 0 .../{{ cookiecutter.project_slug }}/meters.py | 0 .../microservice.py | 10 + tamarco/tools/project/start_project.py | 11 + tests/__init__.py | 0 tests/conftest.py | 69 +++ tests/custom_settings/settings.yml | 34 ++ tests/functional/__init__.py | 0 tests/functional/conftest.py | 41 ++ tests/functional/core/__init__.py | 0 tests/functional/core/settings/__init__.py | 0 tests/functional/core/settings/conftest.py | 29 ++ .../core/settings/files/__init__.py | 0 .../core/settings/files/settings.json | 24 + .../core/settings/files/settings.py | 7 + .../core/settings/files/settings.yaml | 16 + .../functional/core/settings/test_backends.py | 115 +++++ .../core/settings/utils/__init__.py | 0 .../core/settings/utils/settings.yml | 24 + .../core/settings/utils/test_etcd_tool.py | 73 +++ tests/functional/core/test_logging.py | 39 ++ tests/functional/resources/__init__.py 
| 0 tests/functional/resources/basic/__init__.py | 0 .../resources/basic/metrics/__init__.py | 0 .../resources/basic/metrics/test_metrics.py | 48 ++ .../resources/basic/registry/__init__.py | 0 .../resources/basic/registry/test_resource.py | 19 + .../resources/basic/status/__init__.py | 0 .../resources/basic/status/conftest.py | 69 +++ .../resources/basic/status/test_resource.py | 40 ++ tests/functional/resources/io/__init__.py | 0 .../functional/resources/io/http/__init__.py | 0 .../resources/io/http/test_resource.py | 23 + tests/unit/__init__.py | 0 tests/unit/codecs/__init__.py | 0 tests/unit/codecs/test_codecs.py | 27 ++ tests/unit/core/__init__.py | 0 tests/unit/core/settings/__init__.py | 0 .../unit/core/settings/example_settings.json | 4 + tests/unit/core/settings/example_settings.py | 1 + .../unit/core/settings/example_settings.yaml | 3 + .../core/settings/test_dict_deep_update.py | 14 + .../core/settings/test_settings_backends.py | 106 +++++ tests/unit/core/test_dependency_resolver.py | 36 ++ tests/unit/core/test_logging.py | 180 ++++++++ tests/unit/core/test_microservice.py | 268 +++++++++++ tests/unit/core/test_signals.py | 62 +++ tests/unit/core/test_tasks.py | 82 ++++ tests/unit/core/test_utils.py | 0 tests/unit/resources/__init__.py | 0 tests/unit/resources/basic/__init__.py | 0 .../unit/resources/basic/metrics/__init__.py | 0 .../unit/resources/basic/metrics/conftest.py | 47 ++ .../basic/metrics/meters/__init__.py | 0 .../basic/metrics/meters/test_base.py | 94 ++++ .../basic/metrics/meters/test_counter.py | 103 +++++ .../basic/metrics/meters/test_custom.py | 81 ++++ .../basic/metrics/meters/test_gauge.py | 71 +++ .../basic/metrics/meters/test_summary.py | 66 +++ .../basic/metrics/reporters/__init__.py | 0 .../basic/metrics/reporters/test_base.py | 35 ++ .../basic/metrics/reporters/test_carbon.py | 22 + .../basic/metrics/reporters/test_file.py | 12 + .../metrics/reporters/test_prometheus.py | 17 + .../basic/metrics/reporters/test_stdout.py | 15 + .../resources/basic/metrics/test_collector.py | 26 ++ .../resources/basic/metrics/test_manager.py | 11 + .../unit/resources/basic/registry/__init__.py | 0 .../resources/basic/registry/test_resource.py | 42 ++ tests/unit/resources/basic/status/__init__.py | 0 .../resources/basic/status/test_status.py | 144 ++++++ tests/unit/resources/debug/__init__.py | 0 tests/unit/resources/debug/test_profiler.py | 85 ++++ tests/unit/resources/io/__init__.py | 0 tests/unit/resources/io/http/test_http.py | 65 +++ tests/utils.py | 6 + 224 files changed, 11309 insertions(+), 2 deletions(-) create mode 100644 .gitignore create mode 100644 .travis.yml create mode 100644 AUTHORS.md create mode 100644 CHANGELOG.md create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md create mode 100644 LICENCE create mode 100644 MANIFEST.in create mode 100644 Makefile create mode 100644 docker-compose.yml create mode 120000 docs/CODE_OF_CONDUCT.md create mode 120000 docs/CONTRIBUTING.md create mode 100644 docs/Makefile create mode 120000 docs/README.md create mode 100644 docs/conf.py create mode 100644 docs/explanations/a_walk_around_the_settings.rst create mode 100644 docs/explanations/main.rst create mode 100644 docs/explanations/microservice_base_class.rst create mode 100644 docs/explanations/microservice_cookiecutter_template.rst create mode 100644 docs/explanations/microservice_lifecycle.rst create mode 100644 docs/how_to_guides/install_tamarco.rst create mode 100644 docs/how_to_guides/main.rst create mode 100644 docs/how_to_guides/make_the_docs.rst 
create mode 100644 docs/how_to_guides/setup_a_logging_backend.rst create mode 100644 docs/how_to_guides/setup_a_metrics_backend.rst create mode 100644 docs/how_to_guides/setup_a_setting_backend.rst create mode 100644 docs/how_to_guides/setup_settings_for_a_resource.rst create mode 100644 docs/how_to_guides/setup_settings_for_a_specific_microservice.rst create mode 100644 docs/how_to_guides/use_logging_resource.rst create mode 100644 docs/how_to_guides/use_metrics_resource.rst create mode 100644 docs/index.rst create mode 100644 docs/reference/main.rst create mode 100644 docs/tutorials/main.rst create mode 100644 docs/tutorials/quick_start.rst create mode 100644 docs/tutorials/write_your_first_microservice.rst create mode 100755 examples/http_resource/microservice.py create mode 100755 examples/metrics_resource/microservice.py create mode 100755 examples/metrics_resource/prometheus_example.py create mode 100644 examples/settings_watcher/microservice.py create mode 100755 examples/status_resource/microservice.py create mode 100644 pyproject.toml create mode 100644 requirements_dev.txt create mode 100644 setup.cfg create mode 100755 setup.py create mode 100644 sonar-project.properties create mode 100755 tamarco/__init__.py create mode 100755 tamarco/codecs/__init__.py create mode 100755 tamarco/codecs/interface.py create mode 100755 tamarco/codecs/json.py create mode 100755 tamarco/codecs/pickle.py create mode 100755 tamarco/codecs/yaml.py create mode 100755 tamarco/core/__init__.py create mode 100755 tamarco/core/dependency_resolver.py create mode 100755 tamarco/core/logging/__init__.py create mode 100755 tamarco/core/logging/formatters/__init__.py create mode 100755 tamarco/core/logging/formatters/colored.py create mode 100755 tamarco/core/logging/formatters/logstash.py create mode 100755 tamarco/core/logging/formatters/syslog.py create mode 100755 tamarco/core/logging/handlers/__init__.py create mode 100755 tamarco/core/logging/handlers/asyncronous.py create mode 100755 tamarco/core/logging/handlers/elasticsearch.py create mode 100644 tamarco/core/logging/handlers/http.py create mode 100755 tamarco/core/logging/handlers/logstash.py create mode 100755 tamarco/core/logging/handlers/redis.py create mode 100755 tamarco/core/logging/logging.py create mode 100755 tamarco/core/microservice.py create mode 100755 tamarco/core/patterns/__init__.py create mode 100644 tamarco/core/patterns/flyweight.py create mode 100755 tamarco/core/patterns/proxy.py create mode 100755 tamarco/core/patterns/singleton.py create mode 100755 tamarco/core/settings/__init__.py create mode 100755 tamarco/core/settings/backends/__init__.py create mode 100755 tamarco/core/settings/backends/dictionary.py create mode 100755 tamarco/core/settings/backends/etcd.py create mode 100755 tamarco/core/settings/backends/file_based.py create mode 100755 tamarco/core/settings/backends/interface.py create mode 100755 tamarco/core/settings/setting_proxy.py create mode 100755 tamarco/core/settings/settings.py create mode 100644 tamarco/core/settings/utils/__init__.py create mode 100644 tamarco/core/settings/utils/etcd_tool.py create mode 100644 tamarco/core/settings/utils/utils.py create mode 100755 tamarco/core/signals.py create mode 100755 tamarco/core/tasks.py create mode 100755 tamarco/core/utils.py create mode 100755 tamarco/resources/__init__.py create mode 100755 tamarco/resources/bases.py create mode 100755 tamarco/resources/basic/__init__.py create mode 100755 tamarco/resources/basic/metrics/__init__.py create mode 100755 
tamarco/resources/basic/metrics/collector.py create mode 100755 tamarco/resources/basic/metrics/manager.py create mode 100755 tamarco/resources/basic/metrics/meters/__init__.py create mode 100755 tamarco/resources/basic/metrics/meters/base.py create mode 100755 tamarco/resources/basic/metrics/meters/counter.py create mode 100644 tamarco/resources/basic/metrics/meters/gauge.py create mode 100755 tamarco/resources/basic/metrics/meters/summary.py create mode 100755 tamarco/resources/basic/metrics/reporters/__init__.py create mode 100755 tamarco/resources/basic/metrics/reporters/base.py create mode 100755 tamarco/resources/basic/metrics/reporters/carbon.py create mode 100755 tamarco/resources/basic/metrics/reporters/file.py create mode 100644 tamarco/resources/basic/metrics/reporters/prometheus.py create mode 100755 tamarco/resources/basic/metrics/reporters/stdout.py create mode 100755 tamarco/resources/basic/metrics/resource.py create mode 100644 tamarco/resources/basic/metrics/settings.py create mode 100644 tamarco/resources/basic/registry/__init__.py create mode 100644 tamarco/resources/basic/registry/resource.py create mode 100644 tamarco/resources/basic/registry/settings.py create mode 100755 tamarco/resources/basic/status/__init__.py create mode 100755 tamarco/resources/basic/status/resource.py create mode 100644 tamarco/resources/basic/status/settings.py create mode 100755 tamarco/resources/basic/status/status_codes.py create mode 100755 tamarco/resources/debug/__init__.py create mode 100644 tamarco/resources/debug/config.py create mode 100755 tamarco/resources/debug/memory_analizer.py create mode 100755 tamarco/resources/debug/pdb.py create mode 100644 tamarco/resources/debug/profiler.py create mode 100755 tamarco/resources/io/__init__.py create mode 100755 tamarco/resources/io/http/__init__.py create mode 100755 tamarco/resources/io/http/resource.py create mode 100755 tamarco/tamarco create mode 100755 tamarco/tools/__init__.py create mode 100755 tamarco/tools/ci.py create mode 100644 tamarco/tools/etcd.py create mode 100755 tamarco/tools/project/project_template/cookiecutter.json create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/.coveragerc create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/.gitignore create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/Dockerfile create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/HISTORY.md create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/README.md create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/app.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/Makefile create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/authors.rst create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/conf.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/history.rst create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/index.rst create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/installation.rst create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/make.bat create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name 
}}/docs/mkdsupport.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/readme.rst create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/usage.rst create mode 100644 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/requirements.txt create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/setup.cfg create mode 100644 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/__init__.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/conftest.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/custom_settings/settings.yml create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/functional/__init__.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/unit/__init__.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/__init__.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/logic/__init__.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/meters.py create mode 100755 tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/microservice.py create mode 100755 tamarco/tools/project/start_project.py create mode 100755 tests/__init__.py create mode 100755 tests/conftest.py create mode 100755 tests/custom_settings/settings.yml create mode 100755 tests/functional/__init__.py create mode 100644 tests/functional/conftest.py create mode 100755 tests/functional/core/__init__.py create mode 100755 tests/functional/core/settings/__init__.py create mode 100755 tests/functional/core/settings/conftest.py create mode 100755 tests/functional/core/settings/files/__init__.py create mode 100755 tests/functional/core/settings/files/settings.json create mode 100755 tests/functional/core/settings/files/settings.py create mode 100755 tests/functional/core/settings/files/settings.yaml create mode 100755 tests/functional/core/settings/test_backends.py create mode 100755 tests/functional/core/settings/utils/__init__.py create mode 100644 tests/functional/core/settings/utils/settings.yml create mode 100644 tests/functional/core/settings/utils/test_etcd_tool.py create mode 100644 tests/functional/core/test_logging.py create mode 100755 tests/functional/resources/__init__.py create mode 100755 tests/functional/resources/basic/__init__.py create mode 100755 tests/functional/resources/basic/metrics/__init__.py create mode 100755 tests/functional/resources/basic/metrics/test_metrics.py create mode 100644 tests/functional/resources/basic/registry/__init__.py create mode 100644 tests/functional/resources/basic/registry/test_resource.py create mode 100755 tests/functional/resources/basic/status/__init__.py create mode 100644 tests/functional/resources/basic/status/conftest.py create mode 100755 tests/functional/resources/basic/status/test_resource.py create mode 100755 tests/functional/resources/io/__init__.py create mode 100755 tests/functional/resources/io/http/__init__.py create mode 100755 tests/functional/resources/io/http/test_resource.py create mode 100755 tests/unit/__init__.py create mode 100644 tests/unit/codecs/__init__.py 
create mode 100644 tests/unit/codecs/test_codecs.py create mode 100755 tests/unit/core/__init__.py create mode 100755 tests/unit/core/settings/__init__.py create mode 100644 tests/unit/core/settings/example_settings.json create mode 100644 tests/unit/core/settings/example_settings.py create mode 100644 tests/unit/core/settings/example_settings.yaml create mode 100755 tests/unit/core/settings/test_dict_deep_update.py create mode 100644 tests/unit/core/settings/test_settings_backends.py create mode 100755 tests/unit/core/test_dependency_resolver.py create mode 100644 tests/unit/core/test_logging.py create mode 100755 tests/unit/core/test_microservice.py create mode 100644 tests/unit/core/test_signals.py create mode 100755 tests/unit/core/test_tasks.py create mode 100644 tests/unit/core/test_utils.py create mode 100755 tests/unit/resources/__init__.py create mode 100755 tests/unit/resources/basic/__init__.py create mode 100755 tests/unit/resources/basic/metrics/__init__.py create mode 100755 tests/unit/resources/basic/metrics/conftest.py create mode 100644 tests/unit/resources/basic/metrics/meters/__init__.py create mode 100644 tests/unit/resources/basic/metrics/meters/test_base.py create mode 100644 tests/unit/resources/basic/metrics/meters/test_counter.py create mode 100644 tests/unit/resources/basic/metrics/meters/test_custom.py create mode 100644 tests/unit/resources/basic/metrics/meters/test_gauge.py create mode 100644 tests/unit/resources/basic/metrics/meters/test_summary.py create mode 100644 tests/unit/resources/basic/metrics/reporters/__init__.py create mode 100644 tests/unit/resources/basic/metrics/reporters/test_base.py create mode 100644 tests/unit/resources/basic/metrics/reporters/test_carbon.py create mode 100644 tests/unit/resources/basic/metrics/reporters/test_file.py create mode 100644 tests/unit/resources/basic/metrics/reporters/test_prometheus.py create mode 100644 tests/unit/resources/basic/metrics/reporters/test_stdout.py create mode 100755 tests/unit/resources/basic/metrics/test_collector.py create mode 100755 tests/unit/resources/basic/metrics/test_manager.py create mode 100644 tests/unit/resources/basic/registry/__init__.py create mode 100644 tests/unit/resources/basic/registry/test_resource.py create mode 100755 tests/unit/resources/basic/status/__init__.py create mode 100755 tests/unit/resources/basic/status/test_status.py create mode 100644 tests/unit/resources/debug/__init__.py create mode 100644 tests/unit/resources/debug/test_profiler.py create mode 100755 tests/unit/resources/io/__init__.py create mode 100644 tests/unit/resources/io/http/test_http.py create mode 100644 tests/utils.py
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..097d5ab
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,79 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.flake8
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+.hypothesis/
+reports
+.pytest_cache
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# pyenv python configuration file
+.python-version
+
+# Virtual environments
+virtualenv
+
+# IDE config
+.idea
+.vscode
+
+.env
+
+docs/apidoc
+
+# Sonarqube
+.scannerwork
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..b5a1be4
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,26 @@
+language: python
+services:
+  - docker
+# The default distribution (Ubuntu 14.04) doesn't support Python 3.7.
+dist: xenial
+python:
+  - "3.6"
+  - "3.7"
+install:
+  - pip install -r requirements_dev.txt
+script: make test && make linters
+notifications:
+  email:
+    - opensource@system73.com
+deploy:
+  provider: pypi
+  user:
+    secure: "X+TaBBCa/BlcxivMzvMFBM9AI2DD0+GC7os6BIx+fzXoAk+0r1lHNL+GzAAMaTqDHxOW+miEHIvJVb86ueki/LQyYEBbLJPcmuokeCWtnJSW1bVG+qO1IDMS+tL8OAYaXxenpqTVMCWcjdvHsWVyOO+V7hdFAUCIN1UnxfsWz+xN24hd5LJpo3pXyYqzfejATP9qtUMLYT/woGLVo0Ie/WjuUYXvXz7Qz63bjEpL5iw2JLQr57rPk0a4KDw6Zvoqy+MHT2/gzqYmJqJeXZBKLxmC/A4X6dzL9IzqgUoVO/XzU61FkHBlo05/bPf1+LHUpDtJWxAayQzEHZ4THQtCxmPOk+XEIMquIcij3U7AQHX8Y6xqZPu7dml+e08LA+LeGhZBbykhTePn2ir9ZlZoe2bGhwT4lKORb8TdpFFIevP9YNeIeYR0Cfph6tnss7p2bLYSGJQw9F7LCMtvlK1zCcud4NSv1sP80axm4XFNWJHqnRpJI7hf5D4LfvpxmrvgWY10d17wTGuvp4dKnacqsCjVB2YBgh+Fdxc8Bn1tBFfnF0Y9kfztrHlL11nDGg0aSyIOmQXzB6YVesuckKta9TtvSmzR0Ogr/XY7Il8wkoKjgizjuCoqwEt0iaQZ7drkEOZLO0DYou+3kr5j4nwvZHqQxHbTZflcqEpcv9MpnCU="
+  password:
+    secure: "BSyZWIgKAK7EaMEIjGcjq0ZgKY8EBtC6UIogKgAEb7ap1OoWcPGxp+8U0TBvnnksf27eOrWnfkV6AIrpbkgTvxrUa3JqIs3mF9y0dYuOV5f+G4tVg0Eb6Uok13DMsQYygJBUntAkSHYx82Tm95kObJpddw/tjxweEQG2Iyig0zo5KzJwzlCRZYDmyvMD/x2/bx9gsL+cSUNkH89wshYnumG2F7jkC1YX01yzAGXZlMYrvh0i1rLd+7Hho2LTYva+NMq15OBYbRMot0UnDq9SE1GT8CrjOIC2ahC7ykMVaZeCpSWQaLrKUfhZwiPY9XYX2IpApu6E8EUeZIGvEenSg7OARQ7GJWAvGXm2tNf+VRPzNur5KADQKZtneXWrEMlcVtrwuxzFH6bWgTx+O3qx7SttIVRqy0dLhd3pB7mNYSUcxrhV4AmgJ0+O6+RKguxNRve186Yb7mqB0UGiKyN9I9V+hlE3vhry8gmRm7BcImxhOWL5mq2xhyOa6Jun/2dpcUTSOZue55rhGbsJBxOPRcnZ9sxl3Z4tOGQEY+oVNUA2Xz4wtFd7TrPXf5/52ur4S5bxZmYy+VKNHfQzv7lWtxMf6wt1uoCxX8dXUUyms2oh4zhEm0/FRC41I32tPSki78Ciy2KAQS7FAFGyiL/x58fyBRRFBQRVyQD9yaMZT6Y="
+  # Necessary when the build has more than one job (Travis uses one job per Python version).
+  # With this setting only the first job uploads the package to PyPI, preventing the others from failing.
+  skip_existing: true
+  on:
+    tags: true
+    branch: master
\ No newline at end of file
diff --git a/AUTHORS.md b/AUTHORS.md
new file mode 100644
index 0000000..c4a32ac
--- /dev/null
+++ b/AUTHORS.md
@@ -0,0 +1,21 @@
+Tamarco is written and maintained by the System73 Engineering Team.
+
+# Core Team
+
+* Alexis Lopez
+* Ayoze Fernández @ay0o
+* Elena Curbelo @ecurbelo
+* Jesus Rubio @barrenao
+* José Melero @jmf-mordis
+* Victor Pérez @ivictorpd
+
+
+# Previous members of the core team
+
+* Raul Marrero @rulox
+* Sara Báez @Syrka
+* Sergio Medina, main design of the framework @lumasepa
+
+
+# Contributors
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..4c4f2bb
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,5 @@
+# Changelog
+
+### 0.1.0
+
+* Public release
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..366f8d7
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,76 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, sex characteristics, gender identity and expression,
+level of experience, education, socio-economic status, nationality, personal
+appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+  advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+  address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies within all project spaces, and it also applies when
+an individual is representing the project or its community in public spaces.
+Examples of representing a project or community include using an official
+project e-mail address, posting via an official social media account, or acting
+as an appointed representative at an online or offline event. Representation of
+a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at [opensource@system73.com](mailto:opensource@system73.com).
+All complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see
+https://www.contributor-covenant.org/faq
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..9b56690
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,41 @@
+# Contribution guide
+Welcome to the project! First of all, we want to thank you: we are always happy to welcome new collaborators and contributions.
+
+This project is governed by the Tamarco [Code of Conduct](https://github.com/system73/tamarco/blob/master/CODE_OF_CONDUCT.md) and we expect all our members to follow it.
+
+## Your first contribution
+
+There are many ways to help: improve the documentation, write tutorials or examples, improve the docstrings, write
+tests, report bugs, etc.
+
+You can take a look at the tickets with the tag `good first issue`.
+
+## Running tests and linters
+
+All contributions must have at least unit tests.
+
+Make sure that the tests are in the correct place. We have separated the tests into two categories: unit tests
+(`tests/unit`) and functional tests (`tests/functional`). Inside each folder the tests should follow the same structure
+as the main package. For example, a unit test of `tamarco/core/microservice.py` should be placed in
+`tests/unit/core/test_microservice.py`.
+
+Functional tests are considered those that do some kind of I/O, such as those that need third-party services (AMQP,
+Kafka, Postgres, ...), open servers (HTTP and WebSocket resources), manage files or wait for an event. The goal is to
+keep the unit tests fast so that they can be run frequently during development.
+
+Most of the functional tests need docker and docker-compose installed on the system to run some third-party services.
+
+Before submitting a pull request, please check that all the tests and linters pass.
+
+```
+make test
+make linters
+```
+
+## Code review process
+
+The project maintainers will review your pull request and leave feedback.
+
+* You need at least two approvals from core developers.
+* The tests and linters should pass in the CI.
+* The code must have at least 80% test coverage.
diff --git a/LICENCE b/LICENCE
new file mode 100644
index 0000000..8f4375e
--- /dev/null
+++ b/LICENCE
@@ -0,0 +1,21 @@
+The MIT License
+
+Copyright (c) 2019-present System73
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..4765b5e
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,12 @@
+
+include HISTORY.md
+include README.md
+
+recursive-include tests *
+recursive-include tamarco *
+recursive-include examples *
+recursive-include docs *
+recursive-exclude * __pycache__
+recursive-exclude * *.py[co]
+
+recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..1b3e335
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,91 @@
+.PHONY: clean clean-test clean-pyc clean-build docs help
+.DEFAULT_GOAL := help
+define BROWSER_PYSCRIPT
+import os, webbrowser, sys
+try:
+    from urllib import pathname2url
+except:
+    from urllib.request import pathname2url
+
+webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1])))
+endef
+export BROWSER_PYSCRIPT
+
+BROWSER := python -c "$$BROWSER_PYSCRIPT"
+
+help: Makefile
+	@echo
+	@echo " Choose a command to run in "$(PROJECT_NAME)":"
+	@echo
+	@sed -n 's/^##//p' $< | column -t -s ':' | sed -e 's/^/ /'
+	@echo
+
+## clean: remove all build, test, coverage and Python artifacts
+clean: clean-build clean-pyc clean-test
+
+## clean-build: remove build artifacts
+clean-build:
+	rm -fr build/
+	rm -fr dist/
+	rm -fr .eggs/
+	find . -name '*.egg-info' -exec rm -fr {} +
+	find . -name '*.egg' -exec rm -f {} +
+
+## clean-pyc: remove Python file artifacts
+clean-pyc:
+	find . -name '*.pyc' -exec rm -f {} +
+	find . -name '*.pyo' -exec rm -f {} +
+	find . -name '*~' -exec rm -f {} +
+	find . -name '__pycache__' -exec rm -fr {} +
+
+## clean-test: remove test and coverage artifacts
+clean-test:
+	rm -fr .tox/
+	rm -f .coverage
+	rm -fr htmlcov/
+	rm -fr reports/
+	rm -fr .pytest_cache
+	rm -fr .cache
+	rm -fr .coverage.*
+
+## setup-test: setup the current environment to run the tests
+setup-test: clean
+	mkdir -p reports
+
+## test: run the tests
+test: setup-test
+	pytest -vv --junit-xml=reports/test.xml
+
+## test-coverage: run the tests with coverage
+test-coverage: setup-test
+	coverage run -m pytest tests -vv --junit-xml=reports/test.xml
+	coverage combine && coverage xml && coverage html
+
+## linters: run flake8 and black
+linters:
+	python -m flake8 . && python -m black . --check
+
+## docs: generate Sphinx HTML documentation, including API docs
+docs:
+	rm -rf docs/apidoc/
+	sphinx-apidoc -o docs/apidoc tamarco
+	$(MAKE) -C docs clean
+	$(MAKE) -C docs html
+	$(BROWSER) docs/_build/html/index.html
+
+## servedocs: compile the docs watching for changes and open the doc in the browser
+servedocs: docs
+	watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D .
+
+## dist: build the package
+dist: clean
+	python setup.py sdist
+	ls -l dist
+
+## install: install the package to the active Python's site-packages
+install: clean
+	python setup.py install
+
+## format-code: formats the code with a code formatter
+format-code:
+	black .
diff --git a/README.md b/README.md
index e8a6ae5..dea343a 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,59 @@
-# tamarco
-Python microservices framework
+# Tamarco
+
+[![Build Status](https://travis-ci.com/System73/tamarco.svg?branch=master)](https://travis-ci.com/System73/tamarco)
+
+A microservices framework designed for Python and asyncio.
+
+## Features
+
+* Lifecycle management.
+* Standardized settings via etcd.
+* Automatic logging configuration. Support for sending logs to an ELK stack.
+* Application metrics via Prometheus.
+* Designed for asyncio.
+* Messaging patterns. The framework comes with support for AMQP and Kafka via external resources. The AMQP resource
+implements the publish/subscribe, request/response and push/pull patterns.
+* Custom encoders and decoders.
+* Plugin-oriented architecture. Anyone can create a new resource to add new functionality. External resources are
+integrated into the framework transparently to the user.
+* Graceful shutdown.
+
+## Resources
+
+The framework makes it easy to write external resources and integrate them into the lifecycle of a microservice. The
+available resources are:
+
+* Metrics.
+* Registry.
+* Status.
+* Profiler.
+* Memory analyzer.
+* HTTP.
+* Kafka. Not released yet.
+* AMQP. Not released yet.
+* Postgres. Not released yet.
+* Influxdb. Not released yet.
+* Redis. Not released yet.
+* Websocket. Not released yet.
+
+Let us know if you have written a resource.
+
+## Documentation
+
+The documentation can be built by running `make docs`. It will be publicly available soon.
+
+## Examples
+
+There are several examples in the `examples` folder.
+
+To run them, install Tamarco, start the docker-compose services (not necessary for all the examples) and run the
+example:
+
+```bash
+pip install tamarco
+docker-compose up -d
+python examples/http_resource/microservice.py
+```
+
+## Requirements
+
+Support for Python >= 3.6.
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..630c913
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,22 @@
+version: '2.2'
+
+services:
+  etcd:
+    image: "quay.io/coreos/etcd:v3.3"
+    container_name: "etcd"
+    environment:
+      ETCD_LISTEN_CLIENT_URLS: http://0.0.0.0:2379
+      ETCD_ADVERTISE_CLIENT_URLS: http://0.0.0.0:2379
+      ETCD_API_VERSION: "2"
+    ports:
+      - 2379:2379
+    restart: always
+
+  redis:
+    image: "grokzen/redis-cluster:4.0.9"
+    container_name: "redis"
+    restart: always
+    environment:
+      STANDALONE: "true"
+    ports:
+      - '7000-7007:7000-7007'
diff --git a/docs/CODE_OF_CONDUCT.md b/docs/CODE_OF_CONDUCT.md
new file mode 120000
index 0000000..0400d57
--- /dev/null
+++ b/docs/CODE_OF_CONDUCT.md
@@ -0,0 +1 @@
+../CODE_OF_CONDUCT.md
\ No newline at end of file
diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md
new file mode 120000
index 0000000..44fcc63
--- /dev/null
+++ b/docs/CONTRIBUTING.md
@@ -0,0 +1 @@
+../CONTRIBUTING.md
\ No newline at end of file
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..afbd1a2
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = python -msphinx
+SPHINXPROJ    = Tamarco
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/docs/README.md b/docs/README.md
new file mode 120000
index 0000000..32d46ee
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1 @@
+../README.md
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..d76f97c
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath(".."))
+
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.doctest",
+    "sphinx.ext.todo",
+    "sphinx.ext.coverage",
+    "sphinx.ext.imgmath",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.napoleon",
+    "m2r",
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = {".rst": "restructuredtext", ".md": "markdown"}
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = "Tamarco"
+copyright = "2019, System73"
+author = "System73"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = "0.13.1"
+# The full version, including alpha/beta/rc tags.
+release = version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+# html_theme_options = {}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+
+# -- Options for HTMLHelp output ------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "Tamarcodoc"
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    #
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #
+    # 'preamble': '',
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [(master_doc, "Tamarco.tex", "Tamarco Documentation", "System73", "manual")]
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [(master_doc, "tamarco", "Tamarco Documentation", [author], 1)]
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    (
+        master_doc,
+        "Tamarco",
+        "Tamarco Documentation",
+        author,
+        "Tamarco",
+        "Microservices Framework that aims to accomplish the 12 Factor app",
+        "Miscellaneous",
+    )
+]
diff --git a/docs/explanations/a_walk_around_the_settings.rst b/docs/explanations/a_walk_around_the_settings.rst
new file mode 100644
index 0000000..4fce3d3
--- /dev/null
+++ b/docs/explanations/a_walk_around_the_settings.rst
@@ -0,0 +1,80 @@
+A walk around the settings
+==========================
+
+Tamarco is an automation framework for managing the lifecycle and resources of microservices. The
+configuration plays a critical role in the framework: all the other resources and components of the framework
+strongly depend on the settings.
+
+When you have thousands of microservices running in production, the way the configuration of the system is provided
+becomes critical. Some desirable characteristics of a microservice settings framework are:
+
+#) The configuration should be centralized. A microservice compiled into a container should be able to run in
+   different environments without any change in the code. For example, the network location of a database or its
+   credentials aren't going to be the same in a production environment and in a staging environment.
+
+#) The configuration should be able to change at runtime without restarting the microservices. For example, you
+   should be able to update the configuration of your WebSocket server without closing the existing connections.
+
+#) The configuration should have redundancy. One of the advantages of a microservice architecture is how easily it
+   provides redundancy for your services: you should be able to run the microservices on several machines, so that if
+   one fails, the others keep working correctly. None of this makes sense if your services aren't able to read the
+   configuration, so to take advantage of this architectural property, all critical services of the system must be
+   redundant as well.
+
+The external backend supported by this framework right now is etcd v2; we strongly recommend its use in production
+with Tamarco.
+
+Other settings backends are available for development:
+
+* Dictionary
+* File based (YAML or JSON)
+
+Settings structure
+------------------
+
+The settings can be configured from a simple YAML file in etcd [Link of how to configure an etcd from a file]. A
+generic configuration could be the following:
+
+.. code-block:: yaml
+
+    etcd_ready: true
+    system:
+      deploy_name: tamarco_tutorial
+      logging:
+        profile: PRODUCTION
+        file: false
+        stdout: true
+      resources:
+        amqp:
+          host: 172.31.0.102
+          port: 5672
+          vhost: /
+          user: guest
+          password: guest
+          connection_timeout: 10
+          queues_prefix: ""
+        kafka:
+          bootstrap_servers: 172.31.0.1:9092,172.31.0.2:9092
+      microservices:
+        http_server:
+          application_cache_seconds: 10
+
+The etcd_ready setting is written by the etcd configuration script when it finishes configuring all the other settings.
+This prevents the microservices from reading the settings before the environment is properly configured.
+
+All the other Tamarco settings live under a root path named "system". The settings under the root path are:
+
+* deploy_name: name that identifies a deployment; used by default by the logging and metrics resources to
+distinguish logs and metrics coming from different deployments. Possible use cases: filtering the logs of deployments
+in different regions, or separating development, staging and production environments within the same monitoring system.
+
+* logging: configuration of the system's logging. It lives outside the resources section because it cannot be
+avoided: logging is a core component, and all the microservices and all the resources emit logs. More information
+about the possible configuration in [TODO link to logging section].
+
+* resources: configuration of the resources of the system; each resource can be used by one or more microservices.
+:ref:`setup_setting_for_a_resource`
+
+* microservices: configuration of the business logic of each microservice. This section also has a special property:
+all the other settings can be overridden in this section for a specific microservice.
+:ref:`setup_setting_for_a_specific_microservice`.
diff --git a/docs/explanations/main.rst b/docs/explanations/main.rst
new file mode 100644
index 0000000..c5c5982
--- /dev/null
+++ b/docs/explanations/main.rst
@@ -0,0 +1,8 @@
+============
+Explanations
+============
+
+.. include:: a_walk_around_the_settings.rst
+.. include:: microservice_lifecycle.rst
+.. include:: microservice_base_class.rst
+.. include:: microservice_cookiecutter_template.rst
diff --git a/docs/explanations/microservice_base_class.rst b/docs/explanations/microservice_base_class.rst
new file mode 100644
index 0000000..7131828
--- /dev/null
+++ b/docs/explanations/microservice_base_class.rst
@@ -0,0 +1,48 @@
+Microservice base class
+=======================
+
+All the microservices must inherit from the Tamarco Microservice class. Let's take a deeper look into this class.
+
+To launch the microservice, we use the `run` method:
+
+.. code-block:: python
+
+    from tamarco.core.microservice import Microservice
+
+    class MyMicroservice(Microservice):
+        [...]
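+
+    # run() is blocking: it boots the service and keeps the asyncio event
+    # loop running, driving the lifecycle steps described below until the
+    # service is stopped or an unrecoverable error occurs.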
+    ms = MyMicroservice()
+    ms.run()
+
+
+When we run the microservice, the setup of the service follows a certain order, and afterwards the event loop keeps
+running until an unrecoverable error occurs or the service is stopped.
+
+Lifecycle steps:
+
+1. Configure and load the microservice settings (and those of its resources, if used).
+2. Configure and start the logging service.
+3. Pre-start stage: run the `pre_start` method of all the Tamarco resources (only the resources actually used by the
+microservice). This method can be overridden if we want to run some code in this step, but don't forget to also call
+the parent method:
+
+.. code-block:: python
+
+    async def pre_start(self):
+        await super().pre_start()
+        [...]
+
+4. Start stage: run the `start` method of all the Tamarco resources (only the resources actually used by the
+microservice). It also collects all the tasks declared in the microservice (using the `@task` decorator on a method)
+and launches them. Generally, this is the stage where the database connections or other services used by the resources
+are started. This `start` method can be overridden if we want to run some code in this step, but don't forget to also
+call the parent method!
+5. Post-start stage: run the `post_start` method of all the Tamarco resources (only the resources actually used by the
+microservice). This method can be overridden if we want to run some code in this step, but don't forget to also call
+the parent method!
+6. Stop stage: run the `stop` method of all the Tamarco resources (only the resources actually used by the
+microservice). In this stage all resources and tasks are stopped. This method can be overridden if we want to run some
+code in this step, but don't forget to also call the parent method!
+7. Post-stop stage: run the `post_stop` method of all the Tamarco resources (only the resources actually used by the
+microservice). This step is useful if you want to run some instructions after the microservice stops. This `post_stop`
+method can be overridden if we want to run some code in this step, but don't forget to also call the parent method!
\ No newline at end of file
diff --git a/docs/explanations/microservice_cookiecutter_template.rst b/docs/explanations/microservice_cookiecutter_template.rst
new file mode 100644
index 0000000..cd39802
--- /dev/null
+++ b/docs/explanations/microservice_cookiecutter_template.rst
@@ -0,0 +1,54 @@
+Microservice cookiecutter template
+==================================
+
+When you install the Tamarco Python package, a `tamarco` command becomes available. With this command you can create a
+new microservice skeleton after answering a few questions::
+
+    $ tamarco start_project
+
+1. Project name: the project name. The script will create a folder with this name, containing all the initial files,
+in the directory where you execute the tamarco command. Also used in the docs and README files.
+2. Project slug: the project short name. Inside the project name folder, a folder with this name is created; all the
+microservice logic code should live there. Also used in the docs files.
+3. Full name: the author's full name. Used in the docs files.
+4. Email: the author's email. Used in the docs files.
+5. Version: the initial project version. It will be copied to the setup.cfg file.
+6. Project short description: this text will be included in the initial README file.
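+
+For illustration, the generated ``microservice.py`` might look roughly like the sketch below. This is a hedged guess
+based on the Microservice base class described above; the name value is a hypothetical answer to the prompts, and the
+exact contents of the template may differ::
+
+    from tamarco.core.microservice import Microservice
+
+
+    class MyMicroservice(Microservice):
+        # "my_microservice" is an illustrative value, filled in from the
+        # answers given to the cookiecutter prompts.
+        name = "my_microservice"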
+
+
+The project skeleton will be::
+
+    |
+    |- docs (folder with the files to generate the Sphinx documentation)
+    |
+    |- tests (the microservice tests will be stored here)
+    |
+    |- <project slug>
+    |  |
+    |  |- logic (microservice business logic code)
+    |  |
+    |  |- resources (code related to the microservice resources: databases, ...)
+    |  |
+    |  |- meters.py (application meters: prometheus, ...)
+    |  |
+    |  |- microservice.py (microservice class inheriting from the Tamarco Microservice class)
+    |
+    |- .coveragerc (coverage configuration file)
+    |
+    |- .gitignore
+    |
+    |- app.py (entrypoint file for the microservice)
+    |
+    |- Dockerfile
+    |
+    |- HISTORY.md
+    |
+    |- Makefile (run the tests, generate docs, create virtual environments, install requirements, ...)
+    |
+    |- README.md
+    |
+    |- requirements.txt
+    |
+    |- setup.cfg (several Python package configurations: bumpversion, flake8, pytest, ...)
+    |
\ No newline at end of file
diff --git a/docs/explanations/microservice_lifecycle.rst b/docs/explanations/microservice_lifecycle.rst
new file mode 100644
index 0000000..af90fd6
--- /dev/null
+++ b/docs/explanations/microservice_lifecycle.rst
@@ -0,0 +1,132 @@
+Microservice lifecycle
+======================
+
+Start
+-----
+
+When the microservice is initialized, the following steps are performed automatically:
+
+#. Start provisional logging with default parameters. Needed in case some error happens before the final
+   logging configuration can be read from the settings.
+#. Initialize the settings. All the other resources of the framework depend on being able to read the centralized
+   configuration.
+#. Initialize the logging with the proper settings. With the settings available, the next step is to make sure that
+   all the resources can send proper log messages in case of failure before starting them.
+#. Call the pre_start of the microservice, which triggers the pre_start of the resources. Operations that need to
+   be performed before starting the microservice go here. For example, an HTTP server could need to render some
+   templates before starting the server. It is not advisable to perform I/O operations in the pre_start step.
+#. Call the start of the microservice, which starts all the resources. In the start step the resources
+   are expected to perform the initial I/O operations: start a server, connect to a database, etc.
+#. Call the post_start of the microservice, which calls the post_start of all the resources. In this step all
+   the resources should be working normally, because they were started in the previous step.
+
+Tamarco builds a dependency graph to determine the order in which the resources should be initialized.
+
+Status of a resource
+--------------------
+
+All the resources should report their state, which can be one of the following:
+
+#. NOT_STARTED
+#. CONNECTING
+#. STARTED
+#. STOPPING
+#. STOPPED
+#. FAILED
+
+The status of all the resources is exposed via an HTTP API and used by the default restart policies to detect when a
+resource is failing.
+
+Resource restart policies
+-------------------------
+
+The status resource comes by default with the microservice; its responsibility is to apply the
+restart policies of the microservice and report the state of the resources via an HTTP API.
+
+There are two settings to control automatically what should be done when a resource has a FAILED status:
+
+.. code-block:: yaml
+.. code-block:: yaml
+
+    system:
+      resources:
+        status:
+          restart_policy:
+            resources:
+              restart_microservice_on_failure: ['redis']
+              restart_resource_on_failure: ['kafka']
+
+Where each resource is identified by the name of the resource instance in the microservice class.
+
+Keep in mind that the recommended approach is not to use these restart policies, but to implement a circuit breaker in
+each resource. Sometimes, however, you may want a simpler solution, and in some cases the default restart policies can
+be an acceptable way to go.
+
+Stop
+----
+
+The shutdown of a microservice can be triggered by a restart policy (restart_microservice_on_failure), by a system
+signal, by a resource (not recommended, a resource shouldn't have the responsibility of stopping a service) or by
+business code.
+
+A service should only be stopped by calling the `stop_gracefully` method of the microservice instance.
+
+The shutdown is performed in the following steps:
+
+#. Call the stop() method of the microservice, which calls the stop() of all the resources.
+#. Call the post_stop() method of the microservice, which calls the post_stop() method of all the resources.
+#. | The exit is forced after 30 seconds if the microservice didn't finish the shutdown in this time, or if some
+   | resource raises an exception while stopping the service.
+
+
+Override lifecycle methods
+--------------------------
+
+The lifecycle methods are designed to be overridden by the user, allowing code to be executed at a certain point of
+the lifecycle. Just take into account that these methods are asynchronous and that the `super()` method should be
+called.
+
+The available methods are:
+
+* pre_start
+* start
+* post_start
+* stop
+* post_stop
+
+.. code-block:: python
+
+    from tamarco import Microservice
+
+    class LifecycleMicroservice(Microservice):
+
+        async def pre_start(self):
+            print("Before pre_start of the service")
+            await super().pre_start()
+            print("After pre_start of the service")
+
+        async def start(self):
+            print("Before start of the service")
+            await super().start()
+            print("After start of the service")
+
+        async def post_start(self):
+            print("Before post_start of the service")
+            await super().post_start()
+            print("After post_start of the service")
+
+        async def stop(self):
+            print("Before stop of the service")
+            await super().stop()
+            print("After stop of the service")
+
+        async def post_stop(self):
+            print("Before post_stop of the service")
+            await super().post_stop()
+            print("After post_stop of the service")
+
+
+    def main():
+        microservice = LifecycleMicroservice()
+        microservice.run()
+
+    if __name__ == '__main__':
+        main()
diff --git a/docs/how_to_guides/install_tamarco.rst b/docs/how_to_guides/install_tamarco.rst
new file mode 100644
index 0000000..f232f3c
--- /dev/null
+++ b/docs/how_to_guides/install_tamarco.rst
@@ -0,0 +1,8 @@
+How to install Tamarco
+======================
+
+Tamarco is compatible with Python >= 3.6. The recommended version is Python 3.7.
+
+To install Tamarco, simply run this command in your terminal of choice::
+
+    $ pip3 install tamarco
diff --git a/docs/how_to_guides/main.rst b/docs/how_to_guides/main.rst
new file mode 100644
index 0000000..1df3d8d
--- /dev/null
+++ b/docs/how_to_guides/main.rst
@@ -0,0 +1,13 @@
+=============
+How-To Guides
+=============
+
+.. include:: install_tamarco.rst
+.. include:: make_the_docs.rst
+.. include:: setup_a_logging_backend.rst
+.. include:: setup_a_metrics_backend.rst
+.. include:: setup_a_setting_backend.rst
+.. include:: setup_settings_for_a_specific_microservice.rst
+.. include:: setup_settings_for_a_resource.rst
+.. include:: use_logging_resource.rst
+.. include:: use_metrics_resource.rst
diff --git a/docs/how_to_guides/make_the_docs.rst b/docs/how_to_guides/make_the_docs.rst
new file mode 100644
index 0000000..e45aea6
--- /dev/null
+++ b/docs/how_to_guides/make_the_docs.rst
@@ -0,0 +1,9 @@
+How to make the docs
+====================
+
+Run the following command::
+
+    $ make docs
+
+The documentation is generated in `docs/_build/html/`.
\ No newline at end of file
diff --git a/docs/how_to_guides/setup_a_logging_backend.rst b/docs/how_to_guides/setup_a_logging_backend.rst
new file mode 100644
index 0000000..f8916c2
--- /dev/null
+++ b/docs/how_to_guides/setup_a_logging_backend.rst
@@ -0,0 +1,109 @@
+How to setup the logging
+========================
+
+The profile
+-----------
+
+Two different profiles are allowed:
+
+* DEVELOP. The logging level is set to debug.
+* PRODUCTION. The logging level is set to info.
+
+The profile setting needs to be in capital letters.
+
+.. code-block:: yaml
+
+    system:
+      logging:
+        profile:
+
+
+Stdout
+------
+
+The logging by stdout can be enabled or disabled:
+
+.. code-block:: yaml
+
+    system:
+      logging:
+        stdout: true
+
+
+File handler
+------------
+
+Write all the logs to files with a `RotatingFileHandler`. It is enabled
+when the system/logging/file_path setting exists, saving the logs in the specified location.
+
+.. code-block:: yaml
+
+    system:
+      logging:
+        file_path:
+
+
+Logstash
+--------
+
+Logstash is the log collector used by Tamarco. It collects, processes, enriches and unifies all the logs sent by the
+different components of an infrastructure. Logstash supports multiple choices for the log ingestion; we support three
+of them simply by activating the corresponding settings:
+
+
+Logstash UDP handler
+````````````````````
+
+Send logs to Logstash using a raw UDP socket.
+
+.. code-block:: yaml
+
+    system:
+      logging:
+        logstash:
+          enabled: true
+          host: 127.0.0.1
+          port: 5044
+          fqdn: false
+          version: 1
+
+
+Logstash Redis handler
+``````````````````````
+
+Send logs to Logstash using the Redis pub/sub pattern.
+
+.. code-block:: yaml
+
+    system:
+      logging:
+        redis:
+          enabled: true
+          host: 127.0.0.1
+          port: 6379
+          password: my_password
+          ssl: false
+
+
+Logstash HTTP handler
+`````````````````````
+
+Send logs to Logstash using HTTP requests.
+
+
+.. code-block:: yaml
+
+    system:
+      logging:
+        http:
+          enabled: true
+          url: http://127.0.0.1
+          user:
+          password:
+          max_time_seconds: 15
+          max_records: 100
+
+The logs are sent in bulk: max_time_seconds is the maximum time to wait before sending the logs, and max_records
+configures the maximum number of logs in a single HTTP request (the first condition reached triggers the request).
diff --git a/docs/how_to_guides/setup_a_metrics_backend.rst b/docs/how_to_guides/setup_a_metrics_backend.rst
new file mode 100644
index 0000000..514c79f
--- /dev/null
+++ b/docs/how_to_guides/setup_a_metrics_backend.rst
@@ -0,0 +1,109 @@
+.. _setup_a_metric_backend:
+
+
+How to setup a metric backend
+=============================
+
+The Microservice class comes by default with the metrics resource, which means that the microservice is going to read
+the metrics configuration without any explicit code in your microservice.
+
+
+Prometheus
+----------
+
+Prometheus, unlike other metric backends, follows a pull-based (over HTTP) architecture for the metric collection.
+It means that the microservices just have the responsibility of exposing the metrics via an HTTP server, and
+Prometheus collects the metrics by requesting them from the microservices.
+
+It is the supported metric backend with the most active development right now.
+
+The metrics resource uses another resource, named tamarco_http_report_server, which is an HTTP server, to expose the
+application metrics. The metrics are always exposed at the `/metrics` endpoint. To expose the Prometheus metrics, the
+microservices should be configured as follows:
+
+.. code-block:: yaml
+
+    system:
+      resources:
+        metrics:
+          collect_frequency: 10
+          handlers:
+            prometheus:
+              enabled: true
+        tamarco_http_report_server:
+          host: 127.0.0.1
+          port: 5747
+
+With this configuration, a microservice is going to expose the Prometheus metrics at http://127.0.0.1:5747/metrics.
+
+The collect frequency defines the update period in seconds of the metrics in the HTTP server.
+
+The microservice name is automatically added as a prefix to the name of the metrics. Example: a summary named
+http_response_time in a microservice named billing_api is going to be named billing_api_http_response_time in the
+exposed metrics.
+
+
+Carbon
+------
+
+Only the plaintext protocol sent directly via a TCP socket is supported.
+
+To configure a carbon handler:
+
+.. code-block:: yaml
+
+    system:
+      resources:
+        metrics:
+          handlers:
+            carbon:
+              enabled: true
+              host: 127.0.0.1
+              port: 2003
+          collect_frequency: 15
+
+The collect frequency defines the period in seconds with which the metrics are collected and sent to carbon.
+
+
+File
+----
+
+It is an extension of the carbon handler: instead of sending the metrics to carbon, it just appends the metrics to a
+file. The format is the following: ` `.
+
+To configure the file handler:
+
+.. code-block:: yaml
+
+    system:
+      resources:
+        metrics:
+          handlers:
+            file:
+              enabled: true
+              path: /tmp/tamarco_metrics
+          collect_frequency: 15
+
+The collect frequency defines the period in seconds with which the metrics are collected and written to the file.
+
+
+Stdout
+------
+
+It is an extension of the carbon handler: instead of sending the metrics to carbon, it just writes the metrics to
+stdout. The format is the following: ` `.
+
+To configure the stdout handler:
+
+.. code-block:: yaml
+
+    system:
+      resources:
+        metrics:
+          handlers:
+            stdout:
+              enabled: true
+          collect_frequency: 15
+
+
+The collect frequency defines the period in seconds with which the metrics are collected and written to stdout.
diff --git a/docs/how_to_guides/setup_a_setting_backend.rst b/docs/how_to_guides/setup_a_setting_backend.rst
new file mode 100644
index 0000000..977e09d
--- /dev/null
+++ b/docs/how_to_guides/setup_a_setting_backend.rst
@@ -0,0 +1,120 @@
+.. _setup_a_setting_backend:
+
+How to setup a setting backend
+==============================
+
+There are several ways to set up the settings; etcd is the recommended backend for a centralized configuration. The
+YML file and dictionary backends are useful for development.
+
+etcd
+----
+
+etcd is the recommended backend for a centralized configuration. All the configuration of the system can be in etcd,
+but before being able to read it, we should tell the microservices how to access the etcd instance.
+
+The following environment variables need to be properly configured to use etcd:
+
+* TAMARCO_ETCD_HOST: Needed to set up etcd as the setting backend.
+* TAMARCO_ETCD_PORT: Optional variable; by default it is 2379.
+* ETCD_CHECK_KEY: Optional variable; if set, the microservice waits until the specified etcd key exists before
+initializing. It avoids race conditions between the initialization of etcd and of the microservices. Useful in
+orchestrators such as Docker Swarm, where dependencies between components cannot be easily specified.
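+
+For example, in a shell the variables could be exported like this (the values are only illustrative)::
+
+    $ export TAMARCO_ETCD_HOST=127.0.0.1
+    $ export TAMARCO_ETCD_PORT=2379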
+
+YML file
+--------
+
+To enable this feature, the following environment variable must be set:
+
+* TAMARCO_YML_FILE: Example: 'settings.yml'. Example of a YML file with the system configuration:
+
+.. code-block:: yaml
+
+    system:
+      deploy_name: test_tamarco
+      logging:
+        profile: DEVELOP
+        file: false
+        stdout: true
+        redis:
+          enabled: false
+          host: "127.0.0.1"
+          port: 7006
+          password: ''
+          ssl: false
+      microservices:
+        test:
+          logging:
+            profile: DEVELOP
+            file: false
+            stdout: true
+      resources:
+        metrics:
+          collect_frequency: 15
+        status:
+          host: 127.0.0.1
+          port: 5747
+          debug: False
+        amqp:
+          host: 127.0.0.1
+          port: 5672
+          vhost: /
+          user: microservice
+          password: 1234
+          connection_timeout: 10
+          queues_prefix: "prefix"
+
+Dictionary
+----------
+
+It is possible to load the configuration from a dictionary:
+
+
+.. code-block:: python
+
+    from sanic.response import text
+
+    from tamarco.core.microservice import Microservice
+    from tamarco.resources.io.http.resource import HTTPServerResource
+
+
+    class HTTPMicroservice(Microservice):
+        name = 'settings_from_dictionary'
+        http_server = HTTPServerResource()
+
+        def __init__(self):
+            super().__init__()
+            self.settings.update_internal({
+                'system': {
+                    'deploy_name': 'settings_documentation',
+                    'logging': {
+                        'profile': 'PRODUCTION',
+                    },
+                    'resources': {
+                        'http_server': {
+                            'host': '127.0.0.1',
+                            'port': 8080,
+                            'debug': True
+                        }
+                    }
+                }
+            })
+
+
+    ms = HTTPMicroservice()
+
+
+    @ms.http_server.app.route('/')
+    async def index(request):
+        print('Requested /')
+        return text('Hello world!')
+
+
+    def main():
+        ms.run()
+
+
+    if __name__ == '__main__':
+        main()
+
diff --git a/docs/how_to_guides/setup_settings_for_a_resource.rst b/docs/how_to_guides/setup_settings_for_a_resource.rst
new file mode 100644
index 0000000..c357ed5
--- /dev/null
+++ b/docs/how_to_guides/setup_settings_for_a_resource.rst
@@ -0,0 +1,49 @@
+.. _setup_setting_for_a_resource:
+
+How to setup settings for a resource
+====================================
+
+The resources are designed to automatically load their configuration using the setting resource.
+
+The resources should be defined as attributes of the microservice class:
+
+.. code-block:: python
+
+    class MyMicroservice(Microservice):
+        name = 'settings_from_dictionary'
+
+        recommendation_http_api = HTTPServerResource()
+        billing_http_api = HTTPServerResource()
+
+        def __init__(self):
+            super().__init__()
+            self.settings.update_internal({
+                'system': {
+                    'deploy_name': 'settings_documentation',
+                    'logging': {
+                        'profile': 'PRODUCTION',
+                    },
+                    'resources': {
+                        'recommendation_http_api': {
+                            'host': '127.0.0.1',
+                            'port': 8080,
+                            'debug': True
+                        },
+                        'billing_http_api': {
+                            'host': '127.0.0.1',
+                            'port': 9090,
+                            'debug': False
+                        }
+                    }
+                }
+            })
+
+The resources load their configuration based on the name of the attribute used to bind the resource to the
+microservice. In the example, we have two HTTPServerResource instances in the same microservice, and each one uses a
+different configuration.
+
+The `recommendation_http_api` HTTPServerResource is going to find its configuration in the path
+'system.resources.recommendation_http_api'.
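+
+The same configuration could also be provided through a settings backend instead of `update_internal`; for example,
+with the YML file backend described above, an equivalent sketch of the settings file would be:
+
+.. code-block:: yaml
+
+    system:
+      deploy_name: settings_documentation
+      logging:
+        profile: PRODUCTION
+      resources:
+        recommendation_http_api:
+          host: 127.0.0.1
+          port: 8080
+          debug: true
+        billing_http_api:
+          host: 127.0.0.1
+          port: 9090
+          debug: false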
+
+You must be cautious about choosing the name when the instances are created. If several microservices use the same
+database, the name of the resource instance must be the same in all of those microservices in order to load the same
+configuration.
diff --git a/docs/how_to_guides/setup_settings_for_a_specific_microservice.rst b/docs/how_to_guides/setup_settings_for_a_specific_microservice.rst
new file mode 100644
index 0000000..72f011c
--- /dev/null
+++ b/docs/how_to_guides/setup_settings_for_a_specific_microservice.rst
@@ -0,0 +1,32 @@
+.. _setup_setting_for_a_specific_microservice:
+
+How to setup settings for a specific microservice
+=================================================
+
+The settings under `system.microservices.<microservice_name>.<setting>`
+override the general settings of `system.<setting>` in the microservice
+named `<microservice_name>`.
+
+In the following example, the microservice dog is going to read the logging profile "DEVELOP" and
+the other microservices are going to stay in the logging profile "PRODUCTION":
+
+.. code-block:: yaml
+
+    system:
+      deploy_name: tamarco_doc
+      logging:
+        profile: PRODUCTION
+        file: false
+        stdout: true
+      microservices:
+        dog:
+          logging:
+            profile: DEVELOP
+
+The microservice name is declared when the microservice class is defined:
+
+.. code-block:: python
+
+    class MicroserviceExample(Microservice):
+
+        name = 'my_microservice_name'
diff --git a/docs/how_to_guides/use_logging_resource.rst b/docs/how_to_guides/use_logging_resource.rst
new file mode 100644
index 0000000..839ed94
--- /dev/null
+++ b/docs/how_to_guides/use_logging_resource.rst
@@ -0,0 +1,103 @@
+.. _use_the_logging_resource:
+
+How to use the logging resource
+===============================
+
+Tamarco uses the standard logging library; it only interferes by doing an automatic configuration based on the
+settings.
+
+The microservice comes with a logger ready to use:
+
+.. code-block:: python
+
+    import asyncio
+    import logging
+
+    from tamarco.core.microservice import Microservice, task
+
+
+    class MyMicroservice(Microservice):
+        name = 'my_microservice_name'
+
+        extra_loggers_names = ["my_extra_logger"]
+
+        @task
+        async def periodic_log(self):
+            logging.getLogger("my_extra_logger").info("Initializing periodic log")
+            while True:
+                await asyncio.sleep(1)
+                self.logger.info("Sleeping 1 second")
+
+    if __name__ == "__main__":
+        ms = MyMicroservice()
+        ms.run()
+
+More loggers can be configured by adding their names to the `extra_loggers_names` list of the Microservice class.
+
+The logger bound to the microservice is the one named after the microservice, so you can get and use the logger
+wherever you want:
+
+.. code-block:: python
+
+    import logging
+
+    async def http_handler():
+        logger = logging.getLogger('my_microservice_name')
+        logger.info('Handling a HTTP request')
+
+
+Logging exceptions
+------------------
+
+A very common pattern when programming microservices is logging exceptions. Tamarco automatically sends the exception
+traceback to Logstash and prints the content to stdout when the exc_info flag is active. It only works with logging
+lines inside an except statement:
+
+.. code-block:: python
+
+    import asyncio
+
+    from tamarco.core.microservice import Microservice, task
+
+
+    class MyMicroservice(Microservice):
+        name = 'my_microservice_name'
+
+        @task
+        async def periodic_exception_log(self):
+            while True:
+                try:
+                    raise KeyError
+                except KeyError:
+                    self.logger.warning("Unexpected exception.", exc_info=True)
+                await asyncio.sleep(1)
+
+
+    if __name__ == "__main__":
+        ms = MyMicroservice()
+        ms.run()
+
+
+Adding extra fields and tags
+----------------------------
+
+The fields extend the logging with extra information, and the tags allow filtering the logs by that key.
+
+A common pattern is to enrich the logs with some information about the context. For example, with a request identifier
+the trace can be followed across various microservices.
+
+These fields and tags are automatically sent to Logstash when it is configured.
+
+
+.. code-block:: python
+
+    logger.info("logger line", extra={'tags': {'tag': 'tag_value'}, 'extra_field': 'extra_field_value'})
+
+
+Default logger fields
+---------------------
+
+Some extra fields are automatically added to the logging:
+
+* `deploy_name`: deploy name configured in `system/deploy_name`. It allows distinguishing logs of different deploys,
+for example between staging, develop and production environments.
+* `levelname`: log level currently configured in the Microservice.
+* `logger`: logger name used when the logger is declared.
+* `service_name`: service name declared in the Microservice.
diff --git a/docs/how_to_guides/use_metrics_resource.rst b/docs/how_to_guides/use_metrics_resource.rst
new file mode 100644
index 0000000..b2bcb70
--- /dev/null
+++ b/docs/how_to_guides/use_metrics_resource.rst
@@ -0,0 +1,114 @@
+.. _use_metrics_resource:
+
+
+How to use the metrics resource
+===============================
+
+All Tamarco meters implement the Flyweight pattern. This means that, no matter where you instantiate a meter, if two
+or more meters have the same characteristics they are going to be the same object. You don't need to be careful about
+using the same object in multiple places.
+
+
+
+Counter
+-------
+
+A counter is a cumulative metric that represents a single numerical value that only goes up. The counter is reset
+when the service restarts. A counter can be used to count requests served, events, tasks completed, errors occurred,
+etc.
+
+.. code-block:: python
+
+    cats_counter = Counter('cats', 'animals')
+    meows_counter = Counter('meows', 'sounds')
+    jumps_counter = Counter('jumps', 'actions')
+
+    class Cat:
+
+        def __init__(self):
+            cats_counter.inc()
+
+        # It can work as a decorator: every time the function is called, the counter is incremented by one.
+        @meows_counter
+        def meow(self):
+            print('meow')
+
+        # Similarly, it can be used as a decorator of coroutines.
+        @jumps_counter
+        async def jump(self):
+            print("jump")
+
+Gauge
+-----
+
+A gauge is a metric that represents a single numerical value. Unlike the counter, it can go down. Gauges are typically
+used for measured values like temperatures, current memory usage, coroutines, CPU usage, etc. Take into account that
+this kind of meter only saves the last value when it is reported.
+
+It is used similarly to the counter; a simple example:
+
+.. code-block:: python
+
+    ws_connections_metric = Gauge("websocket_connections", "connections")
+
+    class WebSocketServer:
+
+        @ws_connections_metric
+        def on_open(self):
+            ...
+
+        def on_close(self):
+            ws_connections_metric.dec()
+            ...
+
+
+Summary
+-------
+
+A summary samples observations over sliding windows of time and provides instantaneous insight into their
+distributions, frequencies, and sums. Summaries are typically used to get feedback about quantities where the
+distribution of the data is important, such as processing times.
+
+The default quantiles are: [0.5, 0.75, 0.9, 0.95, 0.99].
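+
+A short sketch of recording observations by hand, mirroring the `observe` usage of the prometheus example shipped
+with the framework (the meter name and the timing logic here are only illustrative):
+
+.. code-block:: python
+
+    import time
+
+    response_time_summary = Summary("http_response_time", "time")
+
+    def http_handler(request):
+        start = time.monotonic()
+        ...
+        # Each observation feeds the sliding-window quantiles described above.
+        response_time_summary.observe(time.monotonic() - start)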
+
+Timer
+-----
+
+Gauge and Summary can be used as timers. The timer can be used both as a context manager and as a decorator:
+
+.. code-block:: python
+
+    request_processing_time = Summary("http_requests_processing_time", "time")
+
+    @request_processing_time.timeit()
+    def http_handler(request):
+        ...
+
+
+.. code-block:: python
+
+    my_task_processing_time_gauge = Gauge("my_task_processing_time", "time")
+
+    with my_task_processing_time_gauge.timeit():
+        my_task()
+
+
+
+Labels
+------
+
+The meters admit labels to attach additional information to a metric. For example, the status code of an HTTP response
+can be used as a label to monitor the number of failed requests.
+
+A meter with labels:
+
+.. code-block:: python
+
+    http_requests_ok = Counter('http_requests', 'requests', labels={'status_code': 200})
+
+    def http_request_ping(request):
+        http_requests_ok.inc()
+        ...
+
+To add a label to an already existing meter:
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..19c4b49
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,20 @@
+Welcome to Tamarco's documentation!
+===================================
+
+.. toctree::
+
+    README
+    tutorials/main
+    how_to_guides/main
+    explanations/main
+    reference/main
+    develop/main
+    CONTRIBUTING
+    CODE_OF_CONDUCT
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/docs/reference/main.rst b/docs/reference/main.rst
new file mode 100644
index 0000000..a6c7f73
--- /dev/null
+++ b/docs/reference/main.rst
@@ -0,0 +1,40 @@
+Reference
+=========
+
+----
+Core
+----
+
+.. automodule:: tamarco.core.microservice
+    :members:
+
+Logging
+-------
+
+.. automodule:: tamarco.core.logging.logging
+    :members:
+
+Patterns
+--------
+
+.. automodule:: tamarco.core.patterns
+    :members:
+
+Settings
+--------
+
+.. automodule:: tamarco.core.settings.settings
+    :members:
+
+.. automodule:: tamarco.core.settings.setting_proxy
+    :members:
+
+
+---------
+Resources
+---------
+
+.. automodule:: tamarco.resources.bases
+    :members:
+
+
diff --git a/docs/tutorials/main.rst b/docs/tutorials/main.rst
new file mode 100644
index 0000000..c2af86f
--- /dev/null
+++ b/docs/tutorials/main.rst
@@ -0,0 +1,6 @@
+=========
+Tutorials
+=========
+
+.. include:: quick_start.rst
+.. include:: write_your_first_microservice.rst
diff --git a/docs/tutorials/quick_start.rst b/docs/tutorials/quick_start.rst
new file mode 100644
index 0000000..dc7f5e3
--- /dev/null
+++ b/docs/tutorials/quick_start.rst
@@ -0,0 +1,28 @@
+Quick Start
+===========
+
+Install Tamarco
+---------------
+
+To install Tamarco, simply run this command in your terminal of choice. The allowed Python versions
+are Python >= 3.6; the recommended version is Python 3.7::
+
+    $ pip3 install tamarco
+
+Start a project
+---------------
+
+Use Tamarco to start a new project. Run the following command and fill in the requested data::
+
+    $ tamarco start_project
+
+Write your microservice
+-----------------------
+
+Start writing the microservice code inside the project folder, in `microservice.py`.
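+
+The generated `microservice.py` starts as a minimal skeleton similar to the following (the class name and the `name`
+attribute depend on the answers given to `tamarco start_project`):
+
+.. code-block:: python
+
+    from tamarco.core.microservice import Microservice
+
+
+    class MyMicroservice(Microservice):
+        name = "my_awesome_project_name"
+
+
+    def main():
+        ms = MyMicroservice()
+        ms.run()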
+
+Run the microservice
+--------------------
+
+Run the microservice with the following command::
+
+    $ python3 app.py
diff --git a/docs/tutorials/write_your_first_microservice.rst b/docs/tutorials/write_your_first_microservice.rst
new file mode 100644
index 0000000..7dc35ef
--- /dev/null
+++ b/docs/tutorials/write_your_first_microservice.rst
@@ -0,0 +1,169 @@
+Write your first microservice
+=============================
+
+In this section, we will create a simple microservice that inserts data into a Postgres table.
+
+
+Installation
+------------
+
+For this example, we need the Tamarco framework and the Postgres resource plugin. Optionally, you can create
+a virtual environment before installing the packages::
+
+    $ virtualenv virtualenv -p python3.6
+    $ . virtualenv/bin/activate
+    $ pip3 install tamarco tamarco-postgres
+
+
+Using Tamarco code generation
+-----------------------------
+
+Tamarco provides the generation of a microservice skeleton using `cookiecutter
+`_ templates. To use this feature, go to the path where you want to
+create the microservice and type::
+
+    $ tamarco start_project
+
+This command will ask you a few questions to get a minimum service configuration and will generate the code in a new
+folder named after the chosen `project_name`. The main script file is called `microservice.py`, and for simplicity we
+will write all our example code in this file.
+
+More information about the microservice code generation `here <../explanations/microservice_cookiecutter_template>`_.
+
+
+Our microservice step by step
+-----------------------------
+
+The code generated in `microservice.py` is very simple:
+
+.. code-block:: python
+
+    from tamarco.core.microservice import Microservice
+
+
+    class MyMicroservice(Microservice):
+        name = "my_awesome_project_name"
+
+
+    def main():
+        ms = MyMicroservice()
+        ms.run()
+
+In the previous code, we can see that our service inherits from the Tamarco base class `Microservice`. This class will
+be the base of all our microservices; it is responsible for starting all the resources and for stopping them properly
+when the microservice exits. It has several execution stages in its lifecycle; more information
+`here <../explanations/microservice_base_class>`_.
+
+The next step is to declare the Postgres resource we want to use:
+
+.. code-block:: python
+
+    from tamarco.core.microservice import Microservice
+    from tamarco_postgres import PostgresClientResource
+
+
+    class MyMicroservice(Microservice):
+        name = "my_awesome_project_name"
+        postgres = PostgresClientResource()
+
+
+In a production environment, we normally get the service settings/configuration from a storage service like etcd,
+but to keep it simple, we now set the required configuration using an internal function. More info about the Tamarco
+settings `here <../explanations/a_walk_around_the_settings>`_.
+
+
+.. code-block:: python
+
+    from tamarco.core.microservice import Microservice
+    from tamarco_postgres import PostgresClientResource
+
+    class MyMicroservice(Microservice):
+        name = "my_awesome_project_name"
+        postgres = PostgresClientResource()
+
+        def __init__(self):
+            super().__init__()
+            self.settings.update_internal({
+                "system": {
+                    "deploy_name": "my_first_microservice",
+                    "logging": {
+                        "profile": "DEVELOP",
+                    },
+                    "resources": {
+                        "postgres": {
+                            "host": "127.0.0.1",
+                            "port": 5432,
+                            "user": "postgres"
+                        }
+                    }
+                }
+            })
+
+Our service already knows where to connect to the database, so now we have to create the table and make the queries.
+Tamarco provides a decorator (`@task`) to convert a method into an asyncio task. The task is started and stopped when
+the microservice starts and stops, respectively:
+
+
+.. code-block:: python
+
+    from tamarco.core.microservice import Microservice, task
+    from tamarco_postgres import PostgresClientResource
+
+    class MyMicroservice(Microservice):
+        name = "my_awesome_project_name"
+        postgres = PostgresClientResource()
+
+        def __init__(self):
+            super().__init__()
+            self.settings.update_internal({
+                "system": {
+                    "deploy_name": "my_first_microservice",
+                    "logging": {
+                        "profile": "DEVELOP",
+                    },
+                    "resources": {
+                        "postgres": {
+                            "host": "127.0.0.1",
+                            "port": 5432,
+                            "user": "postgres"
+                        }
+                    }
+                }
+            })
+
+        @task
+        async def postgres_query(self):
+            create_query = '''
+                CREATE TABLE my_table (
+                    id INT PRIMARY KEY NOT NULL,
+                    name TEXT NOT NULL
+                );
+            '''
+            insert_query = "INSERT INTO my_table (id, name) VALUES (1, 'John Doe');"
+            select_query = "SELECT * FROM my_table"
+
+            try:
+                await self.postgres.execute(create_query)
+                await self.postgres.execute(insert_query)
+                response = await self.postgres.fetch(select_query)
+            except Exception:
+                self.logger.exception("Error executing query")
+            else:
+                self.logger.info(f"Data: {response}")
+
+
+Notice that we imported `task` from `tamarco.core.microservice`!
+
+
+Running our microservice
+------------------------
+
+First, we need a running Postgres, so we can launch a docker container::
+
+    $ docker run -d -p 5432:5432 postgres
+
+In the root of our project there is the service entry point: `app.py`. You can execute this file and check the result
+(don't forget to activate the virtualenv if you have one)::
+
+    $ python app.py
+
diff --git a/examples/http_resource/microservice.py b/examples/http_resource/microservice.py
new file mode 100755
index 0000000..3435f16
--- /dev/null
+++ b/examples/http_resource/microservice.py
@@ -0,0 +1,62 @@
+# How to execute this example: see the "Examples" section in README.md
+
+import asyncio
+
+from sanic.response import text
+
+from tamarco.core.microservice import Microservice, MicroserviceContext, thread
+from tamarco.resources.io.http.resource import HTTPClientResource, HTTPServerResource
+
+
+class HTTPMicroservice(Microservice):
+    name = "http_example"
+    extra_loggers_names = {name, "asyncio", "tamarco"}
+
+    def __init__(self):
+        super().__init__()
+        self.settings.update_internal(
+            {
+                "system": {
+                    "deploy_name": "test",
+                    "logging": {"profile": "PRODUCTION"},
+                    "resources": {"http_server": {"host": "127.0.0.1", "port": 8080, "debug": True}},
+                }
+            }
+        )
+
+    http_server = HTTPServerResource()
+
+    @thread
+    async def get_index_each_second(self):
+        class HTTPClientContext(MicroserviceContext):
+            http_client = HTTPClientResource()
+
+        ms_context = HTTPClientContext()
+        ms_context.loop = asyncio.get_event_loop()
+        await ms_context.start()
+
+        await asyncio.sleep(2)
+        self.logger.info(f"Index url: http://127.0.0.1:8080/")
+
+        while True:
+            async with ms_context.http_client.session.get("http://127.0.0.1:8080/") as response:
+                assert response.status == 200
+                assert await response.text() == "Hello world!"
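+            # Wait one second before requesting the index again.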
+            await asyncio.sleep(1)
+
+
+ms = HTTPMicroservice()
+
+
+@ms.http_server.app.route("/")
+async def index(request):
+    print("Requested /")
+    return text("Hello world!")
+
+
+def main():
+    ms.run()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/metrics_resource/microservice.py b/examples/metrics_resource/microservice.py
new file mode 100755
index 0000000..e9079c8
--- /dev/null
+++ b/examples/metrics_resource/microservice.py
@@ -0,0 +1,58 @@
+# How to execute this example: see the "Examples" section in README.md
+
+from time import sleep
+
+from tamarco.core.microservice import Microservice, task
+from tamarco.resources.basic.metrics import MetersManager
+from tamarco.resources.basic.metrics.meters import Counter, Timer
+from tamarco.resources.basic.metrics.reporters import FileHandler, StdoutHandler
+
+
+class MetricsMicroservice(Microservice):
+    name = "metrics_example"
+    extra_loggers_names = {name, "asyncio", "tamarco"}
+
+    def __init__(self):
+        super().__init__()
+        self.settings.update_internal({"system": {"deploy_name": "test", "logging": {"profile": "PRODUCTION"}}})
+        # The handlers can be registered in a single configure() call ...
+        MetersManager.configure(
+            {
+                "handlers": [
+                    {"handler": StdoutHandler, "metric_prefix": "metric"},
+                    {"handler": FileHandler, "file_path": "/tmp/metrics"},
+                ],
+                "collect_period": 2,
+            }
+        )
+        # ... or, equivalently, one by one (don't do both, or the handlers are registered twice):
+        # MetersManager.add_handler(FileHandler("/tmp/metrics"))
+        # MetersManager.add_handler(StdoutHandler(metric_prefix="metric"))
+        # MetersManager.configure({"collect_period": 2})
+
+    @Timer(callback=lambda time: print(f"The elapsed time is {time}"))
+    @task
+    async def time_me(self):
+        sleep(1)
+
+        with Timer(callback=lambda time: print(f"The inner elapsed time is {time}")):
+            sleep(2)
+
+    @task
+    async def count_cows(self):
+        cows_counter = Counter("cows_counter", "cow")
+        cows_counter.inc()
+        cows_counter.inc()
+
+    @Counter("request_count", "request")
+    @task
+    async def count_requests(self):
+        pass
+
+
+def main():
+    ms = MetricsMicroservice()
+    ms.run()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/metrics_resource/prometheus_example.py b/examples/metrics_resource/prometheus_example.py
new file mode 100755
index 0000000..d10c929
--- /dev/null
+++ b/examples/metrics_resource/prometheus_example.py
@@ -0,0 +1,54 @@
+# How to execute this example: see the "Examples" section in README.md
+
+import asyncio
+from random import random
+
+from tamarco.core.microservice import Microservice, task
+from tamarco.resources.basic.metrics.meters import Counter, Summary
+
+
+class MetricsMicroservice(Microservice):
+    name = "metrics_example"
+    extra_loggers_names = {name, "asyncio", "tamarco"}
+
+    def __init__(self):
+        super().__init__()
+        self.settings.update_internal(
+            {
+                "system": {
+                    "deploy_name": "test",
+                    "logging": {"profile": "PRODUCTION"},
+                    "resources": {
+                        "metrics": {"handlers": {"prometheus": {"enabled": True}}, "collect_frequency": 1},
+                        "tamarco_http_report_server": {"host": "127.0.0.1", "port": 5747, "debug": False},
+                    },
+                }
+            }
+        )
+
+    async def post_start(self):
+        await super().post_start()
+        self.logger.info("See Prometheus results in: http://localhost:5747/metrics")
+
+    @task
+    async def count_me(self):
+        counter_task = Counter("tasks", "calls")
+        while True:
+            counter_task.inc()
+            await asyncio.sleep(0.5)
+
+    @task
+    async def summary(self):
+        summary_test = Summary("summary", "calls")
+        while True:
+            summary_test.observe(random())
+            await asyncio.sleep(0.1)
+
+
+def main():
+    ms = MetricsMicroservice()
+    ms.run()
+
+
+if __name__ == "__main__":
+    main()
diff --git 
a/examples/settings_watcher/microservice.py b/examples/settings_watcher/microservice.py new file mode 100644 index 0000000..96ba9b6 --- /dev/null +++ b/examples/settings_watcher/microservice.py @@ -0,0 +1,48 @@ +# How to execute this example: see the "Examples" section in README.md + +import asyncio + +from tamarco.core.microservice import Microservice, task +from tamarco.core.settings.backends import EtcdSettingsBackend +from tamarco.core.settings.settings import Key, Settings + + +class WatcherMicroservice(Microservice): + name = "watcher_example" + extra_loggers_names = {name, "asyncio", "tamarco"} + etcd_settings = EtcdSettingsBackend(etcd_config={"host": "127.0.0.1"}) + + def __init__(self): + super().__init__() + self.settings.update_internal({"system": {"deploy_name": "test", "logging": {"profile": "PRODUCTION"}}}) + # Settings class configuration just to simplify the example. DON'T DO IT! + settings = Settings() + settings.etcd_external = True + settings.external_backend = self.etcd_settings + + async def watcher_callback(self, key, settings): + self.logger.info(f"Watcher callback called for the key {key} with new value: {settings}") + + @task + async def change_settings(self): + await self.etcd_settings.set(Key("system.foo.cow"), "MOOOO") + try: + await self.settings.watch("system.foo.cow", self.watcher_callback) + except Exception: + self.logger.exception("Could not create the watcher") + + i = 1 + while True: + await asyncio.sleep(1) + self.logger.info(f"Changing system.foo.cow with value MOOOO_{i}") + await self.etcd_settings.set(Key("system.foo.cow"), f"MOOOO_{i}") + i += 1 + + +def main(): + ms = WatcherMicroservice() + ms.run() + + +if __name__ == "__main__": + main() diff --git a/examples/status_resource/microservice.py b/examples/status_resource/microservice.py new file mode 100755 index 0000000..16b805f --- /dev/null +++ b/examples/status_resource/microservice.py @@ -0,0 +1,57 @@ +# How to execute this example: see the "Examples" section in README.md + +import asyncio + +from tamarco.core.microservice import Microservice, MicroserviceContext, task +from tamarco.resources.io.http.resource import HTTPClientResource, HTTPServerResource + + +class HTTPClientContext(MicroserviceContext): + http_client = HTTPClientResource() + + +class StatusMicroservice(Microservice): + name = "status_example" + extra_loggers_names = {name, "asyncio", "tamarco"} + http_server = HTTPServerResource() + + def __init__(self): + super().__init__() + self.settings.update_internal( + { + "system": { + "deploy_name": "test", + "logging": {"profile": "PRODUCTION"}, + "resources": { + "tamarco_http_report_server": {"host": "127.0.0.1", "port": 5747, "debug": False}, + "http_server": {"host": "127.0.0.1", "port": 8080, "debug": True}, + }, + } + } + ) + + @task + async def get_status_each_second(self): + ms_context = HTTPClientContext() + ms_context.loop = asyncio.get_event_loop() + await ms_context.start() + await asyncio.sleep(1) + + while True: + await asyncio.sleep(1) + + async with ms_context.http_client.session.get("http://127.0.0.1:5747/status") as response: + self.logger.info("Requested http://127.0.0.1:5747/status") + assert response.status == 200 + response = await response.json() + self.logger.info(f"Response: {response}") + assert type(response) == dict + + +def main(): + ms = StatusMicroservice() + ms.run() + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..d713306 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,18 @@ 
+[tool.black] +line-length = 120 +exclude = ''' +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | virtualenv + | _build + | buck-out + | build + | dist + | project_template +)/ +''' diff --git a/requirements_dev.txt b/requirements_dev.txt new file mode 100644 index 0000000..24d864e --- /dev/null +++ b/requirements_dev.txt @@ -0,0 +1,42 @@ +aio-etcd==0.4.6.1 +aiohttp==3.5.4 +aiohttp-cors==0.7.0 +bumpversion==0.5.3 +cachetools==3.1.0 +cookiecutter==1.6.0 +coverage==4.5.3 +dnspython==1.15.0 +dnspython3==1.15.0 +fire==0.1.3 +flake8==3.7.7 +flake8-bugbear==19.3.0 +flake8-builtins==1.4.1 +flake8-colors==0.1.6 +flake8-comprehensions==2.1.0 +flake8-deprecated==1.3 +flake8-html==0.4.0 +flake8-import-order==0.18.1 +GitPython==2.1.11 +idna==2.8 +inflection==0.3.1 +m2r==0.2.1 +objgraph==3.4.1 +pip==19.1.1 +pyes==0.99.6 +pypandoc==1.4 +pytest==4.3.1 +pytest-asyncio==0.10.0 +pytest-timeout==1.3.3 +PyYAML==5.1 +redis==3.2.1 +sanic-plugins-framework==0.8.1 +sanic==19.3.1 +Sanic-Cors==0.9.8 +Sphinx==2.1.1 +thrift==0.11.0 +tox==3.11.1 +ujson==1.35 +urllib3==1.25.2 +watchdog==0.9.0 +wheel==0.33.4 +black==19.3b0 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..865c4b9 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,89 @@ +[bumpversion] +current_version = 0.1.0 +commit = True +tag = False + +[bumpversion:file:setup.py] +search = version='{current_version}' +replace = version='{new_version}' + +[bumpversion:file:tamarco/__init__.py] +search = __version__ = '{current_version}' +replace = __version__ = '{new_version}' + +[bumpversion:file:docs/conf.py] +search = version = '{current_version}' +replace = version = '{new_version}' + +[bdist_wheel] +universal = 1 + +[flake8] +exclude = + .git, + .tox, + dist, + .pyenv-python3, + virtualenv, + venv, + __pycache__, + env, + docs, + contrib, + pycharm, + reports, + tamarco/tools/project/project_template/*, + setup.py +max-complexity = 10 +statistics = True +show-source = True +max-line-length = 120 +count = True +import-order-style = pycharm +application-import-names = tamarco,tests + +[pep8] +exclude = virtualenv,docs,env,pycharm,dist +max-line-length = 120 + +[tool:pytest] +testpaths = tests +python_files = *.py +norecursedirs = virtualenv reports docs requirements env pycharm docs examples dist project_template +addopts = --junitxml=./reports/junit.xml + --verbose + --capture=no + --ignore=setup.py + +[coverage:run] +branch = True +omit = */virtualenv/* + */env/* + */venv/* + *docs* + *examples* + */pycharm/* + */tests/* + *shared_fixtures* +parallel = True + +[coverage:report] +exclude_lines = + pragma: no cover + + def __repr__ + if self\.debug + + raise AssertionError + raise NotImplementedError + raise NotImplemented + + if 0: + if __name__ == .__main__.: +ignore_errors = True + +[coverage:xml] +output = reports/coverage.xml + +[coverage:html] +directory = reports/coverage_html diff --git a/setup.py b/setup.py new file mode 100755 index 0000000..d563c4e --- /dev/null +++ b/setup.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from setuptools import setup + +with open("README.md") as readme_file: + readme = readme_file.read() + +resources_require = { + "ci": [ + "radon==3.0.1", + "pep8==1.7.1", + "pytest==4.3.0", + "pytest-asyncio==0.10.0", + "pytest-timeout==1.3.3", + "flake8==3.7.7", + "flake8-html==0.4.0", + "flake8-deprecated==1.3", + "flake8-comprehensions==2.1.0", + "flake8-colors==0.1.6", + "flake8-builtins==1.4.1", + "flake8-bugbear==18.8.0", + "coverage==4.5.3", + "black==19.3b0", + ], + 
"visibility": ["prctl==1.6.1", "GitPython==2.1.11"], +} + +requirements = [ + "sanic-plugins-framework==0.8.1", + "urllib3==1.25.2", + "idna==2.8", + "sanic==19.3.1", + "cookiecutter==1.6.0", + "aiohttp==3.5.4", + "aiohttp_cors==0.7.0", + "aio_etcd==0.4.6.1", + "objgraph==3.4.1", + "pyes==0.99.6", + "thrift==0.11.0", + "redis==3.2.1", + "fire==0.1.3", + "inflection==0.3.1", + "sanic-cors==0.9.8", + "cachetools==3.1.0", + "ujson==1.35", + "dnspython==1.15.0", + "dnspython3==1.15.0", + "pyYAML==5.1", +] + +setup( + name="tamarco", + version="0.1.0", + description="Microservices Framework that aims to accomplish the 12 Factor app", + long_description=readme, + author="System73 Engineering Team", + author_email="opensource@system73.com", + url="https://github.com/System73/tamarco", + scripts=["tamarco/tamarco"], + packages=["tamarco"], + include_package_data=True, + install_requires=requirements, + extras_require=resources_require, + zip_safe=False, + keywords="tamarco", + classifiers=[ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Natural Language :: English", + "Programming Language :: Python :: 3.6", + ], + test_suite="pytests", +) diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000..2dc6451 --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,4 @@ +sonar.projectKey=tamarco +sonar.sources=./tamarco +sonar.projectName=Tamarco +sonar.python.coverage.reportPath=./reports/coverage.xml diff --git a/tamarco/__init__.py b/tamarco/__init__.py new file mode 100755 index 0000000..28bb3ba --- /dev/null +++ b/tamarco/__init__.py @@ -0,0 +1,3 @@ +__author__ = "System73 Engineering Team" +__email__ = "opensource@system73.com" +__version__ = "0.1.0" diff --git a/tamarco/codecs/__init__.py b/tamarco/codecs/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/codecs/interface.py b/tamarco/codecs/interface.py new file mode 100755 index 0000000..91c472e --- /dev/null +++ b/tamarco/codecs/interface.py @@ -0,0 +1,10 @@ +class CodecInterface: + """The interface that all Tamarco codecs must implement.""" + + @staticmethod + def encode(obj): + raise NotImplementedError + + @staticmethod + def decode(obj): + raise NotImplementedError diff --git a/tamarco/codecs/json.py b/tamarco/codecs/json.py new file mode 100755 index 0000000..1f732df --- /dev/null +++ b/tamarco/codecs/json.py @@ -0,0 +1,17 @@ +import ujson + +from tamarco.codecs.interface import CodecInterface + + +class JsonCodec(CodecInterface): + """Encodes and decode JSON objects.""" + + @staticmethod + def encode(obj): + """Converts arbitrary object recursively into a JSON.""" + return ujson.dumps(obj) + + @staticmethod + def decode(obj): + """Converts a JSON string to dict object structure.""" + return ujson.loads(obj) diff --git a/tamarco/codecs/pickle.py b/tamarco/codecs/pickle.py new file mode 100755 index 0000000..9a71ece --- /dev/null +++ b/tamarco/codecs/pickle.py @@ -0,0 +1,17 @@ +import pickle + +from tamarco.codecs.interface import CodecInterface + + +class PickleCodec(CodecInterface): + """Encodes and decode pickle objects.""" + + @staticmethod + def encode(obj): + """Return the pickled representation of obj as a bytes object.""" + return pickle.dumps(obj) + + @staticmethod + def decode(obj): + """Read a pickled object hierarchy from obj (bytes) and return the pickled representation.""" + return pickle.loads(obj) diff --git a/tamarco/codecs/yaml.py b/tamarco/codecs/yaml.py new file mode 100755 index 0000000..cd3643d --- /dev/null +++ 
b/tamarco/codecs/yaml.py @@ -0,0 +1,15 @@ +import yaml + +from tamarco.codecs.interface import CodecInterface + + +class YamlCodec(CodecInterface): + @staticmethod + def encode(obj): + """Serialize a python object into a YAML stream.""" + return yaml.dump(obj) + + @staticmethod + def decode(obj): + """Parse the first YAML document in a stream and produce the corresponding python object.""" + return yaml.full_load(obj) diff --git a/tamarco/core/__init__.py b/tamarco/core/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/core/dependency_resolver.py b/tamarco/core/dependency_resolver.py new file mode 100755 index 0000000..88e2e04 --- /dev/null +++ b/tamarco/core/dependency_resolver.py @@ -0,0 +1,37 @@ +class CantSolveDependencies(Exception): + pass + + +def resolve_dependency_order(dependency_graph): + """Function that return a ordered list of dependencies based in the dependency graph. + + Args: + dependency_graph (dict): Dict that represent dependency graph, + example a -> b -> c = {"a": ["b"], "b": ["c"], "c":[]}. + + Returns: + list: Ordered dependencies. + """ + ordered_deps = [] + number_of_nodes = len(dependency_graph.keys()) + solved_something = True + while len(ordered_deps) < number_of_nodes and solved_something: + solved_something = False + for node, dependencies in dependency_graph.items(): + if not dependencies and node not in ordered_deps: + _resolve_one(node, dependency_graph) + ordered_deps.append(node) + solved_something = True + + if len(ordered_deps) == number_of_nodes: + return ordered_deps + else: + raise CantSolveDependencies(f"Unsolved graph: {dependency_graph}") + + +def _resolve_one(node, dependency_graph): + for dependencies in dependency_graph.values(): + try: + dependencies.remove(node) + except Exception: + pass diff --git a/tamarco/core/logging/__init__.py b/tamarco/core/logging/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/core/logging/formatters/__init__.py b/tamarco/core/logging/formatters/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/core/logging/formatters/colored.py b/tamarco/core/logging/formatters/colored.py new file mode 100755 index 0000000..09369ec --- /dev/null +++ b/tamarco/core/logging/formatters/colored.py @@ -0,0 +1,73 @@ +import logging +from datetime import datetime + +import ujson + + +class ColoredFormatter(logging.Formatter): + """Formatter to log colored records in console.""" + + def __init__(self, color_blind=False): + """Initialize the python core logging formatter and set custom attribute. + + Args: + color_blind (bool): Change the color scheme for a higher color contrast. + """ + super().__init__() + self.color_blind = color_blind + + @staticmethod + def format_timestamp(timestamp): + """Format the time for our log record. + + Args: + timestamp (timestamp): Timestamp to format. + + Returns: + string: Datetime in ISO 8601 format (YYYY-MM-DD HH:MM:SS[.mmmmmm][+HH:MM]). + """ + return datetime.fromtimestamp(timestamp).isoformat(sep=" ") + + def format(self, record): # noqa: A003 + """Format the specified record as text. + + Args: + record (LogRecord): Logging record to format. + + Returns: + string: Log record formatted as a colored text. 
+ """ + # https://github.com/tartley/colorama#recognised-ansi-sequences + if self.color_blind: + level_color = { + "DEBUG": "\x1b[1;37;42m DEBUG \x1b[0m ", + "INFO": "\x1b[1;30;47m INFO \x1b[0m ", + "WARNING": "\x1b[1;37;45m WARNING \x1b[0m ", + "ERROR": "\x1b[1;33;41m ERROR \x1b[0m ", + "CRITICAL": "\x1b[1;37;46m CRITICAL \x1b[0m ", + } + else: + level_color = { + "DEBUG": "\x1b[1;37;42m DEBUG \x1b[0m ", + "INFO": "\x1b[1;30;47m INFO \x1b[0m ", + "WARNING": "\x1b[1;37;43m WARNING \x1b[0m ", + "ERROR": "\x1b[1;37;41m ERROR \x1b[0m ", + "CRITICAL": "\x1b[1;37;44m CRITICAL \x1b[0m ", + } + + level = level_color[record.levelname] if record.levelname in level_color else f"[{record.levelname}] " + tags = f"{list(record.tags)} " if hasattr(record, "tags") else "" + new_record = ( + f"[{self.format_timestamp(record.created)}] {level} [({record.name}) {record.filename}:" + f"{record.lineno}] {tags}\x1b[0;33m {record.getMessage()} \x1b[0m" + ) + + if hasattr(record, "extra_msg"): + try: + new_record += f"\x1b[0;33m\n {ujson.dumps(record.extra_msg, indent=3)} \x1b[0m" + except Exception: + pass + if record.exc_info: + new_record += f"\n \x1b[0;31m {self.formatException(record.exc_info)} \x1b[0m" + + return new_record diff --git a/tamarco/core/logging/formatters/logstash.py b/tamarco/core/logging/formatters/logstash.py new file mode 100755 index 0000000..b998fcf --- /dev/null +++ b/tamarco/core/logging/formatters/logstash.py @@ -0,0 +1,235 @@ +import logging +import socket +import traceback +from datetime import datetime + +import ujson as json + + +class LogstashFormatterBase(logging.Formatter): + """Base formatter class to convert the log record in the different logstash formats.""" + + def __init__(self, message_type="Logstash", fqdn=False, service_name=None, deploy_name=None): + """Initialize the logstash base formatter class. + + Args: + message_type (str): Type field in the log record. + fqdn (bool): If True, the host field in the log record will be the fully qualified domain. Otherwise, + the system hostname. + service_name (str): Service name field in the log record. + deploy_name (str): Deploy name field in the log record. + """ + self.message_type = message_type + self.service_name = service_name + self.deploy_name = deploy_name + if fqdn: + self.host = socket.getfqdn() + else: + self.host = socket.gethostname() + self.skip_list = ( + "args", + "asctime", + "created", + "exc_info", + "exc_text", + "filename", + "funcName", + "id", + "levelname", + "levelno", + "lineno", + "module", + "msecs", + "msecs", + "message", + "msg", + "name", + "pathname", + "process", + "processName", + "relativeCreated", + "thread", + "threadName", + "extra", + "auth_token", + "password", + "tags", + ) + + self.easy_types = (str, bool, dict, float, int, list, type(None), tuple, set) + + def get_extra_fields(self, record): + """Get the fields (and its values) present in the log record that are not in the `skip_list` attribute + set in the `_init_` method. + + Args: + record (LogRecord): Log record to process. + + Returns: + dict: The keys of the dictionary are LogRecord attributes and the dict values are the string + representation of the attributes values. + """ + fields = {} + + for key, value in record.__dict__.items(): + if key not in self.skip_list: + if isinstance(value, self.easy_types): + fields[key] = value + else: + fields[key] = repr(value) + + return fields + + def get_debug_fields(self, record): + """Get the fields (and its values) present in the log record that are related with the debugging process. 
+ + Args: + record (LogRecord): Log record to process. + + Returns: + dict: The keys of the dictionary are LogRecord attributes and the dict values are the attributes values. + """ + fields = { + "stack_trace": self.format_exception(record.exc_info), + "lineno": record.lineno, + "process": record.process, + "thread_name": record.threadName, + } + + if not getattr(record, "funcName", None): + fields["funcName"] = record.funcName + + if not getattr(record, "processName", None): + fields["processName"] = record.processName + + return fields + + @classmethod + def format_source(cls, message_type, host, path): + """Format the source field of the log record. + + Args: + message_type (str): Type of the log record message. + host (str): Hostname or the fully qualified domain where the logging call was made. + path (str): The full pathname of the source file where the logging call was made. + + Returns: + string: URI of the file where the logging call was made. + """ + return f"{message_type}://{host}/{path}" + + @classmethod + def format_timestamp(cls, time): + """Convert the timestamp passed by argument to the format: YYYY-mm-ddTHH:MM:SS.sssZ. + + Args: + time (timestamp): Timestamp to format. + + Returns: + string: Timestamp formatted with the format (YYYY-mm-ddTHH:MM:SS.sssZ). + """ + tstamp = datetime.utcfromtimestamp(time) + return f'{tstamp.strftime("%Y-%m-%dT%H:%M:%S")}.{int(tstamp.microsecond / 1000):03}Z' + + @classmethod + def format_exception(cls, exc_info): + """Concatenate the strings present in the exc_info list. + + Args: + exc_info (list): Exception information to be included in the log entry. + + Returns: + string: If exc_info is an empty list, an empty string is returned. If exc_info is not an empty list, + the string resulting from concatenating all exc_info items is returned. + """ + return "".join(traceback.format_exception(*exc_info)) if exc_info else "" + + @classmethod + def serialize(cls, message): + """Convert arbitrary object recursively into JSON. + + Args: + message (dict): Log entry information. + + Returns: + json: The log entry information in a JSON object. + """ + return json.dumps(message).encode() + + +class LogstashFormatterVersion0(LogstashFormatterBase): + """Formatter class to convert the log record into the logstash format version 0.""" + + version = 0 + + def format(self, record): # noqa: A003 + """Convert the log record into the logstash format version 0, adding extra and debug info if applicable. + + Args: + record (LogRecord): Log entry. + + Returns: + json: Log entry information in a JSON object. + """ + # Create message dict. + message = { + "@timestamp": self.format_timestamp(record.created), + "@message": record.getMessage(), + "@source": self.format_source(self.message_type, self.host, record.pathname), + "@source_host": self.host, + "@source_path": record.pathname, + "@tags": getattr(record, "tags", []), + "@type": self.message_type, + "@fields": {"levelname": record.levelname, "logger": record.name}, + } + + # Add extra fields. + message["@fields"].update(self.get_extra_fields(record)) + + if self.service_name: + message["@fields"]["service_name"] = self.service_name + + if self.deploy_name: + message["@fields"]["deploy_name"] = self.deploy_name + + # If exception, add debug info. 
+ if record.exc_info: + message["@fields"].update(self.get_debug_fields(record)) + + return self.serialize(message) + + +class LogstashFormatterVersion1(LogstashFormatterBase): + """Formatter class to convert the log record into the logstash format version 1.""" + + def format(self, record): # noqa: A003 + """Converts the log record into the logstash format version 1, adding extra and debug info if applicable. + + Args: + record (LogRecord): Log entry. + + Returns: + json: Log entry information in a JSON object. + """ + # Create message dict. + message = { + "@timestamp": self.format_timestamp(record.created), + "@version": "1", + "message": record.getMessage(), + "host": self.host, + "path": record.pathname, + "tags": getattr(record, "tags", []), + "type": self.message_type, + # Extra Fields + "level": record.levelname, + "logger_name": record.name, + } + + # Add extra fields. + message.update(self.get_extra_fields(record)) + + # If exception, add debug info. + if record.exc_info: + message.update(self.get_debug_fields(record)) + + return self.serialize(message) diff --git a/tamarco/core/logging/formatters/syslog.py b/tamarco/core/logging/formatters/syslog.py new file mode 100755 index 0000000..921bf00 --- /dev/null +++ b/tamarco/core/logging/formatters/syslog.py @@ -0,0 +1,54 @@ +import logging +import socket +from datetime import datetime + +import ujson as json + + +class SyslogFormatter(logging.Formatter): + """Formatter to log records like syslog records.""" + + def __init__(self): + """Initialize the syslog formatter class.""" + super().__init__() + self.isoformat_sep = " " + + def format_timestamp(self, timestamp): + """Format the log record timestamp. + + Args: + timestamp (timestamp): Log record timestamp to format. + + Returns: + string: Datetime in ISO 8601 format (YYYY-MM-DD HH:MM:SS[.mmmmmm][+HH:MM]). + """ + return datetime.fromtimestamp(timestamp).isoformat(sep=self.isoformat_sep) + + def format(self, record): # noqa: A003 + """Format the specified record as text. + + Args: + record (LogRecord): Log record to format. + + Returns: + string: Log record formatted as syslog string. + """ + tags = f"[{list(record.tags)})] " if hasattr(record, "tags") else "[null] " + new_record = ( + f"[{self.format_timestamp(record.created)}] [{socket.getfqdn()}] [{record.levelname}] " + f"[({record.name}) {record.filename}:{record.lineno}] {tags}[{record.getMessage()}] " + ) + + if not hasattr(record, "extra_msg"): + new_record += "[null]" + else: + try: + # The next line only works with the first level of the dict! 
+ extra_msg = {"json_" + str(k): str(v) for k, v in record.extra_msg.items()} + if record.exc_info: + extra_msg.update({"json_exception": self.formatException(record.exc_info)}) + new_record += f"[{json.dumps(extra_msg)}] " + except Exception: + pass + + return new_record diff --git a/tamarco/core/logging/handlers/__init__.py b/tamarco/core/logging/handlers/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/core/logging/handlers/asyncronous.py b/tamarco/core/logging/handlers/asyncronous.py new file mode 100755 index 0000000..97c0f98 --- /dev/null +++ b/tamarco/core/logging/handlers/asyncronous.py @@ -0,0 +1,64 @@ +import queue +from logging.handlers import QueueHandler, QueueListener + +from tamarco.resources.basic.metrics.meters import Counter + +MAX_QUEUE_SIZE = 10000 +TOLERANCE_PERCENTAGE = 2 +VALUE_TOLERANCE_PERCENTAGE = int(MAX_QUEUE_SIZE * (1 - (TOLERANCE_PERCENTAGE / 100))) + + +class QueueHandlerAsyncHandler(QueueHandler): + """Asynchronous version of the logging QueueHandler class.""" + + def __init__(self, log_queue): + """Initialize the Asynchronous QueueHandler class. + + Args: + log_queue (Queue): Queue class instance (python standard library). + """ + self.counter_overflow_queue = None + super().__init__(log_queue) + + def enqueue(self, record): + """If there is free space in the queue, the record is added to it. + + Args: + record (LogRecord): Entry log record. + """ + queue_size = self.queue.qsize() + if queue_size < VALUE_TOLERANCE_PERCENTAGE: + self.queue.put_nowait(record) + else: + self.counter_overflow_queue.inc() + + def prepare(self, record): + """To avoid exc_info from being deleted QueueHandler.prepare() sets exc_info to None and calls + it's own format(). + + Args: + record (Log record): Entry log record. + """ + return record + + +class AsyncWrapperHandler(QueueHandlerAsyncHandler): + """Wrapper of the asynchronous queue handler class.""" + + def __init__(self, handler, *args, **kwargs): + """The queue and its listener are initialized." + + Args: + handler (class): Handler for the logging queue. + """ + self.queue = queue.Queue(MAX_QUEUE_SIZE) + super().__init__(self.queue) + self.handler = handler(*args, **kwargs) + self.handler_name = self.handler.__class__.__name__ + self.listener = QueueListener(self.queue, self.handler) + self.counter_overflow_queue = Counter(f"counter_overflow_queue_{self.handler_name}", "messages") + self.listener.start() + + def __del__(self): + """Stop the queue listener.""" + self.listener.stop() diff --git a/tamarco/core/logging/handlers/elasticsearch.py b/tamarco/core/logging/handlers/elasticsearch.py new file mode 100755 index 0000000..12ccee0 --- /dev/null +++ b/tamarco/core/logging/handlers/elasticsearch.py @@ -0,0 +1,83 @@ +import datetime +import logging + +from pyes import ES +from pyes.exceptions import NoServerAvailable + +from tamarco.core.logging.formatters import logstash +from tamarco.core.logging.handlers.asyncronous import AsyncWrapperHandler + + +class ElasticSearchHandler(logging.Handler): + """Logging handler that sends the logs to a Elasticsearch instance.""" + + def __init__( + self, + conn_strs=None, + record_type="record", + level=logging.NOTSET, + fqdn=False, + service_name=None, + deploy_name=None, + version=0, + ): + """Initialize the handler. + + Args: + conn_strs (list): List of Elasticsearch connections strings. + record_type (str): The record type always will be 'record'. + level (str): Logging level. 
+            fqdn (bool): If True, the host field in the log record will be the fully qualified domain. Otherwise,
+                the system hostname.
+            service_name (str): Service name.
+            deploy_name (str): Deploy name.
+            version (int): If 1, the Logstash formatter version 1 is used. Otherwise, version 0.
+        """
+        logging.Handler.__init__(self, level=level)
+        self.conn_strs = conn_strs if conn_strs else ["127.0.0.1:9200"]
+        self.connected = False
+        self.conn = None
+        self.try_conn()
+        self.record_type = record_type
+        if version == 1:
+            self.formatter = logstash.LogstashFormatterVersion1(record_type, fqdn, service_name, deploy_name)
+        else:
+            self.formatter = logstash.LogstashFormatterVersion0(record_type, fqdn, service_name, deploy_name)
+
+    def try_conn(self):
+        """Try a new connection to Elasticsearch."""
+        try:
+            self.conn = ES(self.conn_strs, timeout=5)
+            self.connected = True
+        except NoServerAvailable:
+            print("Error connecting to elasticsearch for logging")
+
+    @property
+    def index_name(self):
+        """Construct the Elasticsearch index name for the logs.
+
+        Returns:
+            string: Logstash index.
+        """
+        return "logstash-" + datetime.date.today().strftime("%Y.%m.%d")
+
+    def emit(self, record):
+        """Emit the specified log record.
+
+        Args:
+            record (LogRecord): Entry log to emit.
+        """
+        entry = self.formatter.format(record)
+        if self.connected:
+            self.conn.index(entry, self.index_name, self.record_type)
+        else:
+            # The record is dropped; just try to reconnect for the next one.
+            self.try_conn()
+
+
+class AsyncElasticSearchHandler(AsyncWrapperHandler):
+    """Asynchronous version of the logging handler that sends the logs to an Elasticsearch instance."""
+
+    def __init__(self, *args, **kwargs):
+        """Initialize the asynchronous Elasticsearch handler."""
+        super().__init__(ElasticSearchHandler, *args, **kwargs)
diff --git a/tamarco/core/logging/handlers/http.py b/tamarco/core/logging/handlers/http.py
new file mode 100644
index 0000000..68dd5cf
--- /dev/null
+++ b/tamarco/core/logging/handlers/http.py
@@ -0,0 +1,108 @@
+import asyncio
+import logging
+from collections import deque
+
+import aiohttp
+import ujson
+
+from tamarco.core.logging.formatters.logstash import LogstashFormatterVersion0, LogstashFormatterVersion1
+
+MAX_RECORDS_STORE = 1001
+
+
+class HTTPSHandler(logging.Handler):
+    """Logging handler that sends batches of logs over HTTP with basic authentication."""
+
+    def __init__(
+        self,
+        url,
+        user=None,
+        password=None,
+        max_time_seconds=None,
+        max_records=None,
+        fqdn=False,
+        localname=None,
+        facility=None,
+        record_type="record",
+        service_name=None,
+        deploy_name=None,
+        version=0,
+        loop=None,
+    ):
+        """Initialize the HTTP logging handler.
+
+        Args:
+            url (str): URL where the logs are sent.
+            user (str): HTTP session user authentication.
+            password (str): HTTP session password authentication.
+            max_time_seconds (int): Interval in seconds between two periodic sends of the queued logs.
+            max_records (int): If the log queue reaches this number of records, the batch is sent even if
+                max_time_seconds has not elapsed yet.
+            fqdn (bool): If True, the host field in the log record will be the fully qualified domain. Otherwise,
+                the system hostname.
+            localname: Local name (stored on the handler; not otherwise used).
+            facility: Syslog facility (stored on the handler; not otherwise used).
+            record_type (str): Type assigned to the records; 'record' by default.
+            service_name (str): Service name.
+            deploy_name (str): Deploy name.
+            version (int): If 1, the Logstash formatter version 1 is used. Otherwise, version 0.
+            loop: Asyncio event loop.
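+
+        Example (an illustrative sketch; the URL and credentials are placeholders)::
+
+            >>> handler = HTTPSHandler(
+            ...     url="https://logs.example.com/bulk",
+            ...     user="service",
+            ...     password="secret",
+            ...     max_time_seconds=1,
+            ...     max_records=10,
+            ... )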
+ """ + logging.Handler.__init__(self) + self.url = url + self.max_time_seconds = max_time_seconds + self.max_records = max_records + self.fqdn = fqdn + self.localname = localname + self.facility = facility + self.record_store = deque(maxlen=MAX_RECORDS_STORE) + self.loop = loop if loop else asyncio.get_event_loop() + if version == 1: + self.formatter = LogstashFormatterVersion1(record_type, fqdn, service_name, deploy_name) + else: + self.formatter = LogstashFormatterVersion0(record_type, fqdn, service_name, deploy_name) + if user and password: + self.session = aiohttp.ClientSession(auth=aiohttp.BasicAuth(login=user, password=password)) + else: + self.session = aiohttp.ClientSession() + self.periodic_send_task = asyncio.ensure_future(coro_or_future=self.periodic_send(), loop=self.loop) + + async def periodic_send(self): + """Periodic sending of the logs stored in the queue.""" + while True: + await asyncio.sleep(self.max_time_seconds, loop=self.loop) + self.send() + + def close(self): + """Closes the handler instance.""" + self.send() + self.periodic_send_task.cancel() + super().close() + + def send(self): + """HTTP POST request sending the logs stored in the queue.""" + try: + asyncio.ensure_future( + coro_or_future=self.session.post(self.url, json=list(self.record_store)), loop=self.loop + ) + except Exception: + print(f"[HTTPSHandler] Error sending logs to {self.url}") + else: + self.record_store.clear() + + def emit(self, record): + """Emit the specified log record. + + Args: + record (LogRecord): Entry log to emit. + """ + try: + payload = self.formatter.format(record) + self.record_store.append(ujson.loads(payload)) + if self.record_store and len(self.record_store) >= self.max_records: + self.send() + except (KeyboardInterrupt, SystemExit): + raise + except Exception: + self.handleError(record) diff --git a/tamarco/core/logging/handlers/logstash.py b/tamarco/core/logging/handlers/logstash.py new file mode 100755 index 0000000..14b8000 --- /dev/null +++ b/tamarco/core/logging/handlers/logstash.py @@ -0,0 +1,63 @@ +from logging.handlers import DatagramHandler + +from .asyncronous import AsyncWrapperHandler +from ..formatters import logstash + + +class UDPLogstashHandler(DatagramHandler): + """Logging handler for Logstash over UDP.""" + + def __init__( + self, host, port=5959, message_type="logstash", fqdn=False, service_name=None, deploy_name=None, version=0 + ): + """Initialize the Logstash handler. + + Args: + host (str): The host of the logstash server. + port (int): The port of the logstash server. + message_type (str): The type of the message (always be 'logstash'). + fqdn (bool): If True, the host field in the log record will be the fully qualified domain. Otherwise, + the system hostname. + service_name (str): Service name. + deploy_name (str): Deploy name. + version (int): If 1 it is used the Logstash formatter version 1. Otherwise, the logstash formatter + version 0. + """ + super().__init__(host, port) + if version == 1: + self.formatter = logstash.LogstashFormatterVersion1(message_type, fqdn, service_name, deploy_name) + else: + self.formatter = logstash.LogstashFormatterVersion0(message_type, fqdn, service_name, deploy_name) + + def makePickle(self, record): + """Convert the log record into the chosen logstash format (version 0 or 1). + + Args: + record (LogRecord): Entry log. + + Returns: + json: Log entry information in a JSON object. 
+ """ + return self.formatter.format(record) + + +class AsyncUDPLogstashHandler(AsyncWrapperHandler): + """Asynchronous version of the logging handler that sends the logs to a Logstash instance.""" + + def __init__( + self, host, port=5959, message_type="logstash", fqdn=False, service_name=None, deploy_name=None, version=0 + ): + """Initialize the asynchronous Logstash handler. + + Args: + host (str): The host of the logstash server. + port (int): The port of the logstash server. + message_type (str): The type of the message (always be 'logstash'). + fqdn (bool): If True, the host field in the log record will be the fully qualified domain. Otherwise, + the system hostname. + service_name (str): Service name. + deploy_name (str): Deploy name. + version (int): If 1 it is used the Logstash formatter version 1. Otherwise, the logstash formatter + version 0. + """ + super().__init__(UDPLogstashHandler, host, port, message_type, fqdn, service_name, deploy_name, version) diff --git a/tamarco/core/logging/handlers/redis.py b/tamarco/core/logging/handlers/redis.py new file mode 100755 index 0000000..1292660 --- /dev/null +++ b/tamarco/core/logging/handlers/redis.py @@ -0,0 +1,110 @@ +import logging +import sys +import time +from collections import deque + +import redis +from redis.exceptions import ConnectionError, TimeoutError + +from tamarco.core.logging.formatters import logstash +from tamarco.core.logging.handlers.asyncronous import AsyncWrapperHandler + +MAX_RECORDS_STORE = 1000 +ELAPSED_ERROR_TIME = 10 + + +class RedisHandler(logging.Handler): + """Logging handler that sends the logs to a redis database.""" + + def __init__( + self, + redis_conf=None, + record_type="record", + service_name=None, + deploy_name=None, + level=logging.NOTSET, + fqdn=False, + version=0, + ): + """Initialize the handler. + + Args: + redis_conf (dict): Redis connection info, a key with the host address and other key with the + port. E.g.: {"host": "127.0.0.1", "port": 7000} + record_type (str): The record type always will be 'record'. + service_name (str): Service name. + deploy_name (str): Deploy name. + level (str): Logging level. Default: NOTSET + fqdn (bool): If True, the host field in the log record will be the fully qualified domain. Otherwise, + the system hostname. + version (int): If 1 it is used the Logstash formatter version 1. Otherwise, the logstash formatter + version 0. + """ + logging.Handler.__init__(self, level=level) + self.redis_conf = {"host": "127.0.0.1", "port": 7006} + if redis_conf: + self.redis_conf.update(redis_conf) + + self.key = self.redis_conf.get("key", "logstash") + + self.connected = False + self.conn = None + self.try_conn() + if version == 1: + self.formatter = logstash.LogstashFormatterVersion1(record_type, fqdn, service_name, deploy_name) + else: + self.formatter = logstash.LogstashFormatterVersion0(record_type, fqdn, service_name, deploy_name) + + self.record_store = deque(maxlen=MAX_RECORDS_STORE) + self.last_error_time = time.time() + + def try_conn(self): + """Try a new connection to the redis database.""" + try: + self.conn = redis.StrictRedis(**self.redis_conf) + except (ConnectionError, TimeoutError) as e: + print(f"Error connecting to redis of logger !! {e}", file=sys.stderr) + + def elapsed_time(self): + """Calculate if a certain time has passed since the last error sending the logs. Useful not to saturate + with error logs. + + Returns: + bool: True if the last error sending the logs is more that the of a certain time threshold + (ELAPSED_ERROR_TIME). False otherwise. 
+ """ + new_error_time = time.time() + if (new_error_time - self.last_error_time) >= ELAPSED_ERROR_TIME: + self.last_error_time = new_error_time + return True + return False + + def send(self, record): + """Send the specified log record to redis and, if there are more logs entries in the queue, send them. + + Args: + record (LogRecord): Entry log. + """ + try: + self.conn.rpush(self.key, record) + for old_record in self.record_store: + self.conn.rpush(self.key, old_record) + self.record_store.clear() + except (ConnectionError, TimeoutError) as e: + if self.elapsed_time(): + print(f"Error connecting to redis of logger !! {e}", file=sys.stderr) + self.record_store.append(record) + self.try_conn() + + def emit(self, record): + """Format and sends the specified log record.""" + record = self.formatter.format(record) + self.send(record) + + +class AsyncRedisHandler(AsyncWrapperHandler): + """Asynchronous version of the logging handler that sends the logs to a redis database.""" + + def __init__(self, *args, **kwargs): + """Initialize the asynchronous redis handler.""" + super().__init__(RedisHandler, *args, **kwargs) diff --git a/tamarco/core/logging/logging.py b/tamarco/core/logging/logging.py new file mode 100755 index 0000000..23410ed --- /dev/null +++ b/tamarco/core/logging/logging.py @@ -0,0 +1,256 @@ +import logging.config +import sys + +from tamarco.core.patterns import Singleton +from tamarco.core.utils import check_connection_http_url +from .formatters.colored import ColoredFormatter +from .formatters.logstash import LogstashFormatterVersion1 +from .formatters.syslog import SyslogFormatter + +PROFILES = {"DEVELOP": {"loglevel": "DEBUG"}, "PRODUCTION": {"loglevel": "INFO"}, "TESTING": {"loglevel": "DEBUG"}} + + +class Logging(metaclass=Singleton): + """Class that handles the configuration of the standard logging of python using the microservice settings.""" + + def __init__(self): + """Initialize the Tamarco logging class with the available formatters.""" + self.color_blind = False + self.settings = None + self.microservice_name = None + self.deploy_name = None + self.logging_config = { + "version": 1, + "formatters": { + "detail": { + "format": "[%(asctime)s] [%(levelname)s] [(%(processName)s) " + "%(filename)s:%(lineno)s %(funcName)s] %(message)s" + }, + "colored": {"()": ColoredFormatter, "color_blind": self.color_blind}, + "syslog_format": {"()": SyslogFormatter}, + "logstash": {"()": LogstashFormatterVersion1}, + }, + "handlers": {}, + "loggers": {}, + } + + @staticmethod + def describe_static_settings(): + """Describe all the settings as a dictionary keys and their values are a setting short description. + These settings are the static settings needed by the class. + + Returns: + dict: Settings and their description. + """ + return { + "profile": "Profile of logging, it can be DEVELOP, TESTING or PRODUCTION", + "elasticsearch": 'List of connections string of elasticsearch. Example: ["127.0.0.1:9300"]', + "redis": "Dictionary with the redis connection parameters: host, port, password", + "logstash": "Logstash info connection: host and port", + "http": "HTTP endpoint", + "file_path": "File path where the logs are going to be stored", + "stdout": "If true there is logging through stdout", + } + + @staticmethod + def describe_dynamic_settings(): + """Describe all the class dynamic settings. + + Returns: + dict: Settings and their description. + """ + return {} + + def configure_settings(self, settings): + """Sets the settings object (a SettingsView(f"{ROOT_SETTINGS}.logging")). 
+ + Args: + settings (SettingsInterface): Settings object that have the logging settings. + """ + self.settings = settings + + async def start(self, loggers, microservice_name, deploy_name, loop): + """Configure the standard python logging, adding handlers and loggers that uses that handlers. + + Args: + loggers (list): Names of the loggers you want to configure. + microservice_name (str): Name of the microservice that will use the logging. + deploy_name (str): Deploy name. + loop: asyncio event loop. + """ + self.microservice_name = microservice_name + self.deploy_name = deploy_name + await self.__setup_handlers(loop=loop) + await self.__setup_loggers(loggers) + logging.config.dictConfig(self.logging_config) + await self.__setup_watchers() + + async def __setup_handlers(self, loop): + """Configures the logging handlers needed by the loggers of the microservices. + + Args: + loop: asyncio event loop. + """ + await self.__setup_stdout_handler() + await self.__setup_file_handler() + await self.__setup_redis_handler() + await self.__setup_http_handler(loop) + await self.__setup_logstash_handler() + await self.__setup_elasticsearch_handler() + + async def __setup_stdout_handler(self): + """Configures the stdout logging handler.""" + if await self.settings.get("stdout", True): + self.logging_config["handlers"]["stdout"] = { + "class": "logging.StreamHandler", + "formatter": "colored", + "level": PROFILES[await self.settings.get("profile")]["loglevel"], + "stream": sys.stdout, + } + + async def __setup_file_handler(self): + """Configures the file logging handler.""" + if await self.settings.get("file_path", False): + self.logging_config["handlers"]["file"] = { + "class": "logging.handlers.RotatingFileHandler", + "formatter": "syslog_format", + "level": PROFILES[await self.settings.get("profile")]["loglevel"], + "filename": await self.settings.get("file_path"), + "maxBytes": 10485760, # 10MB + "backupCount": 100, + "encoding": "utf8", + } + + async def __setup_redis_handler(self): + """Configures the redis logging handler.""" + if await self.settings.get("redis.enabled", False): + self.logging_config["handlers"]["redis"] = { + "class": "tamarco.core.logging.handlers.redis.AsyncRedisHandler", + "level": PROFILES[await self.settings.get("profile")]["loglevel"], + "redis_conf": { + "host": await self.settings.get("redis.host"), + "password": await self.settings.get("redis.password"), + "port": await self.settings.get("redis.port"), + "ssl": await self.settings.get("redis.ssl"), + }, + "service_name": self.microservice_name, + "deploy_name": self.deploy_name, + } + + async def __setup_logstash_handler(self): + """Configures the logstash logging handler.""" + if await self.settings.get("logstash.enabled", False): + self.logging_config["handlers"]["logstash"] = { + "class": "tamarco.core.logging.handlers.logstash.AsyncUDPLogstashHandler", + "level": PROFILES[await self.settings.get("profile")]["loglevel"], + "host": await self.settings.get("logstash.host", "127.0.0.1"), + "port": await self.settings.get("logstash.port", 5959), + "fqdn": await self.settings.get("logstash.fqdn", False), + "service_name": self.microservice_name, + "deploy_name": self.deploy_name, + "version": await self.settings.get("version", 1), + } + + async def __setup_http_handler(self, loop): + """Configures the HTTP logging handler.""" + if await self.settings.get("http.enabled", False): + if await check_connection_http_url(url=await self.settings.get("http.url", ""), loop=loop): + self.logging_config["handlers"]["http"] = { + 
"class": "tamarco.core.logging.handlers.http.HTTPSHandler", + "level": PROFILES[await self.settings.get("profile")]["loglevel"], + "url": await self.settings.get("http.url"), + "user": await self.settings.get("http.user"), + "password": await self.settings.get("http.password"), + "max_time_seconds": await self.settings.get("http.max_time_seconds", 1), + "max_records": await self.settings.get("http.max_records", 10), + "service_name": self.microservice_name, + "deploy_name": self.deploy_name, + "loop": loop, + } + + async def __setup_elasticsearch_handler(self): + """Configures the Elasticsearch logging handler.""" + if await self.settings.get("elasticsearch.enabled", False): + self.logging_config["handlers"]["elasticsearch"] = { + "class": "tamarco.core.logging.handlers.elasticsearch.AsyncElasticSearchHandler", + "level": PROFILES[await self.settings.get("profile")]["loglevel"], + "conn_strs": await self.settings.get("elasticsearch.host"), + "service_name": self.microservice_name, + "deploy_name": self.deploy_name, + } + + async def __setup_loggers(self, loggers_names): + """Configures all the loggers needed by the microservice adding to them the handlers. + + Args + loggers_names (list): Names of the loggers you want to configure. + """ + profile = await self.settings.get("profile") + + for logger_name in loggers_names: + logger = self.__create_logger(logger_name, PROFILES[profile]["loglevel"]) + self.logging_config["loggers"].update(logger) + + def __create_logger(self, logger_name, level): + """Configures the loggers (all enable handlers, level) for the logger `logger_name` with the logging + level `level`. + + Args: + logger_name (str): Logger name. + level (str): Logging level. + + Returns: + dict: Logging configuration, set with the handlers and the logging level. + """ + logger = {"handlers": [], "level": level, "propagate": False} + + if "stdout" in self.logging_config["handlers"]: + logger["handlers"].append("stdout") + print(f"Logging handler stdout configured for {logger_name}.") + + if "elasticsearch" in self.logging_config["handlers"]: + logger["handlers"].append("elasticsearch") + print(f"Logging handler elasticsearch configured for {logger_name}.") + + if "redis" in self.logging_config["handlers"]: + logger["handlers"].append("redis") + print(f"Logging handler redis logstash configured for {logger_name}.") + + if "http" in self.logging_config["handlers"]: + logger["handlers"].append("http") + print(f"Logging handler http configured for {logger_name}.") + + if "file" in self.logging_config["handlers"]: + logger["handlers"].append("file") + print(f"Logging handler file configured for {logger_name}.") + + if "logstash" in self.logging_config["handlers"]: + logger["handlers"].append("logstash") + print(f"Logging handler logstash configured for {logger_name}.") + + return {logger_name: logger} + + async def __setup_watchers(self): + """Adds default watchers for the logging level. It adds the watcher to the system logging and + the microservice logging. + """ + + async def watcher_callback(key, setting): + """ Callback when the watcher reports that the key `key` has changed in the etcd. + + Args: + key (str): etcd key. + setting (str): The setting value already formatted. 
+ """ + await self.settings.update_internal_settings(key, setting) + profile = await self.settings.get("profile") + + if profile in PROFILES.keys(): + new_profile = PROFILES[profile]["loglevel"] + self.logging_config["handlers"]["stdout"]["level"] = new_profile + for logger in self.logging_config["loggers"].items(): + logger[1]["level"] = new_profile + + logging.config.dictConfig(self.logging_config) + + await self.settings.watch("profile", watcher_callback) diff --git a/tamarco/core/microservice.py b/tamarco/core/microservice.py new file mode 100755 index 0000000..ece5b98 --- /dev/null +++ b/tamarco/core/microservice.py @@ -0,0 +1,418 @@ +import asyncio +import logging +import sys +import time +import uuid +from collections import OrderedDict +from collections.abc import Callable +from functools import partial +from threading import Thread +from typing import Coroutine, Union + +from tamarco.core.dependency_resolver import CantSolveDependencies, resolve_dependency_order +from tamarco.core.logging.logging import Logging +from tamarco.core.patterns import Singleton +from tamarco.core.settings.settings import Settings, SettingsView +from tamarco.core.signals import SignalsManager +from tamarco.core.tasks import TasksManager, get_task_wrapper, get_thread_wrapper +from tamarco.core.utils import Informer, ROOT_SETTINGS, get_fn_full_signature +from tamarco.resources.bases import BaseResource +from tamarco.resources.basic.metrics.resource import MetricsResource +from tamarco.resources.basic.registry.resource import Registry +from tamarco.resources.basic.status.resource import StatusResource +from tamarco.resources.debug.profiler import ProfilerResource +from tamarco.resources.io.http.resource import HTTPServerResource + +logger = logging.getLogger("tamarco") + + +class MicroserviceBase(metaclass=Singleton): + # Name of the microservice, is used by the resources + # to report a name of service. + name = None + + # Instance id of the microservice, name is shared + # among instances but the instance id is unique. + instance_id = uuid.uuid4() + + # Name of the deploy, is used by the resources + # to report a deploy name, is loaded by settings. + deploy_name = None + + # Loggers to be added by the application code. + extra_loggers_names = [] + + # Main event loop. + loop = asyncio.get_event_loop() + + # Manager for task. + tasks_manager = TasksManager() + + # Settings manager. + settings = Settings() + + # Logging manager. + logging = Logging() + + @property + def loggers_names(self): + """All loggers used by the framework. + + Returns: + list: list of loggers names used by the microservice. 
+ """ + loggers = {"tamarco", "tamarco.tasks", "tamarco.settings", "asyncio"} + for resource in self.resources.values(): + loggers.update(resource.loggers_names) + loggers.update(self.extra_loggers_names) + loggers.update({self.name}) + return loggers + + def __new__(cls, *args, **kwargs): + cls.resources = OrderedDict() + + dependency_graph = { + attr_name: getattr(cls, attr_name).depends_on + for attr_name in dir(cls) + if isinstance(getattr(cls, attr_name), BaseResource) + } + + try: + resources_dep_ordered = resolve_dependency_order(dependency_graph) + except CantSolveDependencies as e: + print(e, file=sys.stderr) + exit(12) + else: + for name in resources_dep_ordered: + cls.resources[name] = getattr(cls, name) + + return super().__new__(cls, *args, **kwargs) + + def __init__(self): + assert self.name is not None, "Error, name should be defined in your microservice class" + self.logger = None + self._configure_provisional_logger() + + def _configure_provisional_logger(self): + """Provisional logging used before be able to read the final configuration from the settings.""" + self.logger = logging.getLogger(self.name) + stdout_handler = logging.StreamHandler(sys.stdout) + print(f"Configuring logger provisional logger of {self.name} to INFO and stdout") + self.logger.setLevel(logging.INFO) + self.logger.addHandler(stdout_handler) + self.logger.info(f"Configured {self.name} logger") + + async def bind(self): + """Call the bind function of all the resources. + It binds the resources to the microservice, allowing to the resources to identify their microservice. + """ + self.logger.info(f"Binding to microservice the resources: {list(self.resources.keys())}") + + await self.settings.bind(self.loop) + + for name, resource in self.resources.items(): + try: + await resource.bind(self, name) + except Exception: + self.logger.exception(f"Unexpected exception binding the resource {resource}") + exit(11) + + async def run_in_all_resources(self, method): + """Run the method name in all the resources. + + Args: + method (str): Method name to run in all the resources. 
+ """ + for resource in self.resources.values(): + self.logger.debug(f"Calling {method} of resource {resource.name}") + try: + await getattr(resource, method)() + except Exception: + self.logger.exception(f"Error in {method} of resource {resource}") + else: + if method == "start": + self.logger.info(f"Started {resource.name} from {self.name}") + + async def start_logging(self): + """Initializes the logging of the microservice.""" + self.logger.info(f"Starting logging in microservice {self.name} with loggers: {self.loggers_names}") + await self.logging.start( + loggers=self.loggers_names, microservice_name=self.name, deploy_name=self.deploy_name, loop=self.loop + ) + Informer.log_all_info(self.logger) + + async def stop_settings(self): + """Stops the settings of the microservice.""" + self.logger.info("Stopping microservice settings") + await self.settings.stop() + + async def start_settings(self): + """Initializes the settings of the microservice.""" + self.logger.info("Starting microservice settings") + await self.settings.start() + self.deploy_name = await self.settings.get(f"{ROOT_SETTINGS}.deploy_name") + await self._configure_logging_settings() + await self._configure_resource_settings() + + async def _configure_logging_settings(self): + self.logger.info("Configuring logging settings") + self.logging.configure_settings(SettingsView(self.settings, f"{ROOT_SETTINGS}.logging", self.name)) + + async def _configure_resource_settings(self): + self.logger.info("Configuring resources settings") + for resource in self.resources.values(): + await resource.configure_settings( + SettingsView(self.settings, f"{ROOT_SETTINGS}.resources.{resource.name}", self.name) + ) + + def _collect_tasks(self): + for attr_name in dir(self): + attr = getattr(self, attr_name) + if hasattr(attr, "_mark_task"): + self.tasks_manager.register_task(attr._name, attr) + elif hasattr(attr, "_mark_thread"): + self.tasks_manager.register_thread(attr._name, attr) + + +class MicroserviceContext(MicroserviceBase): + """"This class is used to use tamarco resources without using a full microservice, + for example a script. + """ + + name = "microservice_context" + + async def start(self): + self.tasks_manager.set_loop(self.loop) + await self.bind() + await self.start_settings() + await self.start_logging() + await self.run_in_all_resources("pre_start") + await self.run_in_all_resources("start") + await self.run_in_all_resources("post_start") + self._collect_tasks() + self.tasks_manager.start_all() + + async def stop(self): + self.tasks_manager.stop_all() + await self.stop_settings() + await self.run_in_all_resources("stop") + await self.run_in_all_resources("post_stop") + + +class Microservice(MicroserviceBase): + """Main class of a microservice. + This class is responsible for controlling the lifecycle of the microservice, also + builds and provides the necessary elements that a resource needs to work. + + The resources of a microservice should be declared in this class. The microservice automatically takes the ownership + of all the declared resources. + """ + + # The signals manager are responsive of handling the signal_number of the system, providing a graceful stopping in + # the service when necessary. + signals_manager = SignalsManager() + + # Default http server resource. It is used by the metrics and status resource to expose information. + tamarco_http_report_server = HTTPServerResource() + + # Default metric resource. + metrics = MetricsResource() + + # Default status resource. 
+    # the resources through an HTTP API.
+    status = StatusResource()
+
+    # Default profiler resource. It is responsible for profiling the application when the setting is activated.
+    profiler = ProfilerResource()
+
+    # Default registry resource. It is responsible for maintaining an etcd registry with all the alive
+    # microservice instances and their IPs, to be used by a discovery system.
+    registry = Registry()
+
+    def __init__(self):
+        super().__init__()
+        self.tasks_manager.set_loop(self.loop)
+        self.signals_manager.set_loop(self.loop)
+
+    async def pre_start(self):
+        """Pre-start stage of the lifecycle.
+        This method can be overridden by the user to add some logic at startup.
+        """
+        self.logger.info("============ Pre Starting ============")
+        await self.run_in_all_resources("pre_start")
+
+    async def start(self):
+        """Start stage of the lifecycle.
+        This method can be overridden by the user to add some logic at startup.
+        """
+        self.logger.info("============ Starting ============")
+        await self.run_in_all_resources("start")
+        self._collect_tasks()
+        self.tasks_manager.start_all()
+
+    async def post_start(self):
+        """Post-start stage of the lifecycle.
+        This method can be overridden by the user to add some logic at startup.
+        """
+        self.logger.info("============ Post Starting ============")
+        await self.run_in_all_resources("post_start")
+
+    async def stop(self):
+        """Stop stage of the lifecycle.
+        This method can be overridden by the user to add some logic to the shutdown.
+        This method should close all the I/O operations opened by the resources.
+        """
+        self.logger.info("============ Stopping ============")
+        await self.run_in_all_resources("stop")
+        await self.stop_settings()
+        self.tasks_manager.stop_all()
+
+    async def post_stop(self):
+        """Post-stop stage of the lifecycle.
+        This method can be overridden by the user to add some logic to the shutdown.
+        """
+        self.logger.info("============ Post Stopping ============")
+        await self.run_in_all_resources("post_stop")
+
+    async def _setup(self):
+        await self.bind()
+        await self.start_settings()
+        await self.start_logging()
+        await self.pre_start()
+        await self.start()
+        await self.post_start()
+
+    def run(self):
+        """Run a microservice.
+        It runs the main asyncio event loop, so this function only returns when the microservice
+        ends its life cycle.
+        """
+        self.logger.info(f"Running microservice {self.name}. Calling setup method")
+        try:
+            self.loop.run_until_complete(self._setup())
+            self.loop.run_forever()
+        except Exception:
+            self.logger.critical(
+                "Unexpected exception in the setup or during the run of the loop, stopping the microservice",
+                exc_info=True,
+            )
+            self.loop.run_until_complete(self.stop_gracefully())
+
+    async def stop_gracefully(self):
+        """Stop the microservice gracefully.
+        Shut down the microservice. If after 30 seconds it has not closed gracefully, an exit is forced.
+        """
+        thread = Thread(target=self._wait_and_force_exit)
+        thread.start()
+        await self.stop()
+        await self.post_stop()
+        if self.loop.is_running():
+            self.loop.stop()
+
+    def _wait_and_force_exit(self):
+        time.sleep(30)
+        self.logger.critical("Error stopping all the resources. Forcing exit.")
+        exit(1)
+
+
+def task(name_or_fn):
+    """Decorator to convert a method of a microservice into an asyncio task.
+    The task is started and stopped when the microservice starts and stops respectively.
+
+    Args:
+        name_or_fn: Name of the task, or the function itself.
+            When a function is passed, the task name is the declared name of the function.
+    """
+
+    def decorator(name, fn):
+        wrapper = get_task_wrapper(fn, name)
+        wrapper._mark_task = True
+        wrapper._name = name
+        return wrapper
+
+    if isinstance(name_or_fn, str):
+        # Used as @task("name"): return a decorator bound to the given name.
+        name = name_or_fn
+        return partial(decorator, name)
+    elif callable(name_or_fn):
+        # Used as @task without arguments: the function itself is received.
+        if not asyncio.iscoroutinefunction(name_or_fn):
+            raise Exception(f"Tamarco {name_or_fn} task not created! The function is not asynchronous")
+        fn = name_or_fn
+        name = get_fn_full_signature(fn)
+        return decorator(name, fn)
+    else:
+        raise Exception("task decorator should be used with a parameter (name) that is a str or without parameters")
+
+
+def thread(name_or_fn):
+    """Decorator to convert a method of a microservice into a thread.
+    The thread is started and stopped when the microservice starts and stops respectively.
+
+    Args:
+        name_or_fn: Name of the thread, or the function itself. When a function is passed, the thread name is
+            the declared name of the function.
+    """
+
+    def decorator(name: str, fn: Callable):
+        wrapper = get_thread_wrapper(fn, name)
+        wrapper._mark_thread = True
+        wrapper._name = name
+        return wrapper
+
+    if isinstance(name_or_fn, str):
+        name = name_or_fn
+        return partial(decorator, name)
+    elif callable(name_or_fn):
+        fn = name_or_fn
+        name = get_fn_full_signature(fn)
+        return decorator(name, fn)
+    else:
+        raise Exception("thread decorator should be used with a parameter (name) that is a str or without parameters")
+
+
+def task_timer(interval=1000, one_shot=False, autostart=False) -> Union[Callable, Coroutine]:
+    """Decorator to declare a task that should be repeated at a fixed time interval.
+
+    Examples:
+        >>> @task_timer()
+        >>> async def execute(*args, **kwargs):
+        >>>     print('tick')
+
+        >>> @task_timer(interval=1000, one_shot=True, autostart=True)
+        >>> async def execute(*args, **kwargs):
+        >>>     print('tick')
+
+    Args:
+        interval (int): Interval in milliseconds between task executions.
+        one_shot (bool): Only run the task once.
+        autostart (bool): Execute the task immediately instead of waiting a first interval.
+    """
+
+    def wrapper_task_timer(fn: Union[str, Callable]) -> Union[Callable, Coroutine]:
+        """Function that adds the timer functionality."""
+
+        async def fn_with_sleep(*args, **kwargs):
+            try:
+                # Interval converted from milliseconds to seconds.
+                interval_seconds = interval / 1000
+
+                # Execute immediately only when autostart is set and the task is not one-shot;
+                # otherwise sleep through the first interval.
+                execute_task = autostart and not one_shot
+
+                while True:
+                    if execute_task:
+                        logger.debug(
+                            f"Executing task timer {fn.__name__} with the params: interval = {interval}, "
+                            f"one_shot = {one_shot}, autostart = {autostart}"
+                        )
+                        await fn(*args, **kwargs)
+                    if one_shot and execute_task:
+                        break
+                    execute_task = True
+                    await asyncio.sleep(interval_seconds)
+            except Exception:
+                logger.exception(f"Unexpected exception running task timer {fn.__name__}. Timer will not recover")
Timer will not recover") + + # Change name timer function with original task name + fn_with_sleep.__name__ = fn.__name__ + return task(fn_with_sleep) + + return wrapper_task_timer diff --git a/tamarco/core/patterns/__init__.py b/tamarco/core/patterns/__init__.py new file mode 100755 index 0000000..5ae5138 --- /dev/null +++ b/tamarco/core/patterns/__init__.py @@ -0,0 +1,5 @@ +from .flyweight import Flyweight, FlyweightWithLabels +from .proxy import Proxy +from .singleton import Singleton + +__all__ = ["Singleton", "Proxy", "Flyweight", "FlyweightWithLabels"] diff --git a/tamarco/core/patterns/flyweight.py b/tamarco/core/patterns/flyweight.py new file mode 100644 index 0000000..50499bb --- /dev/null +++ b/tamarco/core/patterns/flyweight.py @@ -0,0 +1,126 @@ +import uuid + + +class Flyweight(type): + """Metaclass that implements the Flyweight pattern. + + It is like a Singleton but only for the instances with the same key. + The key is first parameter that you pass to the class when you create the object. + + This class is conceived for the internal use of the Tamarco metrics library. + + Example:: + + >>> class Metric(metaclass=Flyweight): + >>> def __init__(self, metric_id): + >>> self.metric_id = metric_id + >>> + >>> http_requests_1 = Metric('http_requests') + >>> http_requests_2 = Metric('http_requests') + >>> + >>> http_requests_1 == http_requests_2 + True + """ + + def __init__(cls, name, bases, dct): + """Initialize the Flyweight class. The class can be call with one argument or three: + + >>> class Flyweight(object) + >>> class Flyweight(name, bases, dict) + + With one argument, returns the type of an object. The return value is a type object and generally + the same object as returned by object.__class__. With three arguments, returns a new type object. + + Args: + name (Union[object, str]): when `name`is a str, is the class name and becomes the __name__ attribute. + bases (tuple): itemizes the base classes and becomes the __bases__ attribute. + dct (dict): is the namespace containing definitions for class body and is copied to a standard + dictionary to become the __dict__ attribute. + """ + cls.__instances = {} + type.__init__(cls, name, bases, dct) + + def __call__(cls, key, *args, **kw): + """Call cls as a function. It checks if the `key` instance already exists. + + Args: + key (string): instance name. + *args: Variable length argument list. + **kw: Arbitrary keyword arguments. + + Returns: + object: new instance is the key does not exist in the instances dictionary, or the instance + already created with that key. + """ + instance = cls.__instances.get(key) + if instance is None: + instance = type.__call__(cls, key, *args, **kw) + cls.__instances[key] = instance + return instance + + +class FlyweightWithLabels(Flyweight): + """Metaclass that extends the pattern of the Flyweight pattern with labels. + + This class is conceived for the internal use of the Tamarco metrics library. 
+
+    Example::
+
+        >>> class Metric(metaclass=FlyweightWithLabels):
+        >>>     def __init__(self, metric_id, labels=None):
+        >>>         self.metric_id = metric_id
+        >>>         self.labels = labels if labels else {}
+        >>>
+        >>> requests_http_get_1 = Metric('request', labels={'protocol': 'http', 'method': 'get'})
+        >>> requests_http_post_1 = Metric('request', labels={'protocol': 'http', 'method': 'post'})
+        >>>
+        >>> requests_http_get_2 = Metric('request', labels={'protocol': 'http', 'method': 'get'})
+        >>> requests_http_post_2 = Metric('request', labels={'protocol': 'http', 'method': 'post'})
+        >>>
+        >>> requests_http_get_1 == requests_http_get_2
+        True
+        >>> requests_http_post_1 == requests_http_post_2
+        True
+    """
+
+    def __init__(cls, name, bases, dct):
+        """Initialize the FlyweightWithLabels metaclass.
+
+        Args:
+            name (Union[object, str]): when `name` is a str, it is the class name and becomes the __name__ attribute.
+            bases (tuple): itemizes the base classes and becomes the __bases__ attribute.
+            dct (dict): is the namespace containing definitions for the class body and is copied to a standard
+                dictionary to become the __dict__ attribute.
+        """
+        cls.__extended_instances = {}
+        cls.__extended_instances_labels = {}
+        Flyweight.__init__(cls, name, bases, dct)
+
+    def __call__(cls, key, *args, **kw):
+        """Call cls as a function. It checks if the `key` instance already exists.
+
+        Args:
+            key (string): instance name.
+            *args: Variable length argument list.
+            **kw: Arbitrary keyword arguments.
+
+        Returns:
+            object: a new instance if the key, with its labels, does not exist in the instances dictionary;
+                otherwise the instance already created with that key and labels.
+        """
+        labels = kw.get("labels")
+        if not labels:
+            instance = Flyweight.__call__(cls, key, *args, **kw)
+            return instance
+        else:
+            assert isinstance(labels, dict), "The labels should be a dictionary"
+            extended_instances = cls.__extended_instances_labels.setdefault(key, {})
+            for iter_label_id, iter_labels in extended_instances.items():
+                if iter_labels == labels:
+                    return cls.__extended_instances[iter_label_id]
+            else:
+                label_id = str(uuid.uuid4())
+                extended_instance = type.__call__(cls, key, *args, **kw)
+                cls.__extended_instances[label_id] = extended_instance
+                cls.__extended_instances_labels[key][label_id] = labels
+                return extended_instance
diff --git a/tamarco/core/patterns/proxy.py b/tamarco/core/patterns/proxy.py
new file mode 100755
index 0000000..3969a12
--- /dev/null
+++ b/tamarco/core/patterns/proxy.py
@@ -0,0 +1,210 @@
+class Proxy:
+    """Proxy pattern to be used as a pointer. When the value of _obj changes, the reference to
+    the proxy remains.
+    """
+
+    __slots__ = ["_obj", "__weakref__"]
+
+    def __init__(self, obj):
+        """Initialize the Proxy class.
+
+        Args:
+            obj (object): New object to be used in the proxy.
+        """
+        object.__setattr__(self, "_obj", obj)
+
+    #
+    # proxying (special cases)
+    #
+    def __getattribute__(self, name):
+        """Get the value from the _obj attribute `name`.
+
+        Args:
+            name (string): Attribute name.
+
+        Returns:
+            Attribute value.
+        """
+        return getattr(object.__getattribute__(self, "_obj"), name)
+
+    def __delattr__(self, name):
+        """Delete the attribute `name` from the _obj object.
+
+        Args:
+            name (string): Attribute name.
+        """
+        delattr(object.__getattribute__(self, "_obj"), name)
+
+    def __setattr__(self, name, value):
+        """Set a value to the _obj attribute `name`.
+
+        Args:
+            name (string): Attribute name.
+            value: Attribute value.
+ """ + setattr(object.__getattribute__(self, "_obj"), name, value) + + def __bool__(self): + """Convert an object to boolean. + + Returns: + bool: Returns True when the evaluation of the _obj object is true, False otherwise. + """ + return bool(object.__getattribute__(self, "_obj")) + + def __str__(self): + """Compute the "informal" or nicely printable string representation of the _obj object. + + Returns: + str: String representation of the _obj object. + """ + return str(object.__getattribute__(self, "_obj")) + + def __repr__(self): + """Compute the "official" string representation of the _obj object. + + Returns: + str: String representation of the _obj object. + """ + return repr(object.__getattribute__(self, "_obj")) + + # + # factories + # + _special_names = [ + "__abs__", + "__add__", + "__and__", + "__call__", + "__cmp__", + "__coerce__", + "__contains__", + "__delitem__", + "__delslice__", + "__div__", + "__divmod__", + "__eq__", + "__float__", + "__floordiv__", + "__ge__", + "__getitem__", + "__getslice__", + "__gt__", + "__hash__", + "__hex__", + "__iadd__", + "__iand__", + "__idiv__", + "__idivmod__", + "__ifloordiv__", + "__ilshift__", + "__imod__", + "__imul__", + "__int__", + "__invert__", + "__ior__", + "__ipow__", + "__irshift__", + "__isub__", + "__iter__", + "__itruediv__", + "__ixor__", + "__le__", + "__len__", + "__long__", + "__lshift__", + "__lt__", + "__mod__", + "__mul__", + "__ne__", + "__neg__", + "__oct__", + "__or__", + "__pos__", + "__pow__", + "__radd__", + "__rand__", + "__rdiv__", + "__rdivmod__", + "__reduce__", + "__reduce_ex__", + "__repr__", + "__reversed__", + "__rfloorfiv__", + "__rlshift__", + "__rmod__", + "__rmul__", + "__ror__", + "__rpow__", + "__rrshift__", + "__rshift__", + "__rsub__", + "__rtruediv__", + "__rxor__", + "__setitem__", + "__setslice__", + "__sub__", + "__truediv__", + "__xor__", + "next", + ] + + @staticmethod + def make_method(name): + """Create a new method to getting the value of the attribute `name`. + + Args: + name (string): Attribute name. + + Returns: + function: New __getattribute__ method to get a value from the _obj object. + """ + + def method(self, *args, **kw): + return getattr(object.__getattribute__(self, "_obj"), name)(*args, **kw) + + return method + + @classmethod + def _create_class_proxy(cls, the_class): + """Create a proxy for the given class. + + Args: + the_class (class): Class type in which we want to use the proxy. + + Returns: + object: New proxy instance referencing the `the_class` class. + """ + namespace = {} + for name in cls._special_names: + if hasattr(the_class, name): + namespace[name] = cls.make_method(name) + return type(f"{cls.__name__}({the_class.__name__})", (cls,), namespace) + + def __new__(cls, obj, *args, **kwargs): + """Create an proxy instance referencing `obj`. + + (obj, *args, **kwargs) are passed to this class' __init__, so deriving classes can define an + __init__ method of their own. + + _class_proxy_cache is unique per deriving class (each deriving class must hold its own cache). + + Args: + obj (object): Instance of the class in which we want to use the proxy. + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Returns: + object: New proxy instance referencing `obj`. 
+ """ + try: + cache = cls.__dict__["_class_proxy_cache"] + except KeyError: + cls._class_proxy_cache = cache = {} + try: + the_class = cache[obj.__class__] + except KeyError: + cache[obj.__class__] = the_class = cls._create_class_proxy(obj.__class__) + instance = object.__new__(the_class) + the_class.__init__(instance, obj, *args, **kwargs) + return instance diff --git a/tamarco/core/patterns/singleton.py b/tamarco/core/patterns/singleton.py new file mode 100755 index 0000000..c7fd070 --- /dev/null +++ b/tamarco/core/patterns/singleton.py @@ -0,0 +1,21 @@ +class Singleton(type): + """Singleton pattern implementation. + + This pattern restricts the instantiation of a class to one object. + """ + + _instances = {} + + def __call__(cls, *args, **kwargs): + """Call cls as a function. It checks if the instance already exists. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Returns: + object: Instance from the class that uses this pattern. + """ + if cls not in cls._instances: + cls._instances[cls] = super().__call__(*args, **kwargs) + return cls._instances[cls] diff --git a/tamarco/core/settings/__init__.py b/tamarco/core/settings/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/core/settings/backends/__init__.py b/tamarco/core/settings/backends/__init__.py new file mode 100755 index 0000000..b6fce51 --- /dev/null +++ b/tamarco/core/settings/backends/__init__.py @@ -0,0 +1,11 @@ +from .dictionary import DictSettingsBackend +from .etcd import EtcdSettingsBackend +from .file_based import JsonSettingsBackend, PythonSettingsBackend, YamlSettingsBackend + +__all__ = [ + "JsonSettingsBackend", + "PythonSettingsBackend", + "YamlSettingsBackend", + "DictSettingsBackend", + "EtcdSettingsBackend", +] diff --git a/tamarco/core/settings/backends/dictionary.py b/tamarco/core/settings/backends/dictionary.py new file mode 100755 index 0000000..2c5c3d1 --- /dev/null +++ b/tamarco/core/settings/backends/dictionary.py @@ -0,0 +1,86 @@ +import asyncio + +from tamarco.core.settings.backends.interface import SettingsInterface, _EmptyArg + + +class DictSettingsBackend(SettingsInterface): + """Class to handle settings based in a python dictionary.""" + + def __init__(self, dict_settings, loop=None): + self.loop = loop + self.settings = dict_settings + self.callbacks = {} + + def set_loop(self, loop): + self.loop = loop + + async def get(self, key, default=_EmptyArg): + """Return the setting value. + + Args: + key (str): Path to the setting. + default: Default value to return if the key does not exist. + + Returns: + Setting value. + """ + setting = self.settings + for token in key.split("."): + try: + setting = setting[token] + except (KeyError, TypeError): + if default != _EmptyArg: + return default + else: + raise KeyError(key) + return setting + + async def set(self, key, value): # noqa: A003 + """Set the setting value. + + Args: + key (str): Path to the setting. + value: Setting value to set. + """ + setting = self.settings + tokens = key.split(".")[:-1] + last_token = key.split(".")[-1] + for token in tokens: + if token not in setting or not isinstance(setting[token], dict): + setting[token] = {} + setting = setting[token] + setting[last_token] = value + await self._trigger_callbacks(key) + + async def delete(self, key): + """Delete a setting. + + Args: + key (str): Path to the setting. 
+ """ + setting = self.settings + tokens = key.split(".")[:-1] + last_token = key.split(".")[-1] + for token in tokens: + setting = setting[token] + del setting[last_token] + await self._trigger_callbacks(key) + + async def watch(self, key, callback): + """Create a hook in the key to trigger the callback when a setting is changed. + + Args: + key (str): Path to the setting. + callback: Callback to call when the value of the key change. + """ + self.callbacks[key] = self.callbacks.get(key, []) + [callback] + + async def _trigger_callbacks(self, key): + for callback_key in self.callbacks.keys(): + if callback_key in key: + try: + value = await self.get(callback_key) + except KeyError: + value = None + for callback in self.callbacks[callback_key]: + asyncio.ensure_future(callback(callback_key, value), loop=self.loop) diff --git a/tamarco/core/settings/backends/etcd.py b/tamarco/core/settings/backends/etcd.py new file mode 100755 index 0000000..160b910 --- /dev/null +++ b/tamarco/core/settings/backends/etcd.py @@ -0,0 +1,220 @@ +import asyncio +import logging +import os +from concurrent.futures._base import CancelledError, TimeoutError +from typing import NewType, TypeVar + +import aio_etcd +import ujson +from etcd import EtcdNotDir, EtcdResult + +from tamarco.core.settings.backends.interface import SettingsInterface, _EmptyArg +from tamarco.core.settings.utils import format_key_to_etcd, parse_dir_response + +Key = NewType("Key", str) +Value = TypeVar("Value", str, int, float) + +logger = logging.getLogger("tamarco.settings") + +WATCHER_ERROR_WAIT_TIME = 5 + + +class EtcdSettingsBackend(SettingsInterface): + """Class to handle settings that are in a etcd service.""" + + @staticmethod + def _format_response_key(response_key): + key = response_key.replace("/", ".") + if key.startswith("."): + key = key[1:] + return key + + def __init__(self, etcd_config, loop=None): + self.watch_tasks = [] + self.client = aio_etcd.Client(**etcd_config, loop=loop) + + async def check_etcd_health(self): + """Check if a key specified in the enviroment variable ETCD_CHECK_KEY exists. + It is to ensure that the etcd is correctly configured before starting to read them. + The configuration script of etcd should write the ETCD_CHECK_KEY when all the parameters are correctly + configured. + + Raises: + KeyError: If the etcd key is not created in 4 seconds. + """ + if "ETCD_CHECK_KEY" in os.environ: + print(f"Checking if ETCD_CHECK_KEY={os.environ['ETCD_CHECK_KEY']} exists in Etcd.") + for _ in range(4): + try: + await self.get(f"{os.environ['ETCD_CHECK_KEY']}") + except KeyError: + await asyncio.sleep(1) + else: + return + raise KeyError + + async def get(self, key, default=_EmptyArg): + """Return the setting value. + + Args: + key (str): Path to the setting. + default: Default value to return if the key does not exist. + + Returns: + Setting value. + """ + try: + response = await self.client.read(key=format_key_to_etcd(key), recursive=True) + except aio_etcd.EtcdKeyNotFound: + logger.debug(f"Could not found the key {key} in ETCD") + if default != _EmptyArg: + return default + else: + raise KeyError + except Exception: + logger.warning(f"Error retrieving key {key} from ETCD", exc_info=True) + raise + + logger.debug(f"Got setting {key} from ETCD with response: {response}") + + if response.dir: + return parse_dir_response(response, key) + else: + return ujson.loads(response.value) + + async def set(self, key, value, ttl=None): # noqa: A003 + """Set the setting value. + + Args: + key (str): Path to the setting. 
+ value: Setting value to set. + """ + if isinstance(value, dict): + try: + await self.mkdir(key) + except EtcdNotDir: + logger.warning(f"Exception creating the ETCD directory {key}. It is not a directory") + return await self.recursive_set(key, value) + else: + logger.debug(f"Adding the key {key} with value: {value}") + value = ujson.dumps(value) + await self.client.write(key=format_key_to_etcd(key), value=value, ttl=ttl, append=False) + + async def recursive_set(self, base_key, conf_dict): + """Set a directory recursively in a certain path. + + Args: + base_key: Path where to write the settings. + conf_dict: Settings to update. + """ + base_key += "/" if base_key != "" else "" + for key, value in conf_dict.items(): + key = base_key + key + if isinstance(value, dict): + try: + await self.mkdir(key) + except EtcdNotDir: + logger.warning(f"Exception creating the ETCD directory {key}. It is not a directory") + await self.recursive_set(key, value) + else: + await self.set(key, value) + + async def mkdir(self, key): + """Create a etcd directory. + + Args: + key (str): Path to the directory to create. + """ + logger.info(f"Adding to ETCD the folder: {key}") + await self.client.write(key=key, value=None, dir=True) + + async def delete(self, key) -> EtcdResult: + """Delete a setting. + + Args: + key (str): Path to the setting. + """ + try: + result = await self.client.delete(key=format_key_to_etcd(key), recursive=True) + except Exception: + logger.warning(f"Could not delete from ETCD the key: {key}", exc_info=True) + raise KeyError + else: + logger.info(f"Deleted from ETCD the key: {key}") + return result + + async def watch_callback(self, response, key, callback): + """Intermediary callback to parse the etcd response. + + Args: + response: Etcd response to be parsed. + key (str): Setting to watch. + callback: Callback function to execute. + """ + if response.action == "delete": + setting = None + elif response.dir: + setting = parse_dir_response(response, key) + else: + setting = ujson.loads(response.value) + logger.info(f"ETCD watcher: change in setting {key}. New value: {setting}. Triggering callback") + await callback(key, setting) + + async def watch(self, key, callback): + """Create a hook in the key to trigger the callback when a setting is changed. + + Args: + key (str): Path to the setting. + callback: Callback to call when the value of the key change. + """ + + async def watch_task(): + """Watch task executed once per key to watch.""" + logger.info(f"Creating the task to watch the ETCD key: {key}") + formatted_key = format_key_to_etcd(key) + while True: + try: + response = await self.client.watch(formatted_key, recursive=True) + if not hasattr(response, "_prev_node") or (response.value != response._prev_node.value): + formatted_response_key = self._format_response_key(response.key) + await self.watch_callback(response, formatted_response_key, callback) + except TimeoutError: + # ETCD v2 issue with socket Timeout in watcher. v3 will solve the problem. + logger.warning(f"Socket timeout reached, re-watching the ETCD key: {key}") + except CancelledError: + logger.warning(f"ETCD watcher for the key {key} has been cancelled") + return + except Exception: + logger.warning( + f"Error watching the key {key}. Waiting {WATCHER_ERROR_WAIT_TIME} seconds before " + f"retrying. 
(Changes during this time won't trigger the callbacks!)",
+                        exc_info=True,
+                    )
+                    await asyncio.sleep(WATCHER_ERROR_WAIT_TIME)
+                await asyncio.sleep(0.1)
+
+        self.watch_tasks.append(asyncio.ensure_future(watch_task()))
+
+    def cancel_watch_tasks(self):
+        """Remove all the watchers from the settings to close the coroutines properly."""
+        for task in self.watch_tasks:
+            if not task.done():
+                print(f"Cancelling this task -> {task}")
+                task.cancel()
+
+    def __del__(self):
+        """When the setting object is deleted, cancel all the watch tasks."""
+        self.cancel_watch_tasks()
+        self.close()
+
+    async def _check_servers(self):
+        machines = await self.client.machines()
+        assert machines
+        return machines
+
+    def close(self):
+        """Close the connections."""
+        self.client.close()
diff --git a/tamarco/core/settings/backends/file_based.py b/tamarco/core/settings/backends/file_based.py
new file mode 100755
index 0000000..60e065a
--- /dev/null
+++ b/tamarco/core/settings/backends/file_based.py
@@ -0,0 +1,34 @@
+import ujson
+
+from tamarco.core.settings.backends.dictionary import DictSettingsBackend
+
+
+class JsonSettingsBackend(DictSettingsBackend):
+    """Class to handle settings that are in a Json file."""
+
+    def __init__(self, file, loop=None):
+        with open(file) as settings_file:
+            settings_dict = ujson.load(settings_file)
+        super().__init__(settings_dict, loop)
+
+
+class YamlSettingsBackend(DictSettingsBackend):
+    """Class to handle settings that are in a Yaml file."""
+
+    def __init__(self, file, loop=None):
+        import yaml
+
+        with open(file) as settings_file:
+            settings_dict = yaml.full_load(settings_file)
+        super().__init__(settings_dict, loop)
+
+
+class PythonSettingsBackend(DictSettingsBackend):
+    """Class to handle settings that are in a Python file."""
+
+    def __init__(self, file, loop=None):
+        import importlib.util
+
+        spec = importlib.util.spec_from_file_location("settings.python", file)
+        settings = importlib.util.module_from_spec(spec)
+        spec.loader.exec_module(settings)
+        settings_dict = settings.__dict__
+        super().__init__(settings_dict, loop)
diff --git a/tamarco/core/settings/backends/interface.py b/tamarco/core/settings/backends/interface.py
new file mode 100755
index 0000000..695b102
--- /dev/null
+++ b/tamarco/core/settings/backends/interface.py
@@ -0,0 +1,52 @@
+class _EmptyArg:
+    pass
+
+
+class _Undefined:
+    pass
+
+
+class SettingsInterface:
+    """Interface for a settings class backend."""
+
+    async def get(self, key, default=_EmptyArg):
+        """Return the setting value.
+
+        Args:
+            key (str): Path to the setting.
+            default: Default value to return if the key does not exist.
+
+        Returns:
+            Setting value.
+        """
+        raise NotImplementedError
+
+    async def set(self, key, value):  # noqa: A003
+        """Set the setting value.
+
+        Args:
+            key (str): Path to the setting.
+            value: Setting value to set.
+        """
+        raise NotImplementedError
+
+    async def delete(self, key):
+        """Delete a setting.
+
+        Args:
+            key (str): Path to the setting.
+        """
+        raise NotImplementedError
+
+    async def watch(self, key, callback):
+        """Create a hook in the key to trigger the callback when a setting is changed.
+
+        Args:
+            key (str): Path to the setting.
+            callback: Callback to call when the value of the key changes.
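+
+        Example (illustrative sketch; ``backend`` stands for any concrete settings backend and
+        ``on_change`` is a hypothetical callback):
+            >>> async def on_change(key, value):
+            >>>     print(f"{key} changed to {value}")
+            >>>
+            >>> await backend.watch("system.logging.profile", on_change)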
+ """ + raise NotImplementedError + + async def cancel_watch_tasks(self): + """Remove all the watchers from the settings to close the coroutines properly.""" + raise NotImplementedError diff --git a/tamarco/core/settings/setting_proxy.py b/tamarco/core/settings/setting_proxy.py new file mode 100755 index 0000000..0d676ae --- /dev/null +++ b/tamarco/core/settings/setting_proxy.py @@ -0,0 +1,74 @@ +from tamarco.core.patterns import Proxy +from tamarco.core.settings.settings import Settings, SettingsNotLoadedYet + + +class SettingProxy(Proxy): + """Proxy pattern used as a pointer/reference, this proxy is returned + by when_loaded_setting function. When the settings are loaded + the obj that proxies this proxy is set to the value of the setting. + If is used before the load of settings a SettingsNotLoadedYet exception is raised. + """ + + def __getattribute__(self, name): + obj = object.__getattribute__(self, "_obj") + if isinstance(obj, SettingsNotLoadedYet): + raise obj + return getattr(obj, name) + + def __delattr__(self, name): + obj = object.__getattribute__(self, "_obj") + if isinstance(obj, SettingsNotLoadedYet): + raise obj + delattr(obj, name) + + def __setattr__(self, name, value): + obj = object.__getattribute__(self, "_obj") + if isinstance(obj, SettingsNotLoadedYet): + raise obj + setattr(obj, name, value) + + def __nonzero__(self): + obj = object.__getattribute__(self, "_obj") + if isinstance(obj, SettingsNotLoadedYet): + raise obj + return bool(obj) + + def __str__(self): + obj = object.__getattribute__(self, "_obj") + if isinstance(obj, SettingsNotLoadedYet): + raise obj + return str(obj) + + def __repr__(self): + obj = object.__getattribute__(self, "_obj") + if isinstance(obj, SettingsNotLoadedYet): + raise obj + return repr(obj) + + @staticmethod + def make_method(name): + def method(self, *args, **kw): + obj = object.__getattribute__(self, "_obj") + if isinstance(obj, SettingsNotLoadedYet): + raise obj + return getattr(obj, name)(*args, **kw) + + return method + + +def when_loaded_setting(key): + """Helper function that returns a proxy that when the settings is loaded + the object behind the proxy is the value of the key passed to the function + so you can instantiate a settings value before the settings are loaded + and when they are loaded that instance should be the settings value, + if not raises an exception + + Args: + key(str): Path to the setting. + + Returns: + setting proxy. 
+ """ + promised_setting = SettingProxy(SettingsNotLoadedYet("No settings yet")) + Settings().register_promised_setting(key, promised_setting) + return promised_setting diff --git a/tamarco/core/settings/settings.py b/tamarco/core/settings/settings.py new file mode 100755 index 0000000..bd53cd0 --- /dev/null +++ b/tamarco/core/settings/settings.py @@ -0,0 +1,327 @@ +import logging +import os +from typing import NewType, TypeVar + +from tamarco.core.patterns import Singleton +from tamarco.core.settings.backends import DictSettingsBackend, EtcdSettingsBackend, YamlSettingsBackend +from tamarco.core.settings.backends.interface import SettingsInterface, _EmptyArg, _Undefined +from tamarco.core.settings.utils import dict_deep_update +from tamarco.core.utils import get_etcd_configuration_from_environment_variables + +UNDEFINED = _Undefined +logger = logging.getLogger("tamarco.settings") + + +class SettingsNotLoadedYet(Exception): + pass + + +class SettingNotFound(Exception): + def __init__(self, key): + self.key = key + + +Key = NewType("Key", str) +Value = TypeVar("Value", str, int, float, dict) + + +def get_yml_file_from_enviroment_variable(): + tamarco_yml_file = os.environ.get("TAMARCO_YML_FILE", None) + return tamarco_yml_file + + +class Settings(SettingsInterface, metaclass=Singleton): + """Core settings class, here is the unique True of settings all the settings values are cached by this class in his + internal_backend, all of the other settings are views of the data that this class holds. + + The external backend is where the settings should be originally loaded, the internal backend acts as cache + to avoid making many requests to the external backend. + """ + + def __init__(self): + super().__init__() + self.promised_settings = {} + self.internal_backend = DictSettingsBackend({}) + self.external_backend = None + self.loop = None + self.etcd_external = False + + def update_internal(self, dict_settings): + """Update the internal cache with new settings. + + Args: + dict_settings (dict): Settings to add to the internal backend. + """ + dict_deep_update(self.internal_backend.settings, dict_settings) + + async def bind(self, loop): + """Binds the settings to one event loop. + + Args: + loop: Main asyncio event loop. + """ + self.loop = loop + + async def start(self): + """Start the settings. + First loads the settings from the external settings backend (etcd or yaml file) once the internal and external + settings backends are ready, the promised settings (when_loaded_settings) are resolved and the proxies start to + holds the settings values. + """ + self.internal_backend.set_loop(self.loop) + await self._load_external_backend() + await self._resolve_promised_settings() + + async def _load_external_backend(self): + """Loads a external backend either etcd or yaml file in that order. + To load it uses the environment variables TAMARCO_ETCD_HOST, TAMARCO_ETCD_PORT and TAMARCO_YAML_FILE. 
+ """ + yaml_file = get_yml_file_from_enviroment_variable() + etcd_config = get_etcd_configuration_from_environment_variables() + + if etcd_config: + self.external_backend = EtcdSettingsBackend(etcd_config=etcd_config, loop=self.loop) + await self.external_backend.check_etcd_health() + self.etcd_external = True + elif yaml_file: + self.external_backend = YamlSettingsBackend(file=yaml_file, loop=self.loop) + else: + logger.warning("Could not get any settings external backend from the environment") + + async def _resolve_promised_settings(self): + """Set all the settings proxies with his correspondent values.""" + for key, proxies in self.promised_settings.items(): + try: + setting_value = await self.get(key) + except Exception: + logger.warning(f"Error loading promised setting : {key}") + else: + for proxy in proxies: + object.__setattr__(proxy, "_obj", setting_value) + + def register_promised_setting(self, key, promised_setting): + """Register a SettingProxy to be resolved when the settings are loaded. + + Args: + key (str): setting key to register. + promised_setting: setting proxy to register. + """ + self.promised_settings[key].setdefault([]).append(promised_setting) + + async def get(self, key, default=_EmptyArg): + """Get a setting value for a key. + + Args: + key(str): Path to the setting. + default: Default value in the case that it doesn't exists. + + Raises: + SettingNotFound: The setting can't be resolved and it hasn't default value. + + Returns: + Setting value. + """ + logger.debug(f"Getting the setting: {key}") + try: + value = await self.internal_backend.get(key) + if value != UNDEFINED: + return value + except KeyError: + if self.external_backend: + logger.debug(f"Setting {key} not found in internal cache, searching in external backend") + return await self.get_external(key, default) + + if default != _EmptyArg: + return default + else: + raise SettingNotFound(key) + + async def get_external(self, key, default=_EmptyArg): + """Get the setting from the external backend updating the internal one with the value of the external. + + Args: + key (str): Path to the setting. + default: Default value in case that the setting doesn't exists in the external backend. + + Returns: + Setting value. + """ + try: + value = await self.external_backend.get(key, default) + except Exception: + logger.warning(f"Setting {key} not found in external backend") + raise SettingNotFound(key) + else: + await self.internal_backend.set(key, value) + return value + + async def set(self, key, value): # noqa: A003 + """Set a setting value. + + Args: + key (str): Path to the setting. + value: Value to be set in the setting key. + """ + logger.info(f"Changing the value of the setting: {key}") + + await self.internal_backend.set(key, value) + if self.external_backend: + await self.external_backend.set(key, value) + + async def delete(self, key): + """Delete a setting. + + Args: + key (str): Path to the setting. + """ + logger.info(f"Deleting the setting: {key}") + + await self.internal_backend.delete(key) + if self.external_backend: + await self.external_backend.delete(key) + + async def watch(self, key, callback): + """Schedule a callback for when a setting is changed in the etcd backend. + + Args: + key (str): Path to the setting. + callback: function or coroutine to be called when the setting changes, it should have with two input + arguments, one for the setting path and other for the setting value. 
+ """ + if self.etcd_external: + await self.external_backend.watch(key, callback) + else: + logger.warning(f"Trying to watch the setting {key} when it is not in the ETCD backend") + + async def update_internal_settings(self, key, value): + """Update an specific internal setting. + + Args: + key (str): Path to the setting. + value: Setting value. + """ + await self.internal_backend.set(key, value) + logger.debug(f"The internal setting {key} has changed") + + async def watch_and_update(self, key): + """Watch one specific settings and maintain it updated in the internal settings. + + Args: + key (str): Path to the setting. + """ + if self.etcd_external: + await self.external_backend.watch(key, self.update_internal_settings) + else: + logger.warning(f"Trying to watch the setting {key} when it is not in the ETCD backend") + + async def stop(self): + """Perform all the needed tasks in order to stop the Settings.""" + await self.cancel_watch_tasks() + + async def cancel_watch_tasks(self): + """Cancel all the pending watcher tasks of the settings in the etcd backend.""" + if self.etcd_external: + self.external_backend.cancel_watch_tasks() + else: + logger.warning(f"Trying to cancel all settings watcher tasks, but not ETCD backend found. Doing nothing") + + +class SettingsView(SettingsInterface): + """View/chroot/jail/box of main settings class. + Used in the resources to provide them with their subset of settings. + """ + + def __init__(self, settings, prefix, microservice_name=None): + """ + Args: + settings: settings main object. + prefix: prefix where the view is going to operate. + microservice_name: name of the microservice, it is going to be used to find custom settings for a specific + microservice. + """ + self.prefix = prefix + self.settings = settings + self.microservice_name = microservice_name + if microservice_name: + framework_prefix, *setting_route = prefix.split(".") + self.microservice_prefix = f"{framework_prefix}.microservices.{microservice_name}.{'.'.join(setting_route)}" + + async def get(self, key, default=_EmptyArg, raw=False): + """Get setting. + + Args: + key (str): Path to the setting. + default: Default value in case that the setting doesn't exists in the external backend. + raw: if True no prefix is used so is not a view. + """ + if not raw: + general_key = f"{self.prefix}.{key}" + if self.microservice_name: + microservice_key = f"{self.microservice_prefix}.{key}" + value = await self.settings.get(microservice_key, UNDEFINED) + if value != UNDEFINED: + return value + logger.warning( + f"Setting {microservice_key} not found in external backend, it will use {general_key} instead." + ) + return await self.settings.get(general_key, default) + else: + return await self.settings.get(key, default) + + async def set(self, key, value, raw=False): # noqa: A003 + """Set a setting value. + + Args: + key (str): Path to the setting. + default: Default value in the case that it doesn't exists. + raw: If True no prefix is used so is not a view. + + Returns: + Setting value. + """ + if not raw: + key = f"{self.prefix}.{key}" + return await self.settings.set(key, value) + + async def delete(self, key, raw=False): + """Delete a setting. + + Args: + key (str): Path to the setting. + raw: If True no prefix is used so is not a view. + """ + if not raw: + key = f"{self.prefix}.{key}" + return await self.settings.delete(key) + + async def watch(self, key, callback, raw=False): + """Schedule a callback for when a setting is changed in the etcd backend. 
+ + Args: + key (str): Path to the setting. + callback: Callback to run whenever the `key` changes. + raw: If True no prefix is used so is not a view. + """ + key_microservice = key + if not raw: + if self.microservice_name: + key_microservice = f"{self.microservice_prefix}.{key}" + key = f"{self.prefix}.{key}" + + await self.settings.watch(key, callback) + if self.microservice_name: + await self.settings.watch(key_microservice, callback) + + async def update_internal_settings(self, key, value): + """Update internal settings. + + Args: + key (str): Path to the setting. + value: Setting value. + """ + await self.settings.update_internal_settings(key, value) + logger.debug(f"The internal setting {key} has changed") + + async def cancel_watch_tasks(self): + """Cancel all the pending watcher tasks of the settings in the etcd backend.""" + await self.settings.cancel_watch_tasks() diff --git a/tamarco/core/settings/utils/__init__.py b/tamarco/core/settings/utils/__init__.py new file mode 100644 index 0000000..a1df294 --- /dev/null +++ b/tamarco/core/settings/utils/__init__.py @@ -0,0 +1,2 @@ +from .etcd_tool import EtcdTool # noqa: F401 +from .utils import _format_key_from_etcd, dict_deep_update, format_key_to_etcd, parse_dir_response # noqa: F401 diff --git a/tamarco/core/settings/utils/etcd_tool.py b/tamarco/core/settings/utils/etcd_tool.py new file mode 100644 index 0000000..6f891e8 --- /dev/null +++ b/tamarco/core/settings/utils/etcd_tool.py @@ -0,0 +1,107 @@ +import logging + +import etcd +import ujson +import yaml + + +class EtcdTool: + """Utility to load settings to an etcd server from a yml file source. + + Unlike the rest of Tamarco it is based in a syncronous library because it is designed to be used as utility + from the command line or testing. + """ + + def __init__(self, host="127.0.0.1", port=2379, protocol="https", encode=None): + """ + Args: + host (str): Etcd host. + port (int): Etcd port. + protocol (str): Protocol to use with the etcd server. + encode (str): Json to encode the values with a json encoder. + """ + self.client = etcd.Client(host, port, protocol) + self.encode = encode + + def write(self, key, value): + """Set one etcd key with the value. + + Args: + key (str): Path to the setting. + value: Key value. + """ + try: + if self.encode == "json": + value = ujson.dumps(value) + self.client.write(key, value) + except Exception: + logging.error(f"Error writing key {key} to etcd.") + raise + + def read(self, key, recursive=False, value=False): + """Read data from etcd. + + Args: + key (str): Path to the setting. + recursive (bool): Read recursively a etcd directory. + value (bool): Get the value + + Returns: + Etcd value or directory structure. + """ + try: + if value: + response = self.client.read(key, recursive=False).value + else: + return self.client.read(key, recursive=recursive) + + if self.encode == "json": + return ujson.loads(response) + return response + except Exception: + logging.error(f"Error reading key {key}.") + raise + + def delete(self, key, recursive=False): + """Delete one key in etcd. + + Args: + key (str): Path to the setting. + recursive (bool): Delete recursively a etcd directory. + """ + try: + self.client.delete(key, recursive=recursive) + except Exception: + logging.error(f"Error deleting key {key}.") + raise + + def load_items(self, yml_dictionary, path=""): + """Write a dictionary recursively in a certain path of etcd. + + Args: + yml_dictionary (dict): dictionary that represents the settings to load in etcd. 
+ path (str): path where to write the yml_dictionary. + """ + for key, value in yml_dictionary.items(): + new_path = path + "/" + key + if isinstance(value, dict): + self.load_items(value, new_path) + else: + if self.encode == "json": + value = ujson.dumps(value) + self.write(new_path, value) + + def load(self, yml_file): + """Load and convert a yml file to a dictionary then it saves the dictionary in etcd, mapping the dictionary's + keys to settings paths. + + Args: + yml_file (str): path the the yml file to read. + """ + logging.info(f"Loading file: {yml_file}") + try: + yml_dictionary = yaml.full_load(open(yml_file)) + self.load_items(yml_dictionary) + except Exception: + logging.error(f"Error reading file {yml_file}.") + raise diff --git a/tamarco/core/settings/utils/utils.py b/tamarco/core/settings/utils/utils.py new file mode 100644 index 0000000..944ed61 --- /dev/null +++ b/tamarco/core/settings/utils/utils.py @@ -0,0 +1,70 @@ +import ujson + + +def format_key_to_etcd(key): + """Format a dot based key as a etcd key. + + Args: + key (str): Path to the setting. + + Returns: + str: Formatted key. + """ + return "/" + key.replace(".", "/") + + +def _format_key_from_etcd(queried_key, response_key): + """Format a key from etcd format to a subkey in dotted format.""" + queried_key = format_key_to_etcd(queried_key) + key = response_key.replace(queried_key, "", 1) + if key.startswith("/"): + key = key[1:] + return key + + +def parse_dir_response(response, queried_key): + """Parse a response that is a dir to a python dict. + + Args: + response: Etcd response. + queried_key: Key of the query in the etcd response. + + Returns: + dict: Dictionary with the parsed response. + """ + setting = {} + for result in response.children: + if result.value is None: + break + if result.dir: + setting.update({_format_key_from_etcd(queried_key, result.key): parse_dir_response(result, queried_key)}) + else: + sub_key = _format_key_from_etcd(queried_key, result.key) + if "/" in sub_key: + sub_keys = sub_key.split("/")[:-1] + last_key = sub_key.split("/")[-1:][0] + sub_setting = setting + for key in sub_keys: + sub_setting[key] = sub_setting.get(key, {}) + sub_setting = sub_setting[key] + sub_setting.update({last_key: ujson.loads(result.value)}) + else: + setting.update({sub_key: ujson.loads(result.value)}) + return setting + + +def dict_deep_update(target, update): + """Recursively update a dict. Subdict's won't be overwritten but also updated. + + Args: + target: Target dictionary to update. + update: Parameters to update. + Returns: + dict: Updated dictionary. + """ + for key, value in update.items(): + if key not in target: + target[key] = value + elif isinstance(value, dict): + target[key] = dict_deep_update(value, target[key]) + return target diff --git a/tamarco/core/signals.py b/tamarco/core/signals.py new file mode 100755 index 0000000..535ae64 --- /dev/null +++ b/tamarco/core/signals.py @@ -0,0 +1,85 @@ +import asyncio +import logging +import signal + +from tamarco.core.patterns import Singleton + +logger = logging.getLogger("tamarco.signals") + +# Object that holds all the signals names and numbers, +# is a copy of the values of the standard module of python +signals_name_to_number = {} + +for signal_name in filter(lambda may_sig_name: may_sig_name.startswith("SIG"), dir(signal)): + signals_name_to_number[signal_name] = getattr(signal, signal_name) + + +def signal_handler(signal_number): + """Decorator to declare a function as a signal handler. 
+
+    Args:
+        signal_number (int): integer with the signal number to handle.
+    """
+
+    def wrapper(fn):
+        SignalsManager().register_signal(fn, signal_number)
+        return fn
+
+    return wrapper
+
+
+class SignalsManager(metaclass=Singleton):
+    """Class responsible for handling Unix signals."""
+
+    handlers = {}
+
+    def __init__(self):
+        self.loop = None
+        self.async_timeout_seconds = 5
+
+    def set_loop(self, loop):
+        """Declares the event loop used for handlers that are coroutines.
+
+        Args:
+            loop: asyncio event loop where to launch coroutines.
+        """
+        self.loop = loop
+
+    def register_signal(self, handler, signal_number):
+        """Register a handler for a signal.
+
+        Args:
+            handler: function or coroutine to handle the signal.
+            signal_number (int): number of the signal to be handled.
+        """
+        if signal_number not in self.handlers:
+            self.handlers[signal_number] = []
+            signal.signal(signal_number, self._dispatch_signal)
+        self.handlers[signal_number].append(handler)
+
+    async def _cancellation_wrapper(self, task):
+        try:
+            await asyncio.wait_for(task, self.async_timeout_seconds)
+        except asyncio.TimeoutError:
+            logger.warning(f"Timeout triggered calling {task} in async signal handler", exc_info=True)
+
+    def _dispatch_signal(self, signum, frame):
+        for handler in self.handlers[signum]:
+            if asyncio.iscoroutinefunction(handler):
+                try:
+                    asyncio.ensure_future(self._cancellation_wrapper(task=handler(signum, frame)), loop=self.loop)
+                except Exception:
+                    logger.warning(
+                        f"Error trying to execute the async signal handler {handler} of the "
+                        f"signal: {signum}",
+                        exc_info=True,
+                    )
+            else:
+                try:
+                    handler(signum, frame)
+                except Exception:
+                    logger.warning(
+                        f"Unexpected exception in the signal handler {handler} of the "
+                        f"signal: {signum}",
+                        exc_info=True,
+                    )
diff --git a/tamarco/core/tasks.py b/tamarco/core/tasks.py
new file mode 100755
index 0000000..b77f545
--- /dev/null
+++ b/tamarco/core/tasks.py
@@ -0,0 +1,243 @@
+import asyncio
+import logging
+from functools import partial, wraps
+from threading import Thread
+from typing import Callable, Coroutine
+
+from tamarco.core.utils import is_awaitable
+
+logger = logging.getLogger("tamarco.tasks")
+
+THREAD_STOP_TIMEOUT = 3
+
+
+async def observe_exceptions(coro, name):
+    """Function wrapper to observe exceptions of a coroutine in the logs.
+
+    Args:
+        coro: Coroutine to wrap with logging.
+        name (str): Task name.
+    """
+    try:
+        return await coro
+    except asyncio.CancelledError:
+        logger.warning(f"Tamarco task {name} cancelled")
+        raise
+    except Exception:
+        logger.warning(f"Unexpected exception in Tamarco task {name}", exc_info=True)
+        raise
+
+
+def get_task_wrapper(coro_fn, name):
+    """Returns a coroutine that logs unexpected exceptions.
+
+    Args:
+        coro_fn: Coroutine to wrap.
+        name (str): Task name.
+    """
+
+    @wraps(coro_fn)
+    async def wrapper(*args, **kwargs):
+        coro = coro_fn(*args, **kwargs)
+        return await observe_exceptions(coro, name)
+
+    return wrapper
+
+
+def get_thread_wrapper(target, name):
+    """Returns a thread target that logs unexpected exceptions.
+
+    Args:
+        target: Function or coroutine to wrap.
+        name (str): Task name.
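+
+    Example (illustrative sketch; ``worker`` is a hypothetical thread target):
+        >>> def worker():
+        >>>     ...
+        >>>
+        >>> Thread(target=get_thread_wrapper(worker, "worker")).start()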
+ """ + + @wraps(target) + def wrapper(*args, **kwargs): + try: + result = target(*args, **kwargs) + except Exception: + logger.warning(f"Unexpected exception in Tamarco thread {name}", exc_info=True) + raise + else: + if is_awaitable(result): + thread_loop = asyncio.new_event_loop() + asyncio.set_event_loop(thread_loop) + coro = result + result = thread_loop.run_until_complete(observe_exceptions(coro, name)) + return result + + return wrapper + + +class TasksManager: + """Helper class to handle asyncio tasks and threads. + The class is responsible of the start and stop of the tasks/threads. + """ + + def __init__(self, task_limit=None): + self.tasks_coros = {} + self.threads_fns = {} + self.tasks = {} + self.threads = {} + self.loop = asyncio.get_event_loop() + self.task_limit = task_limit + + def set_loop(self, loop): + """Sets the loop where the asyncio tasks are going to run. + + Args: + loop: Asyncio event loop. + """ + self.loop = loop + + def register_task(self, name, task_coro, *args, **kwargs): + """Registers an asyncio task. + + Args: + name (str): Name of task. For identification purposes. + task_coro: Coroutine or function that is going to be wrapped by the task. + *args: Arguments for the coroutine. + **kwargs: Keywords arguments for the coroutine. + """ + if not is_awaitable(task_coro): + task_coro = task_coro(*args, **kwargs) + self._register_task(name, task_coro) + + def _register_task(self, name: str, task_coro: Coroutine): + assert name not in self.tasks_coros, f"Error, name {name} of task already taken for Tasks fns" + assert asyncio.iscoroutine(task_coro), f"Error, Task {name} is no a coroutine" + self.tasks_coros[name] = task_coro + + def register_thread(self, name, thread_fn, *args, **kwargs): + """Registers a thread. + + Args: + name (str): Name of the thread to register. It is for identification purposes. + thread_fn: Main function of the thread. + *args: Arguments for the thread function. + **kwargs: Keywords arguments for the thread function. + """ + self._register_thread(name, partial(thread_fn, *args, **kwargs)) + + def _register_thread(self, name: str, thread_fn: Callable) -> None: + assert name not in self.threads_fns, f"Error name {name} of thread already taken for Threads fns" + self.threads_fns[name] = thread_fn + + def stop_all(self): + """Stop all running threads and tasks.""" + self.stop_all_threads() + + for task_name in list(self.tasks.keys()): + self.stop_task(task_name) + self.tasks.clear() + + def start_all(self): + """Starts all registered threads and tasks.""" + for thread_name in self.threads_fns.keys(): + self.start_thread(thread_name) + self.threads_fns.clear() + + for task_name in self.tasks_coros.keys(): + self.start_task(task_name) + self.tasks_coros.clear() + + def start_task(self, name, task_coro=None): + """Start a single task. + + Args: + name (str): Name of task. For identification purposes. + task_coro: Coroutine or function that is going to be wrapped by the task. + + Returns: + Asyncio future with the result of the task. + """ + assert name not in self.tasks, f"Name {name} is already taken for a task" + if not task_coro: + task_coro = self.tasks_coros[name] + logger.debug(f"Starting the task {name}") + self.tasks[name] = asyncio.ensure_future(task_coro, loop=self.loop) + self.tasks[name].add_done_callback(partial(self._delete_task, name)) + return self.tasks[name] + + async def wait_for_start_task(self, name, task_coro): + """Start task waiting to open it if the number of concurrent tasks exceeds the task limit. 
+ + Args: + name (str): Name of task. For identification purposes. + task_coro: Coroutine or function that is going to be wrapped by the task. + + Returns: + Asyncio future with the result of the task. + """ + if self.task_limit: + while True: + if len(self.tasks) < self.task_limit: + return self.start_task(name, task_coro) + logger.debug("Limit number of coroutines reached in Tamarco task manager, waiting to open a new one") + await asyncio.sleep(0.1, loop=self.loop) + else: + return self.start_task(name, task_coro) + + def _delete_task(self, name, future): + try: + del self.tasks[name] + except KeyError: + logger.debug(f"Task {name} was already removed from the task manager when the done callback is called") + + def start_thread(self, name) -> Thread: + """Start a thread by name. + + Args: + name(str): Thread name. For identification purposes. + + Returns: + Thread: Thread object. + """ + assert name not in self.threads, f"Name {name} is already taken for a task" + logger.debug(f"Starting the thread {name}") + self.threads[name] = Thread(target=self.threads_fns[name], name=name) + self.threads[name].stop = False + self.threads[name].start() + return self.threads[name] + + def stop_all_threads(self): + """Stop all threads.""" + for name, thread in self.threads.items(): + logger.debug(f"Stopping thread {name}") + thread.stop = True + + for name, thread in self.threads.items(): + self._join_thread(name, thread) + + self.threads.clear() + + def stop_task(self, name): + """Stop a task by name. + + Args: + name (str): Name of task. For identification purposes. + """ + logger.debug(f"Stopping the task {name}") + self.tasks[name].cancel() + del self.tasks[name] + + def stop_thread(self, name): + """Stop a thread by name. + + Args: + name(str): Thread name. For identification purposes. + """ + thread = self.threads[name] + logger.debug(f"Stopping the thread {name}") + thread.stop = True + self._join_thread(name, thread) + del self.threads[name] + + @staticmethod + def _join_thread(name: str, thread) -> None: + thread.join(timeout=THREAD_STOP_TIMEOUT) + if thread.is_alive(): + logger.warning(f"Trying to stop thread {name}, but did not join") + else: + logger.debug(f"Stopped thread {name}") diff --git a/tamarco/core/utils.py b/tamarco/core/utils.py new file mode 100755 index 0000000..3252ce6 --- /dev/null +++ b/tamarco/core/utils.py @@ -0,0 +1,150 @@ +import asyncio +import inspect +import logging +import os +import platform +import sys +import threading +from pprint import pformat + +import aiohttp + +logger = logging.getLogger("tamarco") + +ROOT_SETTINGS = "system" + + +def is_awaitable(obj): + """Detect awaitable objects. + + Args: + obj: Object to inspect. + + Returns: + bool: True if the object is awaitable, False if not. + """ + return asyncio.iscoroutine(obj) or hasattr(obj, "__await__") + + +def get_etcd_configuration_from_environment_variables() -> dict: + """Returns the etcd configuration from the enviroment variables. + + Returns: + dict: Etcd configuration with host and port keys. + """ + tamarco_etcd_host = os.environ.get("TAMARCO_ETCD_HOST", None) + tamarco_etcd_port = os.environ.get("TAMARCO_ETCD_PORT", 2379) + if tamarco_etcd_host is None: + return {} + etcd_configuration = {"host": tamarco_etcd_host, "port": int(tamarco_etcd_port)} + return etcd_configuration + + +def inside_a_container(): + """Detect if the process is inside a Docker container. + + Returns: + bool: True if the microservice is running inside a docker container. 
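+
+    Example:
+        >>> if inside_a_container():
+        >>>     print("Running inside Docker")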
+ """ + cgroups = open("/proc/1/cgroup", "r").read() + return "docker" in cgroups + + +def get_fn_full_signature(fn): + args_sig = inspect.signature(fn) + return f"def {fn.__name__}{args_sig}" + + +class Informer: + """Return information about the environment where the microservice is running.""" + + @classmethod + def log_all_info(cls, logger): + logger.info("Information:\n" + pformat(cls.report_all_info())) + + @classmethod + def report_all_info(cls): + info = {} + info.update(cls.report_os_info()) + info.update(cls.report_git_info()) + info.update(cls.report_interpreter_info()) + return info + + @staticmethod + def report_os_info(): + return { + "os_info": { + "process_id": os.getpid(), + "container_id": os.environ.get("HOSTNAME", "not_in_container"), + "platform": platform.platform(), + } + } + + @staticmethod + def report_interpreter_info(): + return { + "python_interpreter": { + "executable": sys.executable, + "version": str(sys.version), + "flags": sys.flags, + "implementation": sys.implementation, + } + } + + @staticmethod + def report_git_info(): + try: + from git import Repo + + repo = Repo(".") + branch = repo.active_branch + return { + "git": { + "branch": branch.name, + "commit": {"hash": str(branch.commit), "datetime": str(branch.commit.authored_datetime)}, + } + } + except ImportError: + logger.warning("Git package is not installed, omitting version control information") + return {} + except Exception: + logger.warning("Unexpected exception trying to get version control information, omitting that information") + return {} + + +def set_thread_name(): + """Allow to distinguish more easily the different threads of a microservice by their name.""" + try: + import prctl + + thread_name = threading.current_thread().name + prctl.set_name(thread_name) + except ImportError: + logger.warning("Library prctl is not installed. Thread names cannot be set") + except Exception: + logger.warning("Unexpected exception setting the thread name for the prctl process") + + +class QueueTimeout(asyncio.Queue): + def __init__(self, *args, **kwargs): + self.timeout = kwargs.pop("timeout", 10) + super().__init__(*args, **kwargs) + + async def get(self): + return await asyncio.wait_for(super().get(), self.timeout) + + +async def check_connection_http_url(url, loop=None, retries=3): + if not url: + return False + for i in range(1, retries + 1): + print(f"Checking connection #{i} from URL={url}") + async with aiohttp.ClientSession(loop=loop) as client: + try: + async with client.get(url) as resp: + assert resp.status == 200 + except Exception: + await asyncio.sleep(1) + else: + return True + return False diff --git a/tamarco/resources/__init__.py b/tamarco/resources/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/resources/bases.py b/tamarco/resources/bases.py new file mode 100755 index 0000000..c3cd841 --- /dev/null +++ b/tamarco/resources/bases.py @@ -0,0 +1,188 @@ +from typing import List + +from tamarco.resources.basic.status.status_codes import StatusCodes + + +class BaseResource: + """Define the basic interface of a resource. + All the tamarco resources should inherit from this class. + + + Resource start call chain: + 1. bind + 2. configure_settings + 3. pre_start + 4. start + 5. post_start + + Resource stop call chain: + 1. stop + 2. 
post_stop
+    """
+
+    depends_on = []
+    loggers_names = []
+
+    def __init__(self):
+        self.name = None
+        self.microservice = None
+        self.settings = None
+        self._status = StatusCodes.NOT_STARTED
+
+    async def bind(self, microservice, name):
+        """Build method, the microservice binds all its resources. The microservice starts and stops the resources.
+
+        Args:
+            microservice (Microservice): Microservice instance managing the resource.
+            name (str): Name of the resource instance in the microservice class.
+        """
+        self.microservice = microservice
+        self.name = name
+
+    async def configure_settings(self, settings):
+        """Build method, the microservice provides the settings class of each resource.
+        The resource should read the settings via this object.
+
+        Args:
+            settings (SettingsView): Settings view of the resource.
+        """
+        self.settings = settings
+
+    async def pre_start(self):
+        """Pre start stage of the resource lifecycle."""
+        pass
+
+    async def start(self):
+        """Start stage of the resource lifecycle."""
+        self._status = StatusCodes.STARTED
+
+    async def post_start(self):
+        """Post start stage of the resource lifecycle."""
+        pass
+
+    async def stop(self):
+        """Stop stage of the resource lifecycle."""
+        self._status = StatusCodes.STOPPED
+
+    async def post_stop(self):
+        """Post stop stage of the resource lifecycle."""
+        pass
+
+    async def status(self) -> dict:
+        """Return information about the state of the resource."""
+        return {"status": self._status}
+
+    def __repr__(self):
+        return f"<{self.__class__} name={self.name}>"
+
+
+class StreamBase:
+    def __init__(self, name, codec=None, resource=None):
+        """
+        Args:
+            name (str): Name of the stream.
+            codec (CodecInterface): Used to decode the input and output from the stream.
+            resource (BaseResource): Owner resource of the stream.
+        """
+        self.name = name
+        self.codec = codec
+        self.resource = resource
+
+
+class InputBase(StreamBase):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.on_message_callback = None
+        if self.resource:
+            self.resource.add_input(self)
+
+    def __call__(self, callback):
+        """Allow the input to behave as a decorator."""
+        self.on_message_callback = callback
+        return self
+
+    def __aiter__(self):
+        """Allow the input to behave as an asynchronous iterator."""
+        return self
+
+    def __anext__(self):
+        """Allow the input to behave as an asynchronous iterator."""
+        raise NotImplementedError("The __anext__ method should be implemented in the child class.")
+
+    def __repr__(self):
+        return f"Tamarco Input {self.name} from resource {self.resource}"
+
+
+class OutputBase(StreamBase):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if self.resource:
+            self.resource.add_output(self)
+
+    def __repr__(self):
+        return f"Tamarco Output {self.name} from resource {self.resource}"
+
+
+class IOResource(BaseResource):
+    """Extended resource that manages I/O streams, like Kafka and AMQP."""
+
+    def __init__(self, inputs: List = None, outputs: List = None):
+        super().__init__()
+        self.inputs = {}
+        self.outputs = {}
+        inputs = [] if inputs is None else inputs
+        outputs = [] if outputs is None else outputs
+
+        for input_element in inputs:
+            self.add_input(input_element)
+        for output_element in outputs:
+            self.add_output(output_element)
+
+    def add_input(self, input_to_add):
+        """Add one input.
+
+        Args:
+            input_to_add (InputBase): Input to add.
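+
+        Example (minimal sketch):
+            >>> io_resource = IOResource()
+            >>> io_resource.add_input(InputBase(name="events"))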
+ """ + assert input_to_add.name not in self.inputs, "Error two inputs with the same name" + self.inputs[input_to_add.name] = input_to_add + + def add_output(self, output): + """Add one output. + + Args: + output (OutputBase): Output to add. + """ + assert output.name not in self.outputs, "Error two outputs with the same name" + self.outputs[output.name] = output + + +class DatabaseResource(BaseResource): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.client = None + + async def start(self, clean_database=False, register_scripts=True): + await self.connect() + await self.init_db(clean_database=clean_database, register_scripts=register_scripts) + await super().start() + + async def stop(self): + self._status = StatusCodes.STOPPING + await self.disconnect() + await super().stop() + + async def connect(self, *args, **kwargs): + pass + + async def disconnect(self, *args, **kwargs): + pass + + async def init_db(self, *args, **kwargs): + pass + + def clear(self): + pass + + async def status(self): + return {"status": self._status} diff --git a/tamarco/resources/basic/__init__.py b/tamarco/resources/basic/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/resources/basic/metrics/__init__.py b/tamarco/resources/basic/metrics/__init__.py new file mode 100755 index 0000000..c84e8b2 --- /dev/null +++ b/tamarco/resources/basic/metrics/__init__.py @@ -0,0 +1,3 @@ +from .manager import MetersManager + +__all__ = ["MetersManager"] diff --git a/tamarco/resources/basic/metrics/collector.py b/tamarco/resources/basic/metrics/collector.py new file mode 100755 index 0000000..66dbaab --- /dev/null +++ b/tamarco/resources/basic/metrics/collector.py @@ -0,0 +1,95 @@ +import logging +import threading +import time + +from tamarco.core.utils import set_thread_name + +logger = logging.getLogger("tamarco.metrics") + + +class MetricsCollector: + """"Collect all the metrics from all the meters and using all the handler configured. + + Attributes: + meters (list): List of meters from which the metrics will be obtained. + handlers (list): List of metrics handlers where the metrics will be sent/stored. + collect_period (int): interval time in seconds between the beginning of the metrics harvest. + """ + + meters = [] + handlers = [] + collect_period = 10 + + def __new__(cls, *args, **kwargs): + raise NotImplementedError + + @classmethod + def add_handler(cls, handler): + """Append a handler in the handlers list. + + Args: + handler: Metrics handler. + """ + cls.handlers.append(handler) + + @classmethod + def run(cls): + """Collect the available metrics at defined time intervals.""" + set_thread_name() + thread = threading.currentThread() + while not thread.stop: + start_time = time.time() + try: + cls.collect_metrics() + cls.reset_meters() + except Exception: + logger.warning("Unexpected exception in Metrics collector thread", exc_info=True) + cls.sleep_until_the_next_write(start_time) + + @classmethod + def collect_metrics(cls): + """Build the metrics reports for all configured handlers.""" + for handler in cls.handlers: + handler.write(cls.meters) + + @classmethod + def reset_meters(cls): + """Reset the Summary meter.""" + for meter in cls.meters: + from tamarco.resources.basic.metrics.meters import Summary + + if isinstance(meter, Summary): + meter._reset() + + @classmethod + def sleep_until_the_next_write(cls, start_time): + """Sleep the remaining time left (after the beginning of the metrics harvest) to reach + `collect_period` seconds. 
+ + Args: + start_time (int): Time in seconds when the metrics harvest started. + """ + end_time = time.time() + elapsed_time = end_time - start_time + sleep_time = cls.collect_period - elapsed_time + if sleep_time > 0: + time.sleep(sleep_time) + + @classmethod + def register_metric(cls, meter): + """Append a new meter to the meters list. + + Args: + meter: Metrics meter. + """ + cls.meters.append(meter) + + +class CollectorThread(threading.Thread): + """Run the metrics collector in a new thread.""" + + stop = False + name = "MetricsColl" + + def run(self): + MetricsCollector.run() diff --git a/tamarco/resources/basic/metrics/manager.py b/tamarco/resources/basic/metrics/manager.py new file mode 100755 index 0000000..36f351e --- /dev/null +++ b/tamarco/resources/basic/metrics/manager.py @@ -0,0 +1,48 @@ +from tamarco.resources.basic.metrics.collector import CollectorThread, MetricsCollector + + +class MetersManager: + """Class to manage the metrics harvest. + + Args: + thread: Thread where the metrics collector will start. + default_collect_period (int): Default interval time in seconds between the beginning of the metrics harvest. + """ + + thread = CollectorThread() + default_collect_period = 10 + + def __new__(cls, *args, **kwargs): + raise NotImplementedError + + @classmethod + def configure(cls, config): + """Load the settings configuration. + + Args: + config (dict): settings configuration for the Metrics Collector. + """ + for handler_data in config.get("handlers", []): + handler = handler_data.pop("handler") + cls.add_handler(handler(**handler_data)) + + MetricsCollector.collect_period = config.get("collect_period", cls.default_collect_period) + + @classmethod + def add_handler(cls, handler): + """Append new handler to the MetricsCollector handlers list. + + Args: + handler: Metrics handler. 
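+
+        Example (illustrative; assumes StdoutHandler's default constructor):
+            >>> from tamarco.resources.basic.metrics.reporters import StdoutHandler
+            >>> MetersManager.add_handler(StdoutHandler())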
+ """ + MetricsCollector.add_handler(handler) + + @classmethod + def start(cls): + """Start the thread where the Metrics Collector starts.""" + cls.thread.start() + + @classmethod + def stop(cls): + """Stop the thread where the Metrics Collector starts.""" + cls.thread.stop = True diff --git a/tamarco/resources/basic/metrics/meters/__init__.py b/tamarco/resources/basic/metrics/meters/__init__.py new file mode 100755 index 0000000..8b4388c --- /dev/null +++ b/tamarco/resources/basic/metrics/meters/__init__.py @@ -0,0 +1,6 @@ +from .base import Timer +from .counter import Counter +from .gauge import Gauge +from .summary import Summary + +__all__ = ["Counter", "Gauge", "Summary", "Timer"] diff --git a/tamarco/resources/basic/metrics/meters/base.py b/tamarco/resources/basic/metrics/meters/base.py new file mode 100755 index 0000000..c254c8d --- /dev/null +++ b/tamarco/resources/basic/metrics/meters/base.py @@ -0,0 +1,203 @@ +import asyncio +import time +from collections import namedtuple +from copy import copy +from functools import wraps + +import inflection + +from tamarco.core.patterns import FlyweightWithLabels +from tamarco.resources.basic.metrics.collector import MetricsCollector + +Metric = namedtuple("Metric", ["id", "labels", "value", "units", "timestamp", "empty"]) + + +def metric_factory(metric_id, value, units, timestamp, empty=False, labels=None): + return Metric( + id=metric_id, value=value, units=units, timestamp=timestamp, empty=empty, labels=labels if labels else {} + ) + + +class BaseMeter(metaclass=FlyweightWithLabels): + """Common part of all the meters.""" + + def __init__(self, metric_id, measurement_unit, labels=None, *args, **kwargs): + self.metric_id = metric_id + self.metric_type = inflection.underscore(self.__class__.__name__) + self.labels = labels if labels else {} + self.measurement_unit = measurement_unit + self.init_args = args + self.init_kwargs = kwargs + MetricsCollector.register_metric(self) + + @property + def timestamp(self): + return time.time() + + def new_labels(self, labels): + """Return another instance of the meter with other labels. + Used for adding or updating labels on the fly. + + Examples: + >>> http_meter = BaseMeter("http_requests", "requests", labels={'protocol': 'http'}) + >>> + >>> def handle_http_404(): + >>> error_meter = http_meter.new_labels({'status_code': 404}) + """ + updated_labels = copy(self.labels) + updated_labels.update(labels) + return self.__class__( + key=self.metric_id, + measurement_unit=self.measurement_unit, + labels=updated_labels, + *self.init_args, + **self.init_kwargs + ) + + +class Timer: + """Measures intervals of time. + The instances of this class measure intervals of time and when calls the callback with the period of time in seconds + Them can work as a decorator for functions or coroutines or as a context manager. + This class is conceived for the internal use of the Tamarco metrics library. + + Example: + >>> # This are going to print the elapsed time in this function every time it is called. + >>> @Timer(callback=lambda time: print(f"The elapsed time is {time}")) + >>> def some_task(): + >>> time.sleep(1) + >>> + >>> some_task() + >>> + >>> # It also works with coroutines. 
+        >>> import asyncio
+        >>>
+        >>> @Timer(callback=lambda time: print(f"The elapsed time is {time}"))
+        >>> async def some_task():
+        >>>     await asyncio.sleep(1)
+        >>>
+        >>> asyncio.get_event_loop().run_until_complete(some_task())
+        >>>
+        >>> # And as a context manager
+        >>> with Timer(callback=lambda time: print(f"The elapsed time is {time}")):
+        >>>     time.sleep(1)
+
+    """
+
+    def __init__(self, callback):
+        self.callback = callback
+        self.time_start = None
+        self.time_end = None
+
+    def __enter__(self):
+        """Allow the timer to behave as a context manager."""
+        self.time_start = time.time()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Allow the timer to behave as a context manager."""
+        time_end = time.time()
+        value = time_end - self.time_start
+        self.callback(value)
+
+    def __call__(self, func):
+        """Allow the timer to behave as a decorator."""
+        if asyncio.iscoroutinefunction(func):

+            @wraps(func)
+            async def wrapper(*args, **kwargs):
+                with self:
+                    return await func(*args, **kwargs)

+        else:

+            @wraps(func)
+            def wrapper(*args, **kwargs):
+                with self:
+                    return func(*args, **kwargs)

+            return wrapper
+        return wrapper


+class ExceptionMonitor:
+    """Provides the ability to trigger an event_callback when an exception is captured.
+    Works as a context manager and as a decorator.
+
+    This class is conceived for the internal use of the Tamarco metrics library.
+
+    Example:
+        >>> exception_monitor = ExceptionMonitor(lambda: print('Exception !!'))
+        >>>
+        >>> with exception_monitor:
+        >>>     raise Exception
+        >>>
+        >>> @exception_monitor
+        >>> def may_raise():
+        >>>     raise Exception
+    """
+
+    def __init__(self, event_callback):
+        self.event_callback = event_callback
+
+    def __call__(self, function):
+        if asyncio.iscoroutinefunction(function):

+            @wraps(function)
+            async def wrapper(*args, **kwargs):
+                try:
+                    return await function(*args, **kwargs)
+                except Exception:
+                    self.event_callback()
+                    raise

+        else:

+            @wraps(function)
+            def wrapper(*args, **kwargs):
+                try:
+                    return function(*args, **kwargs)
+                except Exception:
+                    self.event_callback()
+                    raise

+            return wrapper
+        return wrapper
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_val:
+            self.event_callback()


+def spy_factory(wrapped_object, event_callback):
+    """Return a wrapped object that triggers the event callback every time the object is called.
+
+    Example:
+        >>> SpyObject = spy_factory(object, lambda: print("An object is created"))
+        >>> SpyObject()  # This line also prints "An object is created"
+
+    Args:
+        wrapped_object: Object to wrap.
+        event_callback: Function to be called when the object is called.
+
+    Returns:
+        Wrapped object.
+ """ + if asyncio.iscoroutinefunction(wrapped_object): + + @wraps(wrapped_object) + async def wrapper(*args, **kwargs): + event_callback() + return await wrapped_object(*args, **kwargs) + + else: + + @wraps(wrapped_object) + def wrapper(*args, **kwargs): + event_callback() + return wrapped_object(*args, **kwargs) + + return wrapper diff --git a/tamarco/resources/basic/metrics/meters/counter.py b/tamarco/resources/basic/metrics/meters/counter.py new file mode 100755 index 0000000..c3810a3 --- /dev/null +++ b/tamarco/resources/basic/metrics/meters/counter.py @@ -0,0 +1,176 @@ +from copy import copy +from functools import wraps + +from tamarco.resources.basic.metrics.meters.base import BaseMeter, ExceptionMonitor, metric_factory, spy_factory + + +class Counter(BaseMeter): + """A counter is a cumulative metric that represents a single numerical value that only ever goes up. + The counter reset when the server restart. + A counter is typically used to count requests served, tasks completed, errors occurred, etc. + Counters should not be used to expose current counts of items whose number can also go down, + e.g. the number of currently running coroutines. Use gauges for this use case. + + Example: + >>> cats_counter = Counter('cats', 'cat') + >>> cats_meow_counter = Counter('meows', 'meow') + >>> cats_jump_counter = Counter('jumps', 'jump') + >>> + >>> # For example, we can count the cats in our server + >>> + >>> class Cat: + >>> def __init__(self): + >>> cats_counter.inc() + >>> + >>> # Works as a decorator + >>> @cats_meow_counter + >>> def meow(self): + >>> print('meow') + >>> + >>> # Also as a decorator of coroutines + >>> @cats_jump_counter + >>> async def jump(self): + >>> print("\\n") + + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.counter = 0 + + def current_value(self): + """Returns the current value of the counter.""" + return self.counter + + def inc(self, value=1): + """Increase the value of the counter, the default value is 1. + + Args: + value: Number to increment. + """ + assert ( + isinstance(value, int) or isinstance(value, float) + ) and value >= 0, "Counter only operates with positive integers or floats" + self.counter += value + + def count_exceptions(self): + """It works as a decorator or as context manager. + The exceptions aren't silenced. + + Example: + >>> exceptions_counter = Counter("exceptions", 'exception') + >>> + >>> with exceptions_counter.count_exceptions(): + >>> print('Counting exceptions in this context') + >>> ... + >>> + >>> http_errors = Counter("http_errors", 'error') + >>> + >>> @http_errors.count_exceptions() + >>> async def http_handler(request): + >>> ... + """ + return ExceptionMonitor(lambda: self.inc()) + + def __call__(self, function): + """ + Allow the Counter to work as a decorator of a function or a coroutine. + + Args: + function: Function to decorate. + + Returns: + Wrapped function that increases the counter in each call. 
+ """ + return spy_factory(function, lambda: self.inc()) + + def _collect_metrics(self): + return [metric_factory(self.metric_id, self.counter, self.measurement_unit, self.timestamp, labels=self.labels)] + + +class HTTPCounter(Counter): + def __init__(self, metric_id, *args, **kwargs): + kwargs.setdefault("measurement_unit", "requests") + kwargs.setdefault("labels", {})["http_counter"] = "requests" + super().__init__(metric_id, **kwargs) + kwargs["labels"] = self._update_label(kwargs["labels"], {"exceptions": "uncaptured"}) + self.exception_counter = Counter(f"{metric_id}", *args, **kwargs) + kwargs["labels"] = self._update_label(kwargs["labels"], {"http_counter": "errors"}) + self.errors_counter = Counter(f"{metric_id}", *args, **kwargs) + + @staticmethod + def _update_label(labels, extra_label): + copy_labels = copy(labels) + copy_labels.update(extra_label) + return copy_labels + + def __call__(self, function): + @wraps(function) + async def wrapper(*args, **kwargs): + self.inc() + with self.exception_counter.count_exceptions(): + response_value = await function(*args, **kwargs) + if hasattr(response_value, "status") and response_value.status >= 400: + self.errors_counter.inc() + return response_value + + return wrapper + + +class HeaderToLabel: + """Used to map the headers of a http petition to a certain prometheus labels. + The object needs to be passed to the HTTPCounterHeaderMap. + """ + + def __init__(self, header, label, default_header_value): + """ + Args: + header (str): Header of the http request to map. + label (str): Label in prometheus where you want to count the headers. + default_header_value (str): Default value if the header doesn't exist. + """ + self.header = header + self.label = label + self.default_header_value = default_header_value + + +class HTTPCounterHeaderMap(HTTPCounter): + """Counter to use in conjunction with the handlers of a Sanic server. + It is a decorator that counts the http petitions and unexpected exceptions managed + by the handler. It also labels the metrics with the status codes of the responses + and the headers of the request. + + Example: + >>> customer_map = HeaderToLabel(header='Customer', label='customer_id', default_header_value='not available') + >>> + >>> @HTTPCounterHeaderMap('orders', header_to_label_maps=[customer_map]) + >>> async def get_all_orders_by_customer_handler(request): + >>> ... + >>> + """ + + def __init__(self, metric_id, *args, **kwargs): + """ + Args: + metric_id (str): Metric identifier. + *args: Arguments for HTTPCounter. + **kwargs: Keyword arguments for HTTPCounterHeaderMap. 
+ """ + self.header_to_label_maps = kwargs.get("header_to_label_maps", {}) + super().__init__(metric_id, *args, **kwargs) + + def __call__(self, function): + @wraps(function) + async def wrapper(request, *args, **kwargs): + labels = {"path": request.path, "method": request.method} + for header_map in self.header_to_label_maps: + labels.update( + {header_map.label: request.headers.get(header_map.header, header_map.default_header_value)} + ) + with self.exception_counter.new_labels(labels).count_exceptions(): + response = await function(request, *args, **kwargs) + labels.update({"status_code": response.status}) + self.new_labels(labels).inc() + return response + + return wrapper diff --git a/tamarco/resources/basic/metrics/meters/gauge.py b/tamarco/resources/basic/metrics/meters/gauge.py new file mode 100644 index 0000000..fc7b924 --- /dev/null +++ b/tamarco/resources/basic/metrics/meters/gauge.py @@ -0,0 +1,103 @@ +from tamarco.resources.basic.metrics.meters.base import BaseMeter, Timer, metric_factory, spy_factory + + +class Gauge(BaseMeter): + """A gauge is a metric that represents a single numerical value that can arbitrarily go up and down. + Gauges are typically used for measured values like temperatures or current memory usage, + but also "counts" that can go up and down, like the number of running routines. + The initial value is 0 by default and the reset it at the restart. + It can be used as a timer, it is useful for batch jobs. You need to take in account that this kind + of data only save the last value, so if the Gauge is called a lot of times in the collect period + probably a summary is a best choice, because it computes some useful statistics. + + Example: + >>> current_websocket_connections = Gauge("current_websocket_connections", "ws") + >>> + >>> class WebSocketServer: + >>> @current_websocket_connections + >>> def on_open(self): + >>> ... + >>> + >>> def on_close(self): + >>> current_websocket_connections.dec() + >>> ... + >>> + >>> @Gauge("close_all_conections", "seconds").timeit() + >>> def close_all_conections(self): + >>> ... + """ + + def __init__(self, metric_id, measurement_unit, start_value=0, labels=None): + super().__init__(metric_id, measurement_unit, labels=labels, start_value=start_value) + self.value = start_value + self.parent_meter = None + + def timeit(self): + """Allows a gauge to work as a Timer. + The returned object of timeit() is a Timer and can be used as decorator and context manager. + + >>> import time + >>> + >>> task_gauge = Gauge("task", "time") + >>> + >>> # This are going to print the elapsed time in this function every time it is called. + >>> @task_gauge.timeit() + >>> def some_task(): + >>> time.sleep(1) + >>> + >>> # It also works with coroutines. + >>> import asyncio + >>> + >>> @task_gauge.timeit() + >>> async def some_task(): + >>> await asyncio.sleep(1) + >>> + >>> # And as a context manager + >>> with task_gauge.timeit() + >>> time.sleep(1) + """ + return Timer(callback=lambda time: self.set(time)) + + def inc(self, value=1): + """Increase the value of the gauge. + + Args: + value: Integer or float with the value to increment, the default value is 1. + """ + self._check_valid_value(value) + self.value += value + + def dec(self, value=1): + """Decrease the value of the gauge. + + Args: + value: Integer or float with the value to decrease, the default value is 1. + """ + self._check_valid_value(value) + self.value -= value + + def set(self, value): # noqa: A003 + """Set the gauge to one value. 
+    def set(self, value):  # noqa: A003
+        """Set the gauge to one value.
+
+        Args:
+            value: Integer or float with the value to set.
+        """
+        self._check_valid_value(value)
+        self.value = value
+
+    def set_to_current_time(self):
+        """Set the gauge to the current unix timestamp in seconds."""
+        self.value = self.timestamp
+
+    @staticmethod
+    def _check_valid_value(value):
+        assert isinstance(value, int) or isinstance(
+            value, float
+        ), "Invalid value for Gauge, it only works with integers and floats"
+
+    def _collect_metrics(self):
+        return [metric_factory(self.metric_id, self.value, self.measurement_unit, self.timestamp, labels=self.labels)]
+
+    def __call__(self, function):
+        """Allow the gauge to work as a decorator, it increases the gauge once every time the function is called."""
+        return spy_factory(function, lambda: self.inc())
diff --git a/tamarco/resources/basic/metrics/meters/summary.py b/tamarco/resources/basic/metrics/meters/summary.py
new file mode 100755
index 0000000..653a257
--- /dev/null
+++ b/tamarco/resources/basic/metrics/meters/summary.py
@@ -0,0 +1,108 @@
+from tamarco.resources.basic.metrics.meters.base import BaseMeter, Timer, metric_factory
+from tamarco.resources.basic.metrics.settings import DEFAULT_SUMMARY_QUANTILES
+
+
+class Summary(BaseMeter):
+    """A summary samples observations (usually things like request durations) over sliding windows of time
+    and provides instantaneous insight into their distributions, frequencies, and sums.
+
+    The default quantiles are: [0.5, 0.75, 0.9, 0.95, 0.99]
+
+    Example:
+        >>> requests_time = Summary("http_requests", 'time')
+        >>>
+        >>> @requests_time.timeit()
+        >>> def http_request():
+        >>>     ...
+        >>>
+        >>> import psutil
+        >>> ram_usage = Summary("ram_usage", "bytes")
+        >>> ram_usage.observe(psutil.virtual_memory().used)
+    """
+
+    def __init__(self, metric_id, measurement_unit, quantiles=None, *args, **kwargs):
+        super().__init__(metric_id, measurement_unit, quantiles=quantiles, *args, **kwargs)
+        self.values = []
+        self.quantiles = quantiles if quantiles else DEFAULT_SUMMARY_QUANTILES
+
+    def observe(self, value):
+        """Observe one value.
+
+        Args:
+            value: Integer or float with the value to observe.
+        """
+        assert isinstance(value, int) or isinstance(value, float), "Summary values should be ints or floats"
+        self.values.append(value)
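# A short sketch of what one Summary reports per collect period (illustrative
# values; the series names follow the _collect_metrics/_get_quantile methods
# below): after observing [1, 2, 3, 4], "http_requests" emits
# http_requests_sum = 10, http_requests_count = 4, and one sample per quantile
# labelled with {"quantile": q}, picked by nearest rank, e.g. the 0.5 quantile
# is data[round(0.5 * 4) - 1] == 2.
requests_time = Summary("http_requests", "time")
for duration in (1, 2, 3, 4):
    requests_time.observe(duration)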
+    def timeit(self):
+        """Allows the Summary to work as a Timer.
+        The timer can work as a decorator or as a context manager."""
+        return Timer(lambda time: self.observe(time))
+
+    def _collect_metrics(self):
+        timestamp = self.timestamp
+        collected_values = []
+        sorted_values = sorted(self.values)
+
+        collected_values += self._get_metric_sum(timestamp, sorted_values)
+        collected_values += self._get_metric_count(timestamp, sorted_values)
+
+        for quantile in self.quantiles:
+            collected_values += self._get_quantile(timestamp, sorted_values, quantile)
+
+        return collected_values
+
+    def _reset(self):
+        self.values = []
+
+    def _get_metric_sum(self, timestamp, values):
+        return [
+            metric_factory(
+                self.metric_id + "_sum",
+                sum(values),
+                self.measurement_unit,
+                timestamp,
+                empty=not values,
+                labels=self.labels,
+            )
+        ]
+
+    def _get_metric_count(self, timestamp, values):
+        return [
+            metric_factory(
+                self.metric_id + "_count",
+                len(values),
+                self.measurement_unit,
+                timestamp,
+                empty=not values,
+                labels=self.labels,
+            )
+        ]
+
+    def _get_quantile(self, timestamp, values, quantile):
+        try:
+            return [
+                metric_factory(
+                    self.metric_id,
+                    self._quantile(values, quantile),
+                    self.measurement_unit,
+                    timestamp,
+                    empty=not values,
+                    labels={**self.labels, "quantile": quantile},
+                )
+            ]
+        except IndexError:
+            return []
+
+    @staticmethod
+    def _quantile(data, percentile):
+        """Find the percentile of a list of values.
+
+        Args:
+            data: A list of values, sorted in ascending order.
+            percentile: A float value from 0.0 to 1.0.
+        Returns:
+            The percentile of the values.
+        """
+        if len(data):
+            n = int(round(percentile * len(data)))
+            return data[n - 1]
diff --git a/tamarco/resources/basic/metrics/reporters/__init__.py b/tamarco/resources/basic/metrics/reporters/__init__.py
new file mode 100755
index 0000000..2d455bd
--- /dev/null
+++ b/tamarco/resources/basic/metrics/reporters/__init__.py
@@ -0,0 +1,5 @@
+from .carbon import CarbonHandler
+from .file import FileHandler
+from .stdout import StdoutHandler
+
+__all__ = ["CarbonHandler", "FileHandler", "StdoutHandler"]
diff --git a/tamarco/resources/basic/metrics/reporters/base.py b/tamarco/resources/basic/metrics/reporters/base.py
new file mode 100755
index 0000000..6ec3a0b
--- /dev/null
+++ b/tamarco/resources/basic/metrics/reporters/base.py
@@ -0,0 +1,55 @@
+class CarbonBaseHandler:
+    """Base handler for all the metrics handlers."""
+
+    def __init__(self, metric_prefix=None):
+        """Initialize the base handler.
+
+        Args:
+            metric_prefix (str): Prefix of all metrics.
+        """
+        if metric_prefix is None:
+            self.metric_prefix = ""
+        else:
+            self.metric_prefix = metric_prefix + "."
+
+    def format_metrics(self, meters):
+        """Format available metrics from all `meters`.
+
+        Args:
+            meters (list): List of meters from which the metrics will be obtained.
+
+        Returns:
+            str: A text with a metric per line.
+        """
+        metrics_str = ""
+        for meter in meters:
+            metrics = meter._collect_metrics()
+            for metric in metrics:
+                parsed_labels = self.parse_label(metric.labels)
+                metrics_str += (
+                    f"{self.metric_prefix}{metric.id}_{meter.metric_type}_{parsed_labels}__{metric.units} "
+                    f"{metric.value} {metric.timestamp}\n"
+                )
+        return metrics_str
+
+    @staticmethod
+    def parse_label(labels):
+        """Format the metric labels and their values.
+
+        Args:
+            labels (dict): Metric labels.
+
+        Returns:
+            str: A string with all the labels and their values separated by dots.
+        """
+        labels_list = [f"{key}_{value}" for key, value in labels.items()]
+        labels_str = ".".join(labels_list)
+        return labels_str
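# Sketch of a line produced by format_metrics above (hypothetical meter: a
# Counter "http.requests" whose metric_type is assumed to be "counter", labels
# {"code": "200"}, value 42, timestamp 1569400000, handler prefix "myapp"):
#
#   myapp.http.requests_counter_code_200__requests 42 1569400000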
+ """ + labels_list = [f"{key}_{value}" for key, value in labels.items()] + labels_str = ".".join(labels_list) + return labels_str + + def write(self, metrics): + """The function to override in the handlers for to build the metrics report. + + Args: + metrics (list): List of meters from which the metrics will be obtained. + """ + raise NotImplementedError diff --git a/tamarco/resources/basic/metrics/reporters/carbon.py b/tamarco/resources/basic/metrics/reporters/carbon.py new file mode 100755 index 0000000..7634929 --- /dev/null +++ b/tamarco/resources/basic/metrics/reporters/carbon.py @@ -0,0 +1,42 @@ +import logging +import socket + +from tamarco.resources.basic.metrics.reporters.base import CarbonBaseHandler +from tamarco.resources.basic.metrics.settings import DEFAULT_CARBON_HOST, DEFAULT_CARBON_PORT + +logger = logging.getLogger("tamarco.metrics") + + +class CarbonHandler(CarbonBaseHandler): + """Handler for the applications metrics that sends them to a Graphite Carbon service.""" + + def __init__(self, host=DEFAULT_CARBON_HOST, port=DEFAULT_CARBON_PORT, metric_prefix=None): + """Initialize the Carbon handler. + + Args: + host (str): Carbon host address. + port (int): Carbon port number. + metric_prefix (str): Concatenated prefix in all metrics. + """ + super().__init__(metric_prefix) + self.host = host + self.port = port + self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.socket.connect((self.host, self.port)) + + def write(self, meters): + """Build the metrics report from all meters. + + Args: + meters (list): List of meters from which the metrics will be obtained. + """ + metrics_str = self.format_metrics(meters) + metrics_bytes = metrics_str.encode("utf-8") + try: # IMPROVEME, reconnection and error handling in socket + self.socket.send(metrics_bytes) + except (socket.timeout, socket.error): + try: + self.socket.connect((self.host, self.port)) + self.socket.send(metrics_bytes) + except Exception: + logger.exception("Unexpected exception sending metrics to carbon") diff --git a/tamarco/resources/basic/metrics/reporters/file.py b/tamarco/resources/basic/metrics/reporters/file.py new file mode 100755 index 0000000..f18e013 --- /dev/null +++ b/tamarco/resources/basic/metrics/reporters/file.py @@ -0,0 +1,30 @@ +from tamarco.resources.basic.metrics.reporters.base import CarbonBaseHandler +from tamarco.resources.basic.metrics.settings import DEFAULT_FILE_PATH + + +class FileHandler(CarbonBaseHandler): + """Handler for the applications metrics that store them in a file.""" + + def __init__(self, file_path=DEFAULT_FILE_PATH, metric_prefix=None): + """Initialize the File handler. + + Args: + file_path (str): File path. + metric_prefix (str): Concatenated prefix in all metrics. + """ + super().__init__(metric_prefix) + self.file = open(file_path, "a") + + def __del__(self): + """Close the file when the handler is deleted.""" + if self.file: + self.file.close() + + def write(self, meters): + """Build the metrics report from all meters. + + Args: + meters (list): List of meters from which the metrics will be obtained. 
+ """ + metrics_str = self.format_metrics(meters) + self.file.write(metrics_str) diff --git a/tamarco/resources/basic/metrics/reporters/prometheus.py b/tamarco/resources/basic/metrics/reporters/prometheus.py new file mode 100644 index 0000000..cb069be --- /dev/null +++ b/tamarco/resources/basic/metrics/reporters/prometheus.py @@ -0,0 +1,140 @@ +import logging +from random import choice + +from sanic.response import text + +from tamarco.resources.basic.metrics.reporters.base import CarbonBaseHandler + +logger = logging.getLogger("tamarco.metrics") + + +class PrometheusHandler(CarbonBaseHandler): + """Handler for the applications metrics that sends them to a Prometheus API.""" + + def __init__(self, metric_id_prefix=None): + """Initialize the Carbon handler. + + Args: + metric_id_prefix (str): Concatenated prefix in all metrics. + """ + self.http_body = "# HELP NO METRICS\n" + self.metric_id_prefix = metric_id_prefix if metric_id_prefix else "" + super().__init__() + + async def http_handler(self, request): + """Handler for the Prometheus endpoint. + + Args: + request: HTTP request. + + Returns: + object: Response object with body in text format. + """ + return text(body=self.http_body, status=200) + + def write(self, meters): + """Build the metrics report from all meters. + + Args: + meters (list): List of meters from which the metrics will be obtained. + """ + try: + self.http_body = self.format_metrics(meters) + except Exception: + self.http_body = "# HELP Error collecting metrics" + logger.warning("Unexpected exception formatting metrics in Metrics prometheus handler", exc_info=True) + + def format_metrics(self, meters): + """Format available metrics from all `meters`. + + Args: + meters (list): List of meters from which the metrics will be obtained. + + Returns: + str: A text with a metric per line. + """ + http_body = "" + meters_by_type = {meter.metric_id: meter.metric_type for meter in meters} + for meter_id, meter_type in meters_by_type.items(): + help_and_type_lines = False + meters_with_meter_id = {meter for meter in meters if meter.metric_id == meter_id} + for meter in meters_with_meter_id: + metrics = meter._collect_metrics() + if not help_and_type_lines: + http_body += self.parse_help_line(self.parse_metric_id(meter.metric_id), choice(metrics).units) + http_body += self.parse_type_line(self.parse_metric_id(meter_id), meter_type) + help_and_type_lines = True + http_body += self.parse_metrics(metrics) + return http_body + + @staticmethod + def parse_type_line(metric_id, metric_type): + """Build the Prometheus TYPE line. + + Args: + metric_id (str): Identifier of the metric. + metric_type (str): Type of the metric. + + Returns: + str: Prometheus TYPE line. + """ + return f"# TYPE {metric_id} {metric_type}\n" + + @staticmethod + def parse_help_line(metric_id, meter_unit): + """Build the Prometheus HELP line. + + Args: + metric_id (str): Identifier of the metric. + meter_unit (str): Unit of the metric. + + Returns: + str: Prometheus HELP line. + """ + return f"# HELP {metric_id} units {meter_unit}\n" + + def parse_metric_id(self, metric_id: str): + """Format the identifier of the metric. + + Args: + metric_id (str): Identifier of the metric. + + Returns: + str: formatted metric id. + """ + metric_id_replaced = metric_id.replace(".", "_") + return f"{self.metric_id_prefix}_{metric_id_replaced}" + + def parse_metrics(self, metrics): + """Format all the metrics from the `metrics` list. + + Args: + metrics (list): list of metrics. + + Returns: + str: A text with a metric per line. 
+ """ + parsed_metric = "" + for metric in metrics: + metric_value = metric.value if not metric.empty else "NaN" + label_str = self.parse_labels(metric.labels) + metric_id = self.parse_metric_id(metric.id) + parsed_metric += f"{metric_id}{label_str} {metric_value}\n" + return parsed_metric + + @staticmethod + def parse_labels(labels): + """Format the labels metric and their values. + + Args: + labels (list): List of metric labels. + + Returns: + str: A string with all the labels and their values separated by dots. + """ + if labels: + labels_str_list = [f'{key}="{value}"' for key, value in labels.items()] + labels_str = "{" + ",".join(labels_str_list) + "}" + return labels_str + else: + return "" diff --git a/tamarco/resources/basic/metrics/reporters/stdout.py b/tamarco/resources/basic/metrics/reporters/stdout.py new file mode 100755 index 0000000..de65a1e --- /dev/null +++ b/tamarco/resources/basic/metrics/reporters/stdout.py @@ -0,0 +1,14 @@ +from tamarco.resources.basic.metrics.reporters.base import CarbonBaseHandler + + +class StdoutHandler(CarbonBaseHandler): + """Handler for the applications metrics that send them to the standard output.""" + + def write(self, meters): + """Build the metrics report from all meters. + + Args: + meters (list): List of meters from which the metrics will be obtained. + """ + metrics_str = self.format_metrics(meters) + print(metrics_str) diff --git a/tamarco/resources/basic/metrics/resource.py b/tamarco/resources/basic/metrics/resource.py new file mode 100755 index 0000000..3b4870d --- /dev/null +++ b/tamarco/resources/basic/metrics/resource.py @@ -0,0 +1,174 @@ +import logging +import socket + +from tamarco.core.settings.settings import SettingNotFound +from tamarco.resources.bases import BaseResource +from tamarco.resources.basic.metrics.manager import MetersManager +from tamarco.resources.basic.metrics.reporters.carbon import CarbonHandler +from tamarco.resources.basic.metrics.reporters.file import FileHandler +from tamarco.resources.basic.metrics.reporters.prometheus import PrometheusHandler +from tamarco.resources.basic.metrics.reporters.stdout import StdoutHandler +from tamarco.resources.basic.status.status_codes import StatusCodes +from tamarco.resources.io.http.resource import HTTPServerResource +from .settings import PROMETHEUS_METRICS_HTTP_ENDPOINT + + +class MetricsResource(BaseResource): + """Resource class to handle the applications metrics.""" + + depends_on = ["tamarco_http_report_server"] + loggers_names = ["tamarco.metrics"] + + def __init__(self, *args, **kwargs): + """Initialize the metrics resource. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + """ + super().__init__(*args, **kwargs) + self._metric_prefix = None + self.logger = logging.getLogger("tamarco.metrics") + self.status_codes = StatusCodes + self.http_server_resource = HTTPServerResource() + + @property + def metric_prefix(self): + """Get the metrics prefix as a attribute. + + Returns: + str: Metric prefix. + """ + if not self._metric_prefix: + self._metric_prefix = self._get_metric_prefix() + return self._metric_prefix + + def _get_metric_prefix(self): + """Build the metrics prefix. + + Returns: + str: Metric prefix. 
+ """ + deploy_name = self.microservice.deploy_name + service_name = self.microservice.name + hostname = socket.gethostname() + metrics_prefix = f"{deploy_name}.{service_name}.{hostname}" + return metrics_prefix + + async def _configure_carbon_handler(self): + """Load the Carbon handler configuration settings and adds the handler to the Meters Manager.""" + try: + enabled = await self.settings.get("handlers.carbon.enabled") + except SettingNotFound: + self.logger.warning("Metrics carbon handler cannot be configured because the enabled setting is missing") + else: + if enabled: + try: + host = await self.settings.get("handlers.carbon.host") + port = await self.settings.get("handlers.carbon.port") + except SettingNotFound: + self.logger.warning( + "Metrics carbon handler cannot be configured because the host and/or port " + "settings are missing." + ) + else: + carbon_handler = CarbonHandler(host, port, self.metric_prefix) + MetersManager.add_handler(carbon_handler) + else: + self.logger.info("Metrics carbon handler is disabled") + + async def _configure_file_handler(self): + """Load the File handler configuration settings and adds the handler to the Meters Manager.""" + try: + enabled = await self.settings.get("handlers.file.enabled") + except SettingNotFound: + self.logger.warning("Metrics file handler cannot be configured because the enabled setting is missing") + else: + if enabled: + try: + file_path = await self.settings.get("handlers.file.path") + file_handler = FileHandler(file_path=file_path) + except SettingNotFound: + self.logger.warning("Metrics file handler cannot be configured because the path setting is missing") + else: + MetersManager.add_handler(file_handler) + else: + self.logger.info("Metrics file handler is disabled") + + async def _configure_stdout_handler(self): + """Load the Standard Output handler configuration settings and adds the handler to the Meters Manager.""" + try: + enabled = await self.settings.get("handlers.stdout.enabled") + except SettingNotFound: + self.logger.warning("Metrics stdout handler cannot be configured because the enabled setting is missing") + else: + if enabled: + try: + stdout_prefix = await self.settings.get("handlers.stdout.prefix") + except SettingNotFound: + self.logger.warning( + "Metrics stdout handler cannot be configured because the prefix setting is " "missing" + ) + else: + stdout_handler = StdoutHandler(metric_prefix=stdout_prefix) + MetersManager.add_handler(stdout_handler) + else: + self.logger.info("Metrics stdout handler is disabled") + + async def _configure_prometheus_handler(self): + """Load the Prometheus handler configuration settings and adds the handler to the Meters Manager.""" + try: + enabled = await self.settings.get("handlers.prometheus.enabled") + except SettingNotFound: + self.logger.warning( + "Metrics prometheus handler cannot be configured because the enabled setting is " "missing" + ) + else: + if enabled: + try: + prometheus_handler = PrometheusHandler(metric_id_prefix=self.microservice.name) + MetersManager.add_handler(prometheus_handler) + self.microservice.tamarco_http_report_server.add_endpoint( + uri=PROMETHEUS_METRICS_HTTP_ENDPOINT, endpoint_handler=prometheus_handler.http_handler + ) + except Exception: + self.logger.exception("Unexpected exception configuring the Metrics prometheus handler") + else: + self.logger.info("Metrics prometheus handler is disabled") + + async def _configure_collect_period(self): + """Load the collect period setting and adds it to the Meters Manager.""" + try: + 
+    async def _configure_collect_period(self):
+        """Load the collect period setting and add it to the MetersManager."""
+        try:
+            collect_frequency = await self.settings.get("collect_frequency")
+        except SettingNotFound:
+            self.logger.warning(
+                f"Metrics collect frequency is not configured because the collect_frequency setting is "
+                f"missing. Using the default value: {MetersManager.default_collect_period}"
+            )
+        else:
+            self.logger.info(f"Metrics collect frequency configured: {collect_frequency} seconds")
+            MetersManager.configure(config={"collect_period": collect_frequency})
+
+    async def start(self):
+        """Configure the available metrics handlers."""
+        await super().start()
+        await self._configure_carbon_handler()
+        await self._configure_file_handler()
+        await self._configure_stdout_handler()
+        await self._configure_collect_period()
+        await self._configure_prometheus_handler()
+        MetersManager.start()
+
+    async def stop(self):
+        """Stop the Metrics Manager."""
+        self.logger.info(f"Stopping Metrics resource: {self.name}")
+        await super().stop()
+        MetersManager.stop()
+
+    async def status(self):
+        """Return the resource status code.
+
+        Returns:
+            dict: Resource status.
+        """
+        return {"status": self._status}
diff --git a/tamarco/resources/basic/metrics/settings.py b/tamarco/resources/basic/metrics/settings.py
new file mode 100644
index 0000000..9521bc9
--- /dev/null
+++ b/tamarco/resources/basic/metrics/settings.py
@@ -0,0 +1,8 @@
+PROMETHEUS_METRICS_HTTP_ENDPOINT = "/metrics"
+
+DEFAULT_SUMMARY_QUANTILES = [0.5, 0.75, 0.9, 0.95, 0.99]
+
+DEFAULT_FILE_PATH = "/tmp/metrics"
+
+DEFAULT_CARBON_HOST = "localhost"
+DEFAULT_CARBON_PORT = 2003
diff --git a/tamarco/resources/basic/registry/__init__.py b/tamarco/resources/basic/registry/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tamarco/resources/basic/registry/resource.py b/tamarco/resources/basic/registry/resource.py
new file mode 100644
index 0000000..d11df1e
--- /dev/null
+++ b/tamarco/resources/basic/registry/resource.py
@@ -0,0 +1,69 @@
+import asyncio
+import logging
+import socket
+
+import aio_etcd
+
+from tamarco.core.patterns import Singleton
+from tamarco.core.utils import get_etcd_configuration_from_environment_variables
+from tamarco.resources.bases import BaseResource
+from tamarco.resources.basic.status.status_codes import StatusCodes
+from .settings import ETCD_DEFAULT_REGISTRY_PATH, ETCD_REGISTRY_PERIOD_SECONDS, ETCD_REGISTRY_TTL_SECONDS
+
+
+class Registry(BaseResource, metaclass=Singleton):
+    """Class that registers the instance of the microservice in etcd.
+    This functionality is intended to provide auto-discovery of microservices via etcd.
+    """
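# Resulting etcd layout sketch (illustrative): with the default registry path
# "system.discovery", a service named "billing" and some instance id, the
# get_register_key method below writes the instance IP under the key
# "system/discovery/billing/<instance_id>", refreshed every
# ETCD_REGISTRY_PERIOD_SECONDS with a TTL of ETCD_REGISTRY_TTL_SECONDS, so the
# entries of crashed instances expire automatically.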
+ """ + + loggers_names = ["tamarco.registry"] + + def __init__(self, *args, **kwargs): + self.etcd_config = get_etcd_configuration_from_environment_variables() + self.etcd_client = None + self.register_task = None + self.logger = logging.getLogger("tamarco.registry") + self.own_ip = socket.gethostbyname(socket.gethostname()) + super().__init__(*args, **kwargs) + + async def post_start(self): + enabled_setting = await self.settings.get("enabled", False) + if enabled_setting: + self.logger.info("Registry resource enabled") + await self.connect_to_etcd() + self.register_task = asyncio.ensure_future(self.register_coroutine(), loop=self.microservice.loop) + else: + self.logger.info("Registry resource disabled") + + async def connect_to_etcd(self): + self.etcd_client = aio_etcd.Client(**self.etcd_config, loop=self.microservice.loop) + + async def register_coroutine(self): + try: + register_path = await self.settings.get("path", ETCD_DEFAULT_REGISTRY_PATH) + register_key = self.get_register_key(register_path) + while True: + await self.register_in_etcd(register_key) + await asyncio.sleep(ETCD_REGISTRY_PERIOD_SECONDS) + except Exception: + self.logger.critical( + "Unexpected exception registering the coroutine. Reporting failed status", exc_info=True + ) + self._status = StatusCodes.FAILED + + def get_register_key(self, register_path): + return f"{register_path}.{self.microservice.name}.{self.microservice.instance_id}".replace(".", "/") + + async def register_in_etcd(self, register_key): + try: + await self.etcd_client.set(key=register_key, value=self.own_ip, ttl=ETCD_REGISTRY_TTL_SECONDS) + except Exception: + self.logger.warning("Unexpected exception registering instance in ETCD", exc_info=True) + + async def stop(self): + self.logger.info(f"Stopping Registry resource: {self.name}") + await super().stop() + + async def post_stop(self): + if self.register_task is not None: + self.register_task.cancel() diff --git a/tamarco/resources/basic/registry/settings.py b/tamarco/resources/basic/registry/settings.py new file mode 100644 index 0000000..5fb4a60 --- /dev/null +++ b/tamarco/resources/basic/registry/settings.py @@ -0,0 +1,4 @@ +ETCD_REGISTRY_TTL_SECONDS = 5 +ETCD_REGISTRY_PERIOD_SECONDS = 2 + +ETCD_DEFAULT_REGISTRY_PATH = "system.discovery" diff --git a/tamarco/resources/basic/status/__init__.py b/tamarco/resources/basic/status/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/resources/basic/status/resource.py b/tamarco/resources/basic/status/resource.py new file mode 100755 index 0000000..7a6b855 --- /dev/null +++ b/tamarco/resources/basic/status/resource.py @@ -0,0 +1,127 @@ +import asyncio +import logging + +from sanic.response import json + +from tamarco.core.patterns import Singleton +from tamarco.resources.bases import BaseResource +from tamarco.resources.basic.status.status_codes import StatusCodes +from .settings import STATUS_HTTP_ENDPOINT + +logger = logging.getLogger("tamarco.status") + +PERIOD_BETWEEN_CHECKS = 1 + + +def get_global_status(status): + """ + Taking into account all the resources states of the microservice: + a. Return HTTP 200 if all the resources are in the status STARTED. + b. Return HTTP 500 if any resource is in the state STOPPED or FAILED. + c. Return HTTP 102 otherwise. + + Args: + status (dict): all resources with a dict where we can get their values status. 
+
+async def sanic_status_endpoint(request):
+    response = {}
+    status_resource = StatusResource()
+    for name, resource in status_resource.microservice.resources.items():
+        response[name] = await resource.status()
+    status_code = get_global_status(response)
+    return json(body=response, status=status_code)
+
+
+class StatusResource(BaseResource, metaclass=Singleton):
+    """Resource that reports the status of all the resources of the microservice
+    and applies the configured restart policies when one of them fails.
+    """
+
+    depends_on = ["tamarco_http_report_server"]
+    loggers_names = ["tamarco.status"]
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.logger = logger
+        self.status_codes = StatusCodes
+        self.critical_resources = []
+        self.resources_to_restart_on_failure = []
+
+    async def status(self):
+        return {"status": self.status_codes.STARTED, "status_str": "STARTED"}
+
+    async def start(self):
+        await super().start()
+        self.microservice.tamarco_http_report_server.add_endpoint(
+            uri=STATUS_HTTP_ENDPOINT, endpoint_handler=sanic_status_endpoint
+        )
+        self.critical_resources = await self.settings.get(
+            "restart_policy.resources.restart_microservice_on_failure", []
+        )
+        self.resources_to_restart_on_failure = await self.settings.get(
+            "restart_policy.resources.restart_resource_on_failure", []
+        )
+        asyncio.ensure_future(self._check_status_repeatedly())
+
+    async def stop(self):
+        self.logger.info(f"Stopping Status resource: {self.name}")
+        await super().stop()
+
+    async def _check_status_repeatedly(self):
+        while self._status == self.status_codes.STARTED:
+            await asyncio.sleep(PERIOD_BETWEEN_CHECKS)
+            await self._check_status()
+
+    async def _check_status(self):
+        await self._restart_microservice_on_failure()
+        await self._restart_resource_on_failure()
+
+    async def _restart_microservice_on_failure(self):
+        for name, resource in self.microservice.resources.items():
+            if name in self.critical_resources or "all" in self.critical_resources:
+                status_response = await resource.status()
+                try:
+                    if status_response["status"] == StatusCodes.FAILED:
+                        logger.critical(
+                            f"{name} resource has a failed status. Closing microservice {self.microservice.name}"
+                        )
+                        await self.microservice.stop_gracefully()
+                        break
+                except KeyError:
+                    logger.critical(
+                        f"{name} resource has an unknown status. Closing microservice {self.microservice.name}",
+                        exc_info=True,
+                    )
+                    await self.microservice.stop_gracefully()
+                    break
+
+    async def _restart_resource_on_failure(self):
+        for name, resource in self.microservice.resources.items():
+            if name in self.resources_to_restart_on_failure or "all" in self.resources_to_restart_on_failure:
+                status_response = await resource.status()
+                try:
+                    if status_response["status"] == StatusCodes.FAILED:
+                        await resource.stop()
+                        await resource.start()
+                except KeyError:
+                    logger.error(f"Resource {name} is reporting its status incorrectly")
+                except Exception:
+                    logger.error(f"Unknown exception restarting the resource {name}. Closing microservice")
+                    await self.microservice.stop_gracefully()
Closing microservice") + self.microservice.stop_gracefully() diff --git a/tamarco/resources/basic/status/settings.py b/tamarco/resources/basic/status/settings.py new file mode 100644 index 0000000..3c03f78 --- /dev/null +++ b/tamarco/resources/basic/status/settings.py @@ -0,0 +1 @@ +STATUS_HTTP_ENDPOINT = "/status" diff --git a/tamarco/resources/basic/status/status_codes.py b/tamarco/resources/basic/status/status_codes.py new file mode 100755 index 0000000..1ce4cf5 --- /dev/null +++ b/tamarco/resources/basic/status/status_codes.py @@ -0,0 +1,10 @@ +class StatusCodes: + NOT_STARTED = 0 + CONNECTING = 1 + STARTED = 2 + STOPPING = 3 + STOPPED = 4 + FAILED = 5 + + def __new__(cls, *args, **kwargs): + raise NotImplementedError diff --git a/tamarco/resources/debug/__init__.py b/tamarco/resources/debug/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/resources/debug/config.py b/tamarco/resources/debug/config.py new file mode 100644 index 0000000..0f1adc8 --- /dev/null +++ b/tamarco/resources/debug/config.py @@ -0,0 +1 @@ +TIME_BETWEEN_SNAPSHOTS = 5 diff --git a/tamarco/resources/debug/memory_analizer.py b/tamarco/resources/debug/memory_analizer.py new file mode 100755 index 0000000..199840f --- /dev/null +++ b/tamarco/resources/debug/memory_analizer.py @@ -0,0 +1,48 @@ +import asyncio +import datetime +import logging + +import objgraph + +from tamarco.core.patterns import Singleton +from tamarco.resources.bases import BaseResource + +TIME_BETWEEN_SNAPSHOTS = 30 + + +class MemoryAnalyzerResource(BaseResource, metaclass=Singleton): + def __init__(self): + super().__init__() + self.logger = logging.getLogger("tamarco.memory_analyzer") + self.memory_watcher_task = None + + async def start(self): + self.logger.info(f"Starting Memory Analyzer resource") + self.memory_watcher_task = asyncio.ensure_future(self.memory_watcher(), loop=self.microservice.loop) + + async def stop(self): + self.logger.info(f"Stopping Memory Analyzer resource") + self.memory_watcher_task.cancel() + + async def memory_watcher(self): + try: + while True: + await self.objgraph_save() + await asyncio.sleep(TIME_BETWEEN_SNAPSHOTS) + except Exception: + self.logger.warning("Unexpected exception saving objgraph object") + + async def objgraph_save(self): + try: + objgraph_file = open(f"/tmp/{self.microservice.name}_objgraph.log", "a") + except Exception: + self.logger.exception("Unexpected exception opening profile log file") + return + try: + objgraph_file.write(f"\n\n###############\n# DATE : {datetime.datetime.now()}\n###############\n") + objgraph_file.write("############### growth ###############\n") + objgraph.show_growth(limit=50, file=objgraph_file) + except Exception: + self.logger.exception("Unexpected exception writing objgraph information") + finally: + objgraph_file.close() diff --git a/tamarco/resources/debug/pdb.py b/tamarco/resources/debug/pdb.py new file mode 100755 index 0000000..1ba6e83 --- /dev/null +++ b/tamarco/resources/debug/pdb.py @@ -0,0 +1,18 @@ +import pdb +import signal + +from tamarco.core.microservice import Microservice +from tamarco.core.patterns import Singleton +from tamarco.resources.bases import BaseResource + +START_PDB_SIGNAL = signal.SIGUSR1 + + +class PdbResource(BaseResource, metaclass=Singleton): + async def bind(self, microservice: Microservice, name: str) -> None: + super().bind(microservice, name) + microservice.signals_manager.register_signal(START_PDB_SIGNAL, self.start_pdb) + + @staticmethod + def start_pdb(): + pdb.set_trace() diff --git 
a/tamarco/resources/debug/profiler.py b/tamarco/resources/debug/profiler.py new file mode 100644 index 0000000..1461a6b --- /dev/null +++ b/tamarco/resources/debug/profiler.py @@ -0,0 +1,82 @@ +import asyncio +import datetime +import logging +import pstats +from cProfile import Profile + +from tamarco.core.settings.settings import SettingNotFound +from tamarco.resources.bases import BaseResource +from .config import TIME_BETWEEN_SNAPSHOTS + + +class ProfilerResource(BaseResource): + + loggers_names = ["tamarco.profiler"] + + def __init__(self, *args, **kwargs): + self.logger = logging.getLogger("tamarco.profiler") + self.profiler = None + self.profiler_file_path = None + self.cpu_watcher_task = None + + super().__init__(*args, **kwargs) + + async def start(self): + if await self.is_profiler_enabled(): + self.profiler_file_path = f"/tmp/{self.microservice.name}_profile" + self._initialize_profiler() + self.logger.info( + f"Started Profiler resource with file: {self.profiler_file_path} and " + f"time between snapshots: {TIME_BETWEEN_SNAPSHOTS}" + ) + self.cpu_watcher_task = asyncio.ensure_future(self.cpu_watcher(), loop=self.microservice.loop) + else: + self.logger.debug("Profiler resource disabled") + await super().start() + + async def stop(self): + if self.profiler: + self._stop_profiler() + if self.cpu_watcher_task: + self.cpu_watcher_task.cancel() + await super().stop() + + def _initialize_profiler(self): + self.profiler = Profile() + self.profiler.enable() + + def _stop_profiler(self): + assert self.profiler, "Trying to stop a profiler when it isn't initialized" + self.profiler.disable() + self.profiler = None + + def _restart_profiler(self): + self._stop_profiler() + self._initialize_profiler() + + async def is_profiler_enabled(self) -> bool: + try: + microservices_with_profiler = await self.settings.get("microservices_with_profiler") + except SettingNotFound: + return False + else: + return self.microservice.name in microservices_with_profiler + + async def cpu_watcher(self): + while True: + await asyncio.sleep(TIME_BETWEEN_SNAPSHOTS) + self.save_profile_snapshot_to_file() + + def save_profile_snapshot_to_file(self): + try: + with open(self.profiler_file_path, "a") as profile_file: + self.logger.debug(f"Opened profile file {self.profiler_file_path}. 
Saving profile information")
+                profile_file.write(f"\n\n###############\n# DATE : {datetime.datetime.now()}\n###############\n")
+
+                stats = pstats.Stats(self.profiler, stream=profile_file)
+                stats.sort_stats("tottime")
+                stats.print_stats(100)
+
+            self._restart_profiler()
+        except Exception:
+            self.logger.warning("Unexpected exception saving profile information")
diff --git a/tamarco/resources/io/__init__.py b/tamarco/resources/io/__init__.py
new file mode 100755
index 0000000..e69de29
diff --git a/tamarco/resources/io/http/__init__.py b/tamarco/resources/io/http/__init__.py
new file mode 100755
index 0000000..e69de29
diff --git a/tamarco/resources/io/http/resource.py b/tamarco/resources/io/http/resource.py
new file mode 100755
index 0000000..2bebe1b
--- /dev/null
+++ b/tamarco/resources/io/http/resource.py
@@ -0,0 +1,194 @@
+import asyncio
+import copy
+import logging
+from collections import OrderedDict
+
+import aiohttp
+import ujson as json
+from cachetools import TTLCache
+from sanic import Sanic
+from sanic_cors import CORS
+
+from tamarco.core.settings.settings import SettingNotFound
+from tamarco.resources.bases import BaseResource
+from tamarco.resources.basic.status.status_codes import StatusCodes
+
+
+class HTTPErrorCacheMiddlewareEnabled(Exception):
+    pass
+
+
+class HTTPCacheMiddleware:
+    def __init__(self, maxsize=1_000, ttl=60, header_keys=None):
+        """
+        Args:
+            maxsize (int): Cache max size.
+            ttl (int): Time To Live of the keys in the cache.
+            header_keys (list): List of the headers that will be part of the cache key.
+        """
+        self.cache = TTLCache(maxsize, ttl)
+        self.maxsize = maxsize
+        self.ttl = ttl
+        self.header_keys = header_keys if isinstance(header_keys, list) else []
+
+    def _get_cache_key(self, request):
+        headers = self._get_json_headers(request)
+        return request.url + headers
+
+    async def middleware_request(self, request):
+        """Return the cached response for the request, if there is one.
+
+        Args:
+            request (Request): Request to intercept.
+        """
+        cache_key = self._get_cache_key(request)
+        try:
+            response = copy.deepcopy(self.cache[cache_key])
+        except KeyError:
+            return
+        response.headers["x-cache"] = "HIT"
+        return response
+
+    async def middleware_response(self, request, response):
+        """Store the response in the cache before it is returned.
+
+        Args:
+            request (Request): Request to intercept.
+            response (HTTPResponse): Response to intercept.
+        """
+        if "x-cache" not in response.headers:
+            response.headers["x-cache"] = "MISS"
+            cache_key = self._get_cache_key(request)
+            self.cache[cache_key] = copy.deepcopy(response)
+
+    def _get_json_headers(self, request):
+        """Serialize the headers that are part of the cache key.
+
+        Args:
+            request (Request): Request to intercept.
+        """
+        if not self.header_keys:
+            return ""
+
+        header_dict = {}
+        for key in self.header_keys:
+            header_dict[key] = request.headers.get(key)
+        ordered_dict = OrderedDict(sorted(header_dict.items()))
+        return json.dumps(ordered_dict)
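# A usage sketch mirroring what enable_cache_middleware does in
# HTTPServerResource below (hypothetical app; responses are cached per URL plus
# the selected headers, so two requests differing only in "Accept-Language" get
# separate cache entries):
from sanic import Sanic

cached_app = Sanic("cached_api")
cache = HTTPCacheMiddleware(maxsize=500, ttl=30, header_keys=["Accept-Language"])
cached_app.middleware("request")(cache.middleware_request)
cached_app.middleware("response")(cache.middleware_response)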
+ """ + if not self.header_keys: + return "" + + header_dict = {} + for key in self.header_keys: + header_dict[key] = request.headers.get(key) + ordered_dict = OrderedDict(sorted(header_dict.items())) + return json.dumps(ordered_dict) + + +class HTTPServerResource(BaseResource): + depends_on = [] + loggers_names = ["tamarco.http"] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.app = Sanic("http_app", log_config=None) + CORS( + self.app, + automatic_options=True, + origins="*", + supports_credentials=True, + methods=["GET", "POST", "PATCH", "PUT", "DELETE", "HEAD", "OPTIONS"], + allow_headers=[ + "Authorization", + "Content-Type", + "Accept", + "Origin", + "User-Agent", + "DNT", + "Cache-Control", + "X-Mx-ReqToken", + "Keep-Alive", + "X-Requested-With", + "If-Modified-Since", + ], + ) + self.logger = logging.getLogger("tamarco.http") + self._server_task = None + self.status_codes = StatusCodes + self.middleware_cache = HTTPCacheMiddleware() + + def set_cache_middleware(self, maxsize=None, ttl=None, header_keys=None): + if maxsize is not None and maxsize != self.middleware_cache.maxsize: + self.middleware_cache.maxsize = maxsize + + if ttl is not None and ttl != self.middleware_cache.ttl: + self.middleware_cache.ttl = ttl + + if header_keys: + self.middleware_cache.header_keys = header_keys + + self.middleware_cache = HTTPCacheMiddleware( + self.middleware_cache.maxsize, self.middleware_cache.ttl, self.middleware_cache.header_keys + ) + + def enable_cache_middleware(self): + try: + self.app.middleware("request")(self.middleware_cache.middleware_request) + self.app.middleware("response")(self.middleware_cache.middleware_response) + except Exception: + self.logger.warning("Unexpected exception enabling cache in HTTP Server resource", exc_info=True) + raise HTTPErrorCacheMiddlewareEnabled() + + async def start(self): + self.app.config.KEEP_ALIVE = await self.settings.get("keep_alive_connections", False) + try: + self._server_task = asyncio.ensure_future( + self.app.create_server( + host=await self.settings.get("host"), + port=await self.settings.get("port"), + debug=await self.settings.get("debug"), + return_asyncio_server=True, + ), + loop=self.microservice.loop, + ) + + cache_enabled = await self.settings.get("cache_enabled", False) + if cache_enabled: + maxsize = await self.settings.get("cache_maxsize", None) + ttl = await self.settings.get("cache_ttl", None) + header_keys = await self.settings.get("cache_header_keys", None) + self.set_cache_middleware(maxsize, ttl, header_keys) + self.enable_cache_middleware() + except SettingNotFound: + self.logger.error("The HTTP Server resource settings are missing") + await super().start() + + def add_endpoint(self, uri, endpoint_handler): + """ + Args: + uri (str): Uri of the endpoint. + endpoint_handler: Handler of the endpoint. 
+ """ + self.app.route(uri)(endpoint_handler) + + async def stop(self): + self.logger.info(f"Stopping HTTP Server resource: {self.name}") + self._status = StatusCodes.STOPPING + if self._server_task is not None: + self._server_task.cancel() + await super().stop() + + async def status(self): + return {"status": self.status_codes.STARTED} + + +class HTTPClientResource(BaseResource): + depends_on = [] + loggers_names = ["tamarco.http"] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.logger = logging.getLogger("tamarco.http") + self.session = None + self.status_codes = StatusCodes + + async def start(self): + self.session = aiohttp.ClientSession(loop=self.microservice.loop) + await super().start() + + async def stop(self): + self.logger.info(f"Stopping HTTP Client resource: {self.name}") + await self.session.close() + await super().stop() + + async def status(self): + return {"status": self.status_codes.STARTED} diff --git a/tamarco/tamarco b/tamarco/tamarco new file mode 100755 index 0000000..a1a6b61 --- /dev/null +++ b/tamarco/tamarco @@ -0,0 +1,22 @@ +#!/usr/bin/env python + +""" +CLI to manage tamarco stuff for example the scaffolding of a new microservice or the launch of all the ci stuff +""" +import fire + +import tamarco +from tamarco.tools import ci +from tamarco.tools import etcd +from tamarco.tools.project import start_project + +tamarco_client = { + 'ci': ci.main, + 'start_project': start_project.main, + 'etcd': etcd.main, + 'version': lambda: print(f"tamarco version {tamarco.__version__}"), +} + + +if __name__ == "__main__": + fire.Fire(tamarco_client) diff --git a/tamarco/tools/__init__.py b/tamarco/tools/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/tools/ci.py b/tamarco/tools/ci.py new file mode 100755 index 0000000..1eb60ce --- /dev/null +++ b/tamarco/tools/ci.py @@ -0,0 +1,123 @@ +""" +This file is a script that launch all the Continuous Integration tools in the +tamarco project +""" +import os +import subprocess + +PROJECT_ROOT = "." 
diff --git a/tamarco/tools/__init__.py b/tamarco/tools/__init__.py
new file mode 100755
index 0000000..e69de29
diff --git a/tamarco/tools/ci.py b/tamarco/tools/ci.py
new file mode 100755
index 0000000..1eb60ce
--- /dev/null
+++ b/tamarco/tools/ci.py
@@ -0,0 +1,123 @@
+"""
+This script launches all the Continuous Integration tools of the tamarco project.
+"""
+import os
+import subprocess
+
+PROJECT_ROOT = "."
+
+REPORT_PATH = os.path.join(PROJECT_ROOT, "reports")
+
+FLAKE8_REPORT_HTML_DIR = os.path.join(REPORT_PATH, "flake8-html")
+FLAKE8_REPORT_FILE = os.path.join(REPORT_PATH, "flake8.report")
+SLOCCOUNT_REPORT_FILE = os.path.join(REPORT_PATH, "sloccount.report")
+RADON_REPORT_FILE_XML = os.path.join(REPORT_PATH, "ccm.xml")
+RADON_REPORT_FILE = os.path.join(REPORT_PATH, "ccm.report")
+
+
+def create_reports_dir():
+    subprocess.call(["mkdir", REPORT_PATH])
+
+
+def write_stdout_to_file(file_path, stdout):
+    with open(file_path, "w") as report_file:
+        decoded_stdout = stdout.decode()
+        report_file.write(decoded_stdout)
+
+
+class Pytest:
+    @classmethod
+    def run(cls):
+        try:
+            import pytest
+        except ImportError:  # pragma: no cover
+            import logging
+
+            logging.getLogger("tamarco.init").error(
+                "Tamarco doesn't have support for CI, please reinstall tamarco with support for it:"
+                " > pip install tamarco[ci]"
+            )
+            raise
+
+        pytest_args = [f"--junitxml={REPORT_PATH}/junit.xml", "--verbose"]
+
+        pytest.main(pytest_args)
+
+
+class Flake8:
+    @classmethod
+    def run(cls, path):
+        cls.run_flake8_html(path)
+        cls.run_flake8_file(path)
+
+    @staticmethod
+    def run_flake8_file(path):
+        print("Running flake8 file report ...")
+        flake8_process = subprocess.Popen(["flake8", path], stdout=subprocess.PIPE)
+        stdout, stderr = flake8_process.communicate()
+        write_stdout_to_file(FLAKE8_REPORT_FILE, stdout)
+
+    @staticmethod
+    def run_flake8_html(path):
+        print("Running flake8 html report ...")
+        flake_8_html_report_command = f"flake8 --format=html --htmldir={FLAKE8_REPORT_HTML_DIR} {path}".split()
+        flake8_html_process = subprocess.Popen(flake_8_html_report_command, stdout=subprocess.PIPE)
+        flake8_html_process.wait()
+
+
+class Sloccount:
+    @staticmethod
+    def run(path: str):
+        have_sloccount = not subprocess.call(["which", "sloccount"])
+        if have_sloccount:
+            print("Running sloccount ...")
+            sloccount = subprocess.Popen(
+                ["sloccount", "--duplicates", "--wide", "--details", path], stdout=subprocess.PIPE
+            )
+            stdout, stderr = sloccount.communicate()
+            write_stdout_to_file(SLOCCOUNT_REPORT_FILE, stdout)
+        else:
+            print("sloccount not found in the system, install it with your package manager to be able to use it")
+
+
+class Radon:
+    @classmethod
+    def run(cls, path: str):
+        cls.run_radon_txt(path)
+        cls.run_radon_xml(path)
+
+    @staticmethod
+    def run_radon_txt(path: str):
+        print("Running radon for a human readable report ...")
+        radon = subprocess.Popen(["radon", "cc", "-a", "-i", "project_template", path], stdout=subprocess.PIPE)
+        stdout, stderr = radon.communicate()
+        write_stdout_to_file(RADON_REPORT_FILE, stdout)
+
+    @staticmethod
+    def run_radon_xml(path: str):
+        print("Running radon for an xml report ...")
+        radon = subprocess.Popen(["radon", "cc", "-a", "--xml", "-i", "project_template", path], stdout=subprocess.PIPE)
+        stdout, stderr = radon.communicate()
+        with open(RADON_REPORT_FILE_XML, "w") as radon_report:
+            radon_report.write('')
+            radon_report.write(stdout.decode())
+
+
+def main(
+    path: str, no_pytest: bool = False, no_flake8: bool = False, no_sloccount: bool = False, no_radon: bool = False
+):
+    create_reports_dir()
+
+    if not no_pytest:
+        Pytest.run()
+
+    if not no_flake8:
+        Flake8.run(path)
+
+    if not no_sloccount:
+        Sloccount.run(path)
+
+    if not no_radon:
+        Radon.run(path)
diff --git a/tamarco/tools/etcd.py b/tamarco/tools/etcd.py
new file mode 100644
index 0000000..00afe18
--- /dev/null
+++ b/tamarco/tools/etcd.py
@@ -0,0 +1,52 @@
+from pprint import pprint
+
+from tamarco.core.settings.utils import parse_dir_response
+from tamarco.core.settings.utils.etcd_tool import EtcdTool + + +def write_yml(yml_file, host="127.0.0.1", port=2379): + """Write a yml file to etcd. + + Args: + yml_file (str): Path to the yml file. + host (str): Etcd host. + port (int): Etcd port. + """ + print(f"Writing the yml file {yml_file} in ETCD. Host: {host}. Port: {port}.") + etcd_tool = EtcdTool(host=host, port=port) + etcd_tool.load(yml_file) + print("Write finished correctly.") + + +def read(key, recursive=True, host="127.0.0.1", port=2379): + """Read a file or a directory in etcd. + + Args: + key (str): Key or directory to read. + recursive (bool): Recursive read when the key is a directory. + host (str): Etcd host. + port (int): Etcd port. + """ + print(f"Reading {key} from {host}:{port}") + etcd_tool = EtcdTool(host=host, port=port) + response = etcd_tool.read(key=key, recursive=recursive) + parsed_response = parse_dir_response(response, key) + pprint(parsed_response) + + +def delete(key, recursive=True, host="127.0.0.1", port=2379): + """Delete a file or a directory in etcd. + + Args: + key (str): Key or directory to delete. + recursive (bool): Recursive delete when the key is a directory. + host (str): Etcd host. + port (int): Etcd port. + """ + print(f"Deleting {key} in {host}:{port}") + etcd_tool = EtcdTool(host=host, port=port) + etcd_tool.delete(key=key, recursive=recursive) + print("Delete finished correctly.") + + +main = {"write_yml": write_yml, "read": read, "delete": delete} diff --git a/tamarco/tools/project/project_template/cookiecutter.json b/tamarco/tools/project/project_template/cookiecutter.json new file mode 100755 index 0000000..8cf2d2c --- /dev/null +++ b/tamarco/tools/project/project_template/cookiecutter.json @@ -0,0 +1,8 @@ +{ + "project_name": "tamarco_microservice", + "project_slug": "my_microservice", + "full_name": "tamarco user", + "email": "myemail@myemail.com", + "version": "0.1.0", + "project_short_description": "My cool microservice" +} \ No newline at end of file diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/.coveragerc b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/.coveragerc new file mode 100755 index 0000000..17b9963 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/.coveragerc @@ -0,0 +1,35 @@ +# .coveragerc to control coverage.py +[run] +branch = True +omit = *virtualenv* + *docs* + *test* + *settings* + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + raise NotImplemented + pass + + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: + +ignore_errors = True + +[xml] +output = reports/coverage.xml + +[html] +directory = reports/coverage_html diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/.gitignore b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/.gitignore new file mode 100755 index 0000000..4635b7e --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/.gitignore @@ -0,0 +1,70 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ 
+downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +reports + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# pyenv python configuration file +.python-version + +# Virtual environments +virtualenv +venv + +# Pycharm +.idea diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/Dockerfile b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/Dockerfile new file mode 100755 index 0000000..f5c9848 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/Dockerfile @@ -0,0 +1,15 @@ +# {{ cookiecutter.project_name }} + +FROM python:3.6.0 + +ADD . /opt/{{ cookiecutter.project_slug }} + +RUN mkdir -p /var/log/{{ cookiecutter.project_slug }} + +RUN apt-get -qq update \ + && pip3 install -r /opt/{{ cookiecutter.project_slug }}/requirements.txt \ + && apt-get -qq autoremove \ + && apt-get autoclean \ + && rm -rf /var/lib/apt/lists/* + +CMD python3 /opt/{{ cookiecutter.project_slug }}/app.py diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/HISTORY.md b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/HISTORY.md new file mode 100755 index 0000000..0899282 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/HISTORY.md @@ -0,0 +1,6 @@ +# History + + +## {{ cookiecutter.version }} ({% now 'local' %}) + +* First release. diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/README.md b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/README.md new file mode 100755 index 0000000..4fff553 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/README.md @@ -0,0 +1,12 @@ + +# {{ cookiecutter.project_name }} + +{{ cookiecutter.project_short_description }} + +## Features + + +* TODO + +## How to use + diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/app.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/app.py new file mode 100755 index 0000000..6253767 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/app.py @@ -0,0 +1,5 @@ +from {{ cookiecutter.project_slug }}.microservice import main + + +if __name__ == '__main__': + main() diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/Makefile b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/Makefile new file mode 100755 index 0000000..83dad38 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. 
Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	".hhp project file in $(BUILDDIR)/htmlhelp."
+ +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/{{ cookiecutter.project_slug }}.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/{{ cookiecutter.project_slug }}.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/{{ cookiecutter.project_slug }}" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/{{ cookiecutter.project_slug }}" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/authors.rst b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/authors.rst new file mode 100755 index 0000000..06770ce --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/authors.rst @@ -0,0 +1,13 @@ +======= +Credits +======= + +Development Lead +---------------- + +* {{ cookiecutter.full_name }} <{{ cookiecutter.email }}> + +Contributors +------------ + +None yet. Why not be the first? diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/conf.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/conf.py new file mode 100755 index 0000000..79fd2e4 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/conf.py @@ -0,0 +1,275 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# {{ cookiecutter.project_slug }} documentation build configuration file, created by +# sphinx-quickstart on Tue Jul 9 22:26:36 2013. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import sys + +# If extensions (or modules to document with autodoc) are in another +# directory, add these directories to sys.path here. If the directory is +# relative to the documentation root, use os.path.abspath to make it +# absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# Get the project root dir, which is the parent dir of this +cwd = os.getcwd() +project_root = os.path.dirname(cwd) + +# Insert the project root dir as the first element in the PYTHONPATH. +# This lets us ensure that the source package is imported, and that its +# version is used. +sys.path.insert(0, project_root) + +import {{ cookiecutter.project_slug }} + +# -- General configuration --------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'mkdsupport'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = ['.rst', ".md"] + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'{{ cookiecutter.project_name }}' +copyright = u"{% now 'local', '%Y' %}, {{ cookiecutter.full_name }}" + +# The version info for the project you're documenting, acts as replacement +# for |version| and |release|, also used in various other places throughout +# the built documents. +# +# The short X.Y version. +version = {{ cookiecutter.project_slug }}.__version__ +# The full version, including alpha/beta/rc tags. +release = {{ cookiecutter.project_slug }}.__version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+#language = None + +# There are two options for replacing |today|: either, you set today to +# some non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built +# documents. +#keep_warnings = False + + +# -- Options for HTML output ------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a +# theme further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as +# html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the +# top of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon +# of the docs. This file should be a Windows icon file (.ico) being +# 16x16 or 32x32 pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) +# here, relative to this directory. They are copied after the builtin +# static files, so a file named "default.css" will overwrite the builtin +# "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names +# to template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. +# Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. +# Default is True.
+#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages +# will contain a <link> tag referring to it. The value of this option +# must be the base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = '{{ cookiecutter.project_slug }}doc' + + +# -- Options for LaTeX output ------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto/manual]). +latex_documents = [ + ('index', '{{ cookiecutter.project_slug }}.tex', + u'{{ cookiecutter.project_name }} Documentation', + u'{{ cookiecutter.full_name }}', 'manual'), +] + +# The name of an image file (relative to this directory) to place at +# the top of the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings +# are parts, not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output ------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', '{{ cookiecutter.project_slug }}', + u'{{ cookiecutter.project_name }} Documentation', + [u'{{ cookiecutter.full_name }}'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ---------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', '{{ cookiecutter.project_slug }}', + u'{{ cookiecutter.project_name }} Documentation', + u'{{ cookiecutter.full_name }}', + '{{ cookiecutter.project_slug }}', + 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/history.rst b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/history.rst new file mode 100755 index 0000000..2506499 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/history.rst @@ -0,0 +1 @@ +..
include:: ../HISTORY.rst diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/index.rst b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/index.rst new file mode 100755 index 0000000..5835f34 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/index.rst @@ -0,0 +1,20 @@ +Welcome to {{ cookiecutter.project_name }}'s documentation! +=========================================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + ../README.md + installation + usage + authors + ../HISTORY.md + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/installation.rst b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/installation.rst new file mode 100755 index 0000000..f96f5a1 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/installation.rst @@ -0,0 +1,14 @@ +.. highlight:: shell + +============ +Installation +============ + + +Stable release +-------------- + + + +From sources +------------ diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/make.bat b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/make.bat new file mode 100755 index 0000000..6746ac8 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/make.bat @@ -0,0 +1,242 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^<target^>` where ^<target^> is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo.
+ echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\{{ cookiecutter.project_slug }}.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\{{ cookiecutter.project_slug }}.qhc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. 
The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +:end diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/mkdsupport.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/mkdsupport.py new file mode 100755 index 0000000..445ef62 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/mkdsupport.py @@ -0,0 +1,43 @@ +"""A Sphinx extension adding support for sphinx.ext.autodoc with modules containing docstrings in Markdown. +""" + +import pypandoc +from recommonmark.transform import AutoStructify + +# Since pypandoc.convert_text always returns strings ending with \r\n, the separator should also be set to it +SEP = u'\r\n' + + +def setup(app): + """Add the extension's default config value and connect a handler to the ``autodoc-process-docstring`` event""" + + # The 'rebuild' parameter should be set to 'html' rather than 'env' since this extension needs a full rebuild of + # the HTML documents + app.add_config_value('mkdsupport_use_parser', 'markdown_github', 'html') + app.connect('autodoc-process-docstring', pandoc_process) + + # enable recommonmark's extended directives for the rst transformation of mkd files + # http://recommonmark.readthedocs.io/en/latest/auto_structify.html#autostructify-component + app.add_transform(AutoStructify) + + +def pandoc_process(app, what, name, obj, options, lines): + """Convert docstrings in Markdown into reStructuredText using pandoc + """ + + if not lines: + return None + if lines[0] != "mkd": + return + + input_format = app.config.mkdsupport_use_parser + output_format = 'rst' + + # Since the default encoding for sphinx.ext.autodoc is unicode and pypandoc.convert_text, which always returns a + # unicode string, expects a unicode or utf-8 encoded string, there is no need to deal with encodings + text = SEP.join(lines) + text = pypandoc.convert_text(text, output_format, format=input_format) + + # The 'lines' in Sphinx is a list of strings and the value should be changed in place + del lines[:] + lines.extend(text.split(SEP))
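For context, a minimal sketch of a module docstring that mkdsupport.py above would convert (module and function names are hypothetical; per pandoc_process, only docstrings whose first line is the sentinel "mkd" are translated):

    # example_module.py (hypothetical, not part of this patch)
    def frobnicate():
        """mkd
        # frobnicate

        A function documented in **Markdown**; pypandoc rewrites this
        docstring to reST when sphinx.ext.autodoc processes the module.
        """

diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/readme.rst b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/readme.rst new file mode 100755 index 0000000..72a3355 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/readme.rst @@ -0,0 +1 @@ +.. 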
include:: ../README.rst diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/usage.rst b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/usage.rst new file mode 100755 index 0000000..4add6fb --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/docs/usage.rst @@ -0,0 +1,7 @@ +===== +Usage +===== + +To use {{ cookiecutter.project_name }} in a project:: + + import {{ cookiecutter.project_slug }} diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/requirements.txt b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/requirements.txt new file mode 100644 index 0000000..4b73cbf --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/requirements.txt @@ -0,0 +1 @@ +tamarco \ No newline at end of file diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/setup.cfg b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/setup.cfg new file mode 100755 index 0000000..18c65d9 --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/setup.cfg @@ -0,0 +1,26 @@ +[flake8] +exclude = + .git, + dist, + virtualenv/*, + __pycache__, + env, + docs, + tests, + examples, + contrib, + pycharm, + reports, + setup.py +max-complexity = 10 +statistics = True +show-source = True +max-line-length = 120 +count = True + +[tool:pytest] +python_files=*.py +norecursedirs=virtualenv reports docs requirements +addopts = --junitxml=./reports/junit.xml + --verbose + --capture=no diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/__init__.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/conftest.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/conftest.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/custom_settings/settings.yml b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/custom_settings/settings.yml new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/functional/__init__.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/functional/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/unit/__init__.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/tests/unit/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/__init__.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/__init__.py new file mode 100755 index 0000000..f102a9c --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0.1" diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/logic/__init__.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ 
cookiecutter.project_slug }}/logic/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/meters.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/meters.py new file mode 100755 index 0000000..e69de29 diff --git a/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/microservice.py b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/microservice.py new file mode 100755 index 0000000..436201d --- /dev/null +++ b/tamarco/tools/project/project_template/{{ cookiecutter.project_name }}/{{ cookiecutter.project_slug }}/microservice.py @@ -0,0 +1,10 @@ +from tamarco.core.microservice import Microservice + + +class MyMicroservice(Microservice): + name = "{{cookiecutter.project_slug}}" + + +def main(): + ms = MyMicroservice() + ms.run() diff --git a/tamarco/tools/project/start_project.py b/tamarco/tools/project/start_project.py new file mode 100755 index 0000000..5a95296 --- /dev/null +++ b/tamarco/tools/project/start_project.py @@ -0,0 +1,11 @@ +import os + +from cookiecutter.main import cookiecutter + + +def main(directory: str = "."): + print(f"Path of the new microservice project: {directory}") + self_path = os.path.dirname(os.path.realpath(__file__)) + project_template_path = os.path.join(self_path, "project_template") + print(f"Project template path: {project_template_path}") + cookiecutter(template=project_template_path, output_dir=directory) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100755 index 0000000..bf98abc --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,69 @@ +import asyncio +import functools +import logging +import os + +import pytest +import yaml + +from tamarco.core.patterns import Singleton + +ETCD_CONFIGURATION = {"host": "127.0.0.1", "port": 2379} +SETTINGS_FILE_PATH = "tests/custom_settings/settings.yml" + + +@pytest.fixture +def tests_root_path(): + return os.path.split(os.path.abspath(__file__))[0] + + +@pytest.fixture +def project_root_path(tests_root_path): + return os.path.split(tests_root_path)[0] + + +@pytest.fixture +def inject_in_env_settings_file_path(): + os.environ["TAMARCO_YML_FILE"] = SETTINGS_FILE_PATH + yield + os.environ["TAMARCO_YML_FILE"] = "" + + +@pytest.fixture +def add_logging(): + logger = logging.getLogger("s73_logging") + logger.addHandler(logging.StreamHandler()) + logger.setLevel(logging.DEBUG) + yield + logger.handlers = [] + + +@pytest.fixture +def settings(): + return get_settings_from_yaml_file(SETTINGS_FILE_PATH) + + +def get_settings_from_yaml_file(settings_path: str): + with open(settings_path, "r") as stream: + yaml_file = yaml.full_load(stream) + return yaml_file + + +def clear_singleton_instance(instance_name): + for instance in Singleton._instances.copy(): + try: + if instance.name == instance_name: + del Singleton._instances[instance] + except Exception: + pass + + +def decorator_async(func): + @functools.wraps(func) + def wrap_func(*args, **kwargs): + async def b(): + return func(*args, **kwargs) + + return asyncio.get_event_loop().run_until_complete(b()) + + return wrap_func diff --git a/tests/custom_settings/settings.yml b/tests/custom_settings/settings.yml new file mode 100755 index 0000000..66176c3 --- /dev/null +++ 
b/tests/custom_settings/settings.yml @@ -0,0 +1,34 @@ +system: + deploy_name: test_tamarco + system_configuration: + profile: DEVELOP + logging: + profile: DEVELOP + file_path: false + stdout: true + redis: + enabled: false + host: "127.0.0.1" + port: 7006 + password: '' + ssl: false + microservices: + test: + logging: + profile: DEVELOP + file: false + stdout: true + resources: + metrics: + handlers: + file: + path: /tmp/metrics + collect_frequency: 1 + status: + host: 127.0.0.1 + port: 5747 + debug: False + http: + host: 127.0.0.1 + port: 5747 + debug: false \ No newline at end of file diff --git a/tests/functional/__init__.py b/tests/functional/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py new file mode 100644 index 0000000..8ea3846 --- /dev/null +++ b/tests/functional/conftest.py @@ -0,0 +1,41 @@ +import subprocess +from time import sleep + +import pytest + +from tamarco.tools.etcd import write_yml +from ..conftest import SETTINGS_FILE_PATH + + +def local_command(command): + print(f"\nLaunching command: {command}") + process = subprocess.Popen(command, shell=True) + return_code = process.wait() + return process, return_code + + +def docker_compose_up(): + print("Bringing up the docker compose") + command = f"docker-compose up -d" + _, return_code = local_command(command) + if return_code != 0: + pytest.fail(msg="Failed setting up the containers.") + + +def docker_compose_down(): + print("Removing all containers") + command = f"docker-compose kill && docker-compose down" + _, return_code = local_command(command) + if return_code != 0: + print("Warning: Error stopping all the containers.") + else: + print("Removed docker containers.") + + +@pytest.fixture(scope="session", autouse=True) +def docker_compose(): + docker_compose_up() + sleep(10) + write_yml(SETTINGS_FILE_PATH) + yield + docker_compose_down() diff --git a/tests/functional/core/__init__.py b/tests/functional/core/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/core/settings/__init__.py b/tests/functional/core/settings/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/core/settings/conftest.py b/tests/functional/core/settings/conftest.py new file mode 100755 index 0000000..66cde75 --- /dev/null +++ b/tests/functional/core/settings/conftest.py @@ -0,0 +1,29 @@ +import os + +import pytest + +from tamarco.core.settings.backends import EtcdSettingsBackend +from tests.conftest import ETCD_CONFIGURATION +from tests.conftest import get_settings_from_yaml_file + +TEST_PATH = os.path.realpath(os.path.dirname(__file__)) + +settings_backends = [(EtcdSettingsBackend, [ETCD_CONFIGURATION])] + + +@pytest.fixture +def loaded_test_settings(event_loop): + yaml_settings = get_settings_from_yaml_file(os.path.join(TEST_PATH, "files/settings.yaml")) + + settings = EtcdSettingsBackend({"host": "127.0.0.1", "port": 2379}) + + async def set_settings(): + for key, value in yaml_settings.items(): + try: + await settings.set(key, value) + except Exception: + # Exception configuring yaml in etcd: {yaml_settings} + pass + + event_loop.run_until_complete(set_settings()) + return settings diff --git a/tests/functional/core/settings/files/__init__.py b/tests/functional/core/settings/files/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/core/settings/files/settings.json b/tests/functional/core/settings/files/settings.json new file mode 100755 index 0000000..1a5d053 --- 
/dev/null +++ b/tests/functional/core/settings/files/settings.json @@ -0,0 +1,24 @@ +{ + "redis": { + "port": 7006, + "host": "127.0.0.1" + }, + "elasticsearch": [ + "127.0.0.1:9300", + "127.0.0.1:9200" + ], + "rabbitmq": { + "host": "127.0.0.1", + "port": 5677 + }, + "signaler": { + "redis": { + "port": 7006, + "host": "127.0.0.1" + }, + "rabbitmq": { + "host": "127.0.0.1", + "port": 5677 + } + } +} diff --git a/tests/functional/core/settings/files/settings.py b/tests/functional/core/settings/files/settings.py new file mode 100755 index 0000000..707eb58 --- /dev/null +++ b/tests/functional/core/settings/files/settings.py @@ -0,0 +1,7 @@ +redis = {"port": 7006, "host": "127.0.0.1"} + +elasticsearch = ["127.0.0.1:9200", "127.0.0.1:9300"] + +rabbitmq = {"host": "127.0.0.1", "port": 5677} + +signaler = {"rabbitmq": {"host": "127.0.0.1", "port": 5677}, "redis": {"port": 7006, "host": "127.0.0.1"}} diff --git a/tests/functional/core/settings/files/settings.yaml b/tests/functional/core/settings/files/settings.yaml new file mode 100755 index 0000000..f81eff9 --- /dev/null +++ b/tests/functional/core/settings/files/settings.yaml @@ -0,0 +1,16 @@ +redis: + port: 7006 + host: 127.0.0.1 +elasticsearch: + - 127.0.0.1:9300 + - 127.0.0.1:9200 +rabbitmq: + host: 127.0.0.1 + port: 5677 +signaler: + redis: + port: 7006 + host: 127.0.0.1 + rabbitmq: + host: 127.0.0.1 + port: 5677 diff --git a/tests/functional/core/settings/test_backends.py b/tests/functional/core/settings/test_backends.py new file mode 100755 index 0000000..52ff3b8 --- /dev/null +++ b/tests/functional/core/settings/test_backends.py @@ -0,0 +1,115 @@ +import asyncio + +import pytest + +from tamarco.core.settings.backends import EtcdSettingsBackend +from tests.functional.core.settings.conftest import settings_backends + + +@pytest.mark.asyncio +@pytest.mark.parametrize("settings_class,init_args", settings_backends) +async def test_settings_get(settings_class, init_args, event_loop, loaded_test_settings): + settings = settings_class(*init_args) + redis_port = await settings.get("redis.port") + assert redis_port == 7006 + + redis_host = await settings.get("redis.host") + assert redis_host == "127.0.0.1" + + redis_conf = await settings.get("redis") + assert redis_conf["host"] == "127.0.0.1" + + elasticsearch = await settings.get("elasticsearch") + assert len(elasticsearch) == 2 + assert all("127.0.0.1" in x for x in elasticsearch) + + not_a_key = await settings.get("not_a_key", default=99) + assert not_a_key == 99 + + +@pytest.mark.asyncio +@pytest.mark.parametrize("settings_class,init_args", settings_backends) +async def test_settings_get_wrong_key(settings_class, init_args, event_loop): + settings = settings_class(loop=event_loop, *init_args) + with pytest.raises(KeyError): + await settings.get("non_existing_key") + + +@pytest.mark.asyncio +@pytest.mark.parametrize("settings_class,init_args", settings_backends) +async def test_settings_get_multiple_dict_level(settings_class, init_args, event_loop, loaded_test_settings): + settings = settings_class(loop=event_loop, *init_args) + + signaler = await settings.get("signaler") + + assert signaler["redis"]["port"] == 7006 + + +@pytest.mark.asyncio +@pytest.mark.parametrize("settings_class,init_args", settings_backends) +async def test_settings_set(settings_class, init_args, event_loop, add_logging): + settings = settings_class(loop=event_loop, *init_args) + await settings.set("redis.port", 9999) + assert (await settings.get("redis.port")) == 9999 + + await settings.set("redis.host", "127.0.0.2") + 
assert (await settings.get("redis.host")) == "127.0.0.2" + + await settings.set("logging.test.loggers", "StreamLogger") + assert (await settings.get("logging.test.loggers")) == "StreamLogger" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("settings_class,init_args", settings_backends) +async def test_settings_delete(settings_class, init_args, event_loop, loaded_test_settings): + settings = settings_class(loop=event_loop, *init_args) + await settings.delete("redis.port") + with pytest.raises(KeyError): + await settings.get("redis.port") + + await settings.delete("elasticsearch") + with pytest.raises(KeyError): + await settings.get("elasticsearch") + + +@pytest.mark.asyncio +@pytest.mark.parametrize("settings_class,init_args", settings_backends) +async def test_settings_watch_set(settings_class, init_args, event_loop): + settings = settings_class(loop=event_loop, *init_args) + + fut = asyncio.Future() + + async def callback(key, value): + fut.set_result((key, value)) + + await settings.set("redis.port", 8888) + await settings.watch("redis.port", callback) + await asyncio.sleep(0.1) + await settings.set("redis.port", 9999) + key, value = await fut + assert key == "redis.port" + assert value == 9999 + + +@pytest.mark.asyncio +@pytest.mark.parametrize("settings_class,init_args", settings_backends) +async def test_settings_watch_delete(settings_class, init_args, event_loop): + settings = settings_class(loop=event_loop, *init_args) + + fut = asyncio.Future() + + async def callback(key, value): + fut.set_result((key, value)) + + await settings.watch("redis.port", callback) + await asyncio.sleep(0.1) + await settings.delete("redis.port") + key, value = await fut + assert key == "redis.port" + assert value is None + + +@pytest.mark.asyncio +async def test_etcd_settings_check_machines(): + backend = EtcdSettingsBackend({"host": "127.0.0.1", "port": 2379}) + assert await backend._check_servers() diff --git a/tests/functional/core/settings/utils/__init__.py b/tests/functional/core/settings/utils/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/core/settings/utils/settings.yml b/tests/functional/core/settings/utils/settings.yml new file mode 100644 index 0000000..16379f3 --- /dev/null +++ b/tests/functional/core/settings/utils/settings.yml @@ -0,0 +1,24 @@ +key_1: 1000 +key_2: "1000" +key_3: 1000.0 +key_4: !!float 1000 +key_5: !!str 1000 +key_6: !!str Yes +key_7: Yes +one_level: + key_1: 1000 + key_2: "1000" + key_3: 1000.0 + key_4: !!float 1000 + key_5: !!str 1000 + key_6: !!str Yes + key_7: Yes +two_levels: + intermediate_level: + key_1: 1000 + key_2: "1000" + key_3: 1000.0 + key_4: !!float 1000 + key_5: !!str 1000 + key_6: !!str Yes + key_7: Yes \ No newline at end of file diff --git a/tests/functional/core/settings/utils/test_etcd_tool.py b/tests/functional/core/settings/utils/test_etcd_tool.py new file mode 100644 index 0000000..34889ac --- /dev/null +++ b/tests/functional/core/settings/utils/test_etcd_tool.py @@ -0,0 +1,73 @@ +from os import path + +from tamarco.core.settings.utils.etcd_tool import EtcdTool + + +def test_add_key(): + response_dict = {} + + client = EtcdTool(host="127.0.0.1") + client.write(key="/added_key", value="added value") + + response = client.read("/", recursive=True) + + for result in response.children: + response_dict[result.key] = result.value + + assert response_dict["/added_key"] == "added value" + + +def test_delete_key(): + response_dict = {} + client = EtcdTool(host="127.0.0.1") + + client.write(key="/added_key_1", 
value="added value 1") + client.write(key="/added_key_2", value="added value 2") + + client.delete(key="/added_key_1") + + response = client.read("/", recursive=True) + + for result in response.children: + response_dict[result.key] = result.value + + assert "/added_key_1" not in response_dict + assert response_dict["/added_key_2"] == "added value 2" + + +def test_load_file(): + response_dict = {} + test_folder = path.abspath(path.join(__file__, "..")) + setting_file = "settings.yml" + settings = test_folder + "/" + setting_file + client = EtcdTool(host="127.0.0.1") + client.load(settings) + + response = client.read("/", recursive=True) + + for result in response.children: + response_dict[result.key] = result.value + + assert response_dict["/key_1"] == "1000" + assert response_dict["/key_2"] == "1000" + assert response_dict["/key_3"] == "1000.0" + assert response_dict["/key_4"] == "1000.0" + assert response_dict["/key_5"] == "1000" + assert response_dict["/key_6"] == "Yes" + assert response_dict["/key_7"] == "True" + + assert response_dict["/one_level/key_1"] == "1000" + assert response_dict["/one_level/key_2"] == "1000" + assert response_dict["/one_level/key_3"] == "1000.0" + assert response_dict["/one_level/key_4"] == "1000.0" + assert response_dict["/one_level/key_5"] == "1000" + assert response_dict["/one_level/key_6"] == "Yes" + assert response_dict["/one_level/key_7"] == "True" + + assert response_dict["/two_levels/intermediate_level/key_1"] == "1000" + assert response_dict["/two_levels/intermediate_level/key_2"] == "1000" + assert response_dict["/two_levels/intermediate_level/key_3"] == "1000.0" + assert response_dict["/two_levels/intermediate_level/key_4"] == "1000.0" + assert response_dict["/two_levels/intermediate_level/key_5"] == "1000" + assert response_dict["/two_levels/intermediate_level/key_6"] == "Yes" + assert response_dict["/two_levels/intermediate_level/key_7"] == "True" diff --git a/tests/functional/core/test_logging.py b/tests/functional/core/test_logging.py new file mode 100644 index 0000000..f457e48 --- /dev/null +++ b/tests/functional/core/test_logging.py @@ -0,0 +1,39 @@ +import asyncio + +import pytest +import redis +import ujson + +from tamarco.core.logging.logging import Logging +from tamarco.core.settings.settings import Settings, SettingsView +from tamarco.core.utils import ROOT_SETTINGS + + +@pytest.mark.asyncio +async def test_logging_elastic_search(event_loop): + settings = SettingsView(Settings(), f"{ROOT_SETTINGS}.logging") + loggings = Logging() + await settings.set("redis.port", 7006) + await settings.set("redis.host", "127.0.0.1") + await settings.set("profile", "DEVELOP") + await settings.set(f"{ROOT_SETTINGS}.deploy_name", "functional_logstash", raw=True) + loggings.configure_settings(settings) + await asyncio.sleep(5.0) + await loggings.start( + ["test_funcional_logstash_logging"], "test_functional_logstash", "test_deploy_name", loop=event_loop + ) + r = redis.StrictRedis(host="127.0.0.1", port=7006) + await asyncio.sleep(5.0) + messages = [] + messages_check = [] + + for _ in range(20): + msg = f"Hello world {_}" + messages.append(msg) + + q = r.rpop("logstash") + while q is not None: + messages_check.append(ujson.loads(q.decode())["@message"]) + q = r.rpop("logstash") + + assert 0 <= len(messages_check) diff --git a/tests/functional/resources/__init__.py b/tests/functional/resources/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/resources/basic/__init__.py b/tests/functional/resources/basic/__init__.py new 
file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/resources/basic/metrics/__init__.py b/tests/functional/resources/basic/metrics/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/resources/basic/metrics/test_metrics.py b/tests/functional/resources/basic/metrics/test_metrics.py new file mode 100755 index 0000000..0865a67 --- /dev/null +++ b/tests/functional/resources/basic/metrics/test_metrics.py @@ -0,0 +1,48 @@ +import socket +import time +from unittest import mock + +import pytest + +from tamarco.resources.basic.metrics import MetersManager +from tamarco.resources.basic.metrics.meters import Counter, Gauge +from tamarco.resources.basic.metrics.reporters import CarbonHandler, FileHandler, StdoutHandler + + +@pytest.fixture +def meters_manager(): + """ This fixture is needed because if the metrics tests fail and exit before + the MetersManager calls its stop() function, the coverage process will never stop + (the MetersManager has to kill its threads). """ + + yield + + MetersManager.stop() + + +def test_metrics(meters_manager): + with mock.patch("socket.socket") as socket_mock, mock.patch( + "tamarco.resources.basic.metrics.reporters.file.open", mock.mock_open(), create=True + ) as open_mock: + MetersManager.add_handler(CarbonHandler("127.0.0.1", 2003, metric_prefix=socket.gethostname())) + MetersManager.add_handler(FileHandler("/tmp/metrics")) + MetersManager.add_handler(StdoutHandler(metric_prefix="metric")) + MetersManager.configure({"collect_period": 1}) + MetersManager.start() + + gauge_test = Gauge("critical.path.function.time", "seconds") + + @Counter("count.things", "things") + def count_one(): + pass + + @gauge_test.timeit() + def wait_one_ms(): + time.sleep(0.1) + + for _ in range(0, 20): + count_one() + wait_one_ms() + + socket_mock.assert_called() + open_mock.assert_called() diff --git a/tests/functional/resources/basic/registry/__init__.py b/tests/functional/resources/basic/registry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/functional/resources/basic/registry/test_resource.py b/tests/functional/resources/basic/registry/test_resource.py new file mode 100644 index 0000000..0ec6825 --- /dev/null +++ b/tests/functional/resources/basic/registry/test_resource.py @@ -0,0 +1,19 @@ +import aio_etcd +import pytest + +from tamarco.resources.basic.registry.resource import Registry +from tests.conftest import ETCD_CONFIGURATION + + +@pytest.fixture +def etcd_client(event_loop): + return aio_etcd.Client(**ETCD_CONFIGURATION, loop=event_loop) + + +@pytest.mark.asyncio +async def test_register_in_etcd(etcd_client): + registry = Registry() + registry.etcd_client = etcd_client + await registry.register_in_etcd("test_registry") + get = await etcd_client.get("test_registry") + assert get.value == registry.own_ip diff --git a/tests/functional/resources/basic/status/__init__.py b/tests/functional/resources/basic/status/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/resources/basic/status/conftest.py b/tests/functional/resources/basic/status/conftest.py new file mode 100644 index 0000000..0c6ba38 --- /dev/null +++ b/tests/functional/resources/basic/status/conftest.py @@ -0,0 +1,69 @@ +import pytest + +from tamarco.core.microservice import Microservice, MicroserviceContext +from tamarco.resources.basic.metrics.collector import CollectorThread +from tamarco.resources.basic.metrics.manager import MetersManager +from tamarco.resources.basic.status.resource import
StatusResource +from tamarco.resources.io.http.resource import HTTPClientResource, HTTPServerResource + + +class StatusContext(MicroserviceContext): + name = "test" + status = StatusResource() + client = HTTPClientResource() + tamarco_http_report_server = HTTPServerResource() + + def __init__(self): + super().__init__() + self.settings.update_internal( + { + "system": { + "deploy_name": "test", + "logging": {"profile": "DEVELOP", "stdout": True}, + "resources": {"tamarco_http_report_server": {"host": "127.0.0.1", "port": 5747, "debug": True}}, + } + } + ) + + +class StatusMicroservice(Microservice): + name = "test" + http_server = HTTPServerResource() + + def __init__(self): + super().__init__() + self.settings.update_internal( + { + "system": { + "deploy_name": "test", + "logging": {"profile": "DEVELOP", "stdout": True}, + "resources": {"http_server": {"host": "127.0.0.1", "port": 8080, "debug": True}}, + } + } + ) + + +@pytest.fixture +def clean_meter_manager(): + MetersManager.thread = CollectorThread() + + +@pytest.fixture +def status_context(event_loop): + StatusContext.loop = event_loop + status_context = StatusContext() + + yield status_context + + event_loop.run_until_complete(status_context.stop()) + + +@pytest.fixture +def status_microservice(event_loop): + StatusMicroservice.loop = event_loop + status_microservice = StatusMicroservice() + + yield status_microservice + + event_loop.run_until_complete(status_microservice.stop()) + event_loop.run_until_complete(status_microservice.post_stop()) diff --git a/tests/functional/resources/basic/status/test_resource.py b/tests/functional/resources/basic/status/test_resource.py new file mode 100755 index 0000000..da57541 --- /dev/null +++ b/tests/functional/resources/basic/status/test_resource.py @@ -0,0 +1,40 @@ +import asyncio +from unittest import mock + +import pytest + +from tamarco.resources.basic.status.status_codes import StatusCodes +from tests.utils import AsyncMock + + +@pytest.mark.asyncio +async def test_status_resource(status_context): + await status_context.start() + await asyncio.sleep(1) + url = "http://127.0.0.1:5747/status" + + async with status_context.client.session.get(url) as response: + assert response.status == 200 + response = await response.json() + assert "status" in response + assert "status" in response["status"] + assert isinstance(response["status"]["status"], int) + + +@pytest.mark.asyncio +async def test_check_status(event_loop, clean_meter_manager, status_microservice): + status_codes = StatusCodes + with mock.patch( + "tamarco.core.microservice.Microservice.stop_gracefully", new_callable=AsyncMock + ) as stop_gracefully_mock, mock.patch( + "tamarco.resources.basic.status.resource.StatusResource.status", + return_value={"status": status_codes.FAILED}, + new_callable=AsyncMock, + ): + + stop_gracefully_mock._name = "stop_gracefully_mock" + + asyncio.ensure_future(status_microservice._setup()) + + await asyncio.sleep(0.3) + stop_gracefully_mock.assert_called() diff --git a/tests/functional/resources/io/__init__.py b/tests/functional/resources/io/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/resources/io/http/__init__.py b/tests/functional/resources/io/http/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/functional/resources/io/http/test_resource.py b/tests/functional/resources/io/http/test_resource.py new file mode 100755 index 0000000..069ca16 --- /dev/null +++ b/tests/functional/resources/io/http/test_resource.py @@ -0,0 +1,23 @@ +from 
tamarco.core.microservice import MicroserviceContext +from tamarco.resources.io.http.resource import HTTPClientResource, HTTPServerResource + + +class HTTPContext(MicroserviceContext): + name = "test" + + def __init__(self): + super().__init__() + self.settings.update_internal( + { + "system": { + "deploy_name": "test", + "logging": {"profile": "DEVELOP", "stdout": True}, + "resources": { + "server": {"host": "127.0.0.1", "port": 8080, "debug": True, "keep_alive_connections": False} + }, + } + } + ) + + server = HTTPServerResource() + client = HTTPClientResource() diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/unit/codecs/__init__.py b/tests/unit/codecs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/codecs/test_codecs.py b/tests/unit/codecs/test_codecs.py new file mode 100644 index 0000000..57460a3 --- /dev/null +++ b/tests/unit/codecs/test_codecs.py @@ -0,0 +1,23 @@ +import pytest + +from tamarco.codecs.interface import CodecInterface +from tamarco.codecs.json import JsonCodec +from tamarco.codecs.pickle import PickleCodec +from tamarco.codecs.yaml import YamlCodec + + +@pytest.mark.parametrize("Codec", (YamlCodec, JsonCodec, PickleCodec, CodecInterface)) +@pytest.mark.asyncio +async def test_codec(Codec): + str_original = "test" + if Codec is YamlCodec: + str_original = "Node:0 " " Node:1" + elif Codec is JsonCodec: + str_original = "{'node1': {'node2': 'example node'}}" + + try: + obj_encode = Codec.encode(str_original) + assert Codec.decode(obj_encode) == str_original + except Exception: + # Only the abstract CodecInterface is expected to raise here. + assert Codec is CodecInterface
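For context, a minimal sketch of the round-trip contract the parametrized test above exercises (assuming, as in the test, that the codecs expose encode/decode as class-level callables, and that the JSON codec follows standard dumps/loads semantics):

    from tamarco.codecs.json import JsonCodec

    payload = {"node1": {"node2": "example node"}}
    raw = JsonCodec.encode(payload)          # serialized representation
    assert JsonCodec.decode(raw) == payload  # lossless for JSON-compatible data

diff --git a/tests/unit/core/__init__.py b/tests/unit/core/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/unit/core/settings/__init__.py b/tests/unit/core/settings/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/unit/core/settings/example_settings.json b/tests/unit/core/settings/example_settings.json new file mode 100644 index 0000000..4283ca3 --- /dev/null +++ b/tests/unit/core/settings/example_settings.json @@ -0,0 +1,4 @@ +{ + "hello": "world", + "cheap": {"is": "dear"} +} \ No newline at end of file diff --git a/tests/unit/core/settings/example_settings.py b/tests/unit/core/settings/example_settings.py new file mode 100644 index 0000000..d104959 --- /dev/null +++ b/tests/unit/core/settings/example_settings.py @@ -0,0 +1 @@ +settings = {"hello": "world", "cheap": {"is": "dear"}} diff --git a/tests/unit/core/settings/example_settings.yaml b/tests/unit/core/settings/example_settings.yaml new file mode 100644 index 0000000..0411a7b --- /dev/null +++ b/tests/unit/core/settings/example_settings.yaml @@ -0,0 +1,3 @@ +hello: "world" +cheap: + is: "dear" \ No newline at end of file diff --git a/tests/unit/core/settings/test_dict_deep_update.py b/tests/unit/core/settings/test_dict_deep_update.py new file mode 100755 index 0000000..5a48089 --- /dev/null +++ b/tests/unit/core/settings/test_dict_deep_update.py @@ -0,0 +1,14 @@ +from tamarco.core.settings.utils import dict_deep_update + + +def test_dict_deep_update(): + original = {"resources": {"amqp": {"host": "127.0.0.1"}}} + + update = {"resources": {"amqp": {"port": 10000}}} + + merge = dict_deep_update(original, update) + merge_reverse = dict_deep_update(update, original) + + for results in (merge, merge_reverse): + assert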
results["resources"]["amqp"]["port"] == update["resources"]["amqp"]["port"] + assert results["resources"]["amqp"]["host"] == original["resources"]["amqp"]["host"] diff --git a/tests/unit/core/settings/test_settings_backends.py b/tests/unit/core/settings/test_settings_backends.py new file mode 100644 index 0000000..4b1e545 --- /dev/null +++ b/tests/unit/core/settings/test_settings_backends.py @@ -0,0 +1,106 @@ +import asyncio + +import pytest + +from tamarco.core.settings.backends.dictionary import DictSettingsBackend +from tamarco.core.settings.backends.file_based import JsonSettingsBackend, PythonSettingsBackend, YamlSettingsBackend + + +@pytest.mark.asyncio +async def test_dict_settings_backend(event_loop): + backend = DictSettingsBackend(dict_settings={"setting1": "foo", "setting2": "bar", "setting3": {"setting4": 3}}) + backend.set_loop(event_loop) + # Get + assert await backend.get("setting1") == "foo" + assert await backend.get("setting1", "bar") == "foo" + assert await backend.get("setting3.setting4") == 3 + assert await backend.get("foo", 10) == 10 + with pytest.raises(KeyError): + await backend.get("foo") + + # Set, delete + await backend.set("setting5", "baz") + assert await backend.get("setting5") == "baz" + await backend.delete("setting5") + with pytest.raises(KeyError): + await backend.get("setting5") + + callback_called = asyncio.Future(loop=event_loop) + + # Watch + async def callback(key, value): + + nonlocal callback_called + callback_called.set_result("callback called") + + await backend.watch("setting1", callback) + await backend.set("setting1", "foo2") + assert await asyncio.wait_for(callback_called, 0.5, loop=event_loop) == "callback called" + + +@pytest.mark.asyncio +async def test_json_file_based_settings_backend(event_loop): + backend = JsonSettingsBackend(file="./tests/unit/core/settings/example_settings.json", loop=event_loop) + assert await backend.get("hello") == "world" + assert await backend.get("cheap.is") == "dear" + + +@pytest.mark.asyncio +async def test_yaml_file_based_settings_backend(event_loop): + backend = YamlSettingsBackend(file="./tests/unit/core/settings/example_settings.yaml", loop=event_loop) + assert await backend.get("hello") == "world" + assert await backend.get("cheap.is") == "dear" + + +@pytest.mark.asyncio +async def test_python_file_based_settings_backend(event_loop): + backend = PythonSettingsBackend(file="./tests/unit/core/settings/example_settings.py", loop=event_loop) + assert await backend.get("settings.hello") == "world" + assert await backend.get("settings.cheap.is") == "dear" + + +@pytest.mark.asyncio +async def test_dictsettingsbackend_replace_dict_for_value(): + settings = DictSettingsBackend(dict_settings={"a": {"b": {"c": {"e": "f"}}}}) + + await settings.set("a", "d") + value = await settings.get("a") + assert value == "d" + + +@pytest.mark.asyncio +async def test_dictsettingsbackend_replace_value_for_value(): + settings = DictSettingsBackend(dict_settings={"a": {"b": {"c": "d"}}}) + + await settings.set("a.b.c", "e") + value = await settings.get("a") + assert value == {"b": {"c": "e"}} + + +@pytest.mark.asyncio +async def test_dictsettingsbackend_replace_value_for_dict(): + settings = DictSettingsBackend(dict_settings={"a": {"b": {"c": "d"}}}) + + await settings.set("a.b.c", {"e": "f"}) + value = await settings.get("a") + assert value == {"b": {"c": {"e": "f"}}} + + +@pytest.mark.asyncio +async def test_dictsettingsbackend_add_dict(): + settings = DictSettingsBackend(dict_settings={"a": "b"}) + + await settings.set("a.b.e", 
"f") + value = await settings.get("a") + assert value == {"b": {"e": "f"}} + + +@pytest.mark.asyncio +async def test_dictsettingsbackend_add_dict_2(): + settings = DictSettingsBackend(dict_settings={"a": "b"}) + + await settings.set("z", "f") + value = await settings.get("a") + assert value == "b" + value = await settings.get("z") + assert value == "f" diff --git a/tests/unit/core/test_dependency_resolver.py b/tests/unit/core/test_dependency_resolver.py new file mode 100755 index 0000000..5866c9b --- /dev/null +++ b/tests/unit/core/test_dependency_resolver.py @@ -0,0 +1,36 @@ +import pytest + +from tamarco.core.dependency_resolver import CantSolveDependencies, resolve_dependency_order + + +def test_resolve_dependencies(): + dependency_graph = { + "settings": [], + "logging": ["settings"], + "metrics": ["logging", "settings"], + "status": ["logging", "settings"], + "amqp": ["logging", "settings", "status"], + } + result = resolve_dependency_order(dependency_graph) + + assert result[0] == "settings" + assert result[1] == "logging" + + assert "metrics" in result[2:4] + assert "status" in result[2:4] + + assert result[-1] == "amqp" + + +def test_resolve_dependencies_fail(): + dependency_graph = { + "settings": [], + "logging": ["settings", "status"], + "metrics": ["logging", "settings"], + "status": ["logging", "settings"], + "amqp": ["logging", "settings", "status"], + } + + with pytest.raises(CantSolveDependencies): + result = resolve_dependency_order(dependency_graph) + assert result diff --git a/tests/unit/core/test_logging.py b/tests/unit/core/test_logging.py new file mode 100644 index 0000000..268295e --- /dev/null +++ b/tests/unit/core/test_logging.py @@ -0,0 +1,180 @@ +import socket +import time +from datetime import datetime +from logging import LogRecord + +import ujson + +from tamarco.core.logging.formatters.colored import ColoredFormatter +from tamarco.core.logging.formatters.logstash import LogstashFormatterVersion0, LogstashFormatterVersion1 +from tamarco.core.logging.formatters.syslog import SyslogFormatter +from tamarco.core.logging.handlers.asyncronous import AsyncWrapperHandler, MAX_QUEUE_SIZE, VALUE_TOLERANCE_PERCENTAGE + + +def test_logging_colored_formatter_format_timestamp(): + timestamp = time.time() + _datetime = datetime.fromtimestamp(timestamp).isoformat(sep=" ") + assert _datetime == ColoredFormatter.format_timestamp(timestamp) + + +def test_logging_logstash_formatter_version_0(): + log_record = LogRecord( + name="test_logging_ColoredFormatter_format", + level=1, + pathname="test/logging", + lineno=1, + msg="hello world", + args=["args1", "args2"], + exc_info=None, + ) + time_stamp = 1_400_000_000 + log_record.created = time_stamp + formater_base = LogstashFormatterVersion0() + hostname = socket.gethostname() + data_format = formater_base.format(log_record).decode() + data = ujson.loads(data_format) + + assert data == { + "@timestamp": "2014-05-13T16:53:20.000Z", + "@message": "hello world", + "@source": f"Logstash://{hostname}/test/logging", + "@source_host": f"{hostname}", + "@source_path": "test/logging", + "@tags": [], + "@type": "Logstash", + "@fields": {"levelname": "Level 1", "logger": "test_logging_ColoredFormatter_format", "stack_info": None}, + } + + +def test_logging_logstash_formatter_version_1(): + log_record = LogRecord( + name="test_logging_ColoredFormatter_format", + level=1, + pathname="test/logging", + lineno=1, + msg="hello world", + args=["args1", "args2"], + exc_info=None, + ) + time_stamp = 1_400_000_000 + log_record.created = time_stamp + 
formater_base = LogstashFormatterVersion1() + hostname = socket.gethostname() + data_format = formater_base.format(log_record).decode() + data = ujson.loads(data_format) + + assert data == { + "@timestamp": "2014-05-13T16:53:20.000Z", + "@version": "1", + "message": "hello world", + "host": f"{hostname}", + "path": "test/logging", + "tags": [], + "type": "Logstash", + "level": "Level 1", + "logger_name": "test_logging_ColoredFormatter_format", + "stack_info": None, + } + + +def test_logging_syslog_formatter(): + log_record = LogRecord( + name="test_logging_ColoredFormatter_format", + level=1, + pathname="test/logging", + lineno=1, + msg="hello world", + args=["args1", "args2"], + exc_info=None, + ) + timestamp = 1_400_000_000 + log_record.created = timestamp + log_record.extra_msg = {"extra_msg1": "foo"} + formater_base = SyslogFormatter() + hostname = socket.getfqdn() + formatted_time = datetime.fromtimestamp(timestamp).isoformat(sep=" ") + assert ( + formater_base.format(log_record).strip() == f"[{formatted_time}] [{hostname}] [Level 1] " + "[(test_logging_ColoredFormatter_format) logging:1] " + '[null] [hello world] [{"json_extra_msg1":"foo"}]' + ) + + +class HandlerCheck: + check_data = False + + def __init__(self): + HandlerCheck.check_data = False + + def handle(self, record): + HandlerCheck.check_data = True + + +def test_logging_async_wrapper_handler(): + async_wrapper = AsyncWrapperHandler(handler=HandlerCheck) + assert async_wrapper.queue.maxsize == MAX_QUEUE_SIZE + + log_record_array_inter = [] + log_record_array_outer = [] + + assert not HandlerCheck.check_data + + for _ in range(80): + log_record = LogRecord( + name="test_logging_ColoredFormatter_format", + level=1, + pathname="test/logging", + lineno=1, + msg="hello world", + args=["args1", "args2"], + exc_info=None, + ) + time_stamp = 1_400_000_000 + log_record.created = time_stamp + log_record_array_inter.append(log_record) + async_wrapper.emit(log_record) + + for _ in range(80): + check_enqueue_d = async_wrapper.listener.dequeue(False) + log_record_array_outer.append(check_enqueue_d) + + assert async_wrapper.queue.empty() + assert log_record_array_inter == log_record_array_outer + + +def test_logging_async_wrapper_handler_tolerance(): + async_wrapper = AsyncWrapperHandler(handler=HandlerCheck) + assert async_wrapper.queue.maxsize == MAX_QUEUE_SIZE + + num_record_array_enqueue = 0 + num_record_array_dequeue = 0 + counter = 0 + assert not HandlerCheck.check_data + + async_wrapper.listener.stop() + time.sleep(2) + + for _ in range(MAX_QUEUE_SIZE): + time_stamp = 1_400_000_000 + log_record = LogRecord( + name="test_logging_ColoredFormatter_format", + level=1, + pathname="test/logging", + lineno=1, + msg="hello world", + args=["args1", "args2"], + exc_info=None, + ) + log_record.created = time_stamp + num_record_array_enqueue += 1 + async_wrapper.emit(log_record) + counter += 1 + + try: + for _ in range(MAX_QUEUE_SIZE * 10): + async_wrapper.listener.dequeue(False) + num_record_array_dequeue += 1 + except Exception: + pass + + assert num_record_array_dequeue == VALUE_TOLERANCE_PERCENTAGE diff --git a/tests/unit/core/test_microservice.py b/tests/unit/core/test_microservice.py new file mode 100755 index 0000000..ce3c283 --- /dev/null +++ b/tests/unit/core/test_microservice.py @@ -0,0 +1,268 @@ +import asyncio +import time + +import pytest + +from tamarco.core.microservice import MicroserviceContext, task, task_timer + + +@pytest.fixture +def yaml_settings_ms_ctx(request, event_loop, inject_in_env_settings_file_path): + ms_ctx = 
MicroserviceContext()
+    event_loop.run_until_complete(ms_ctx.start())
+    yield ms_ctx
+    event_loop.run_until_complete(ms_ctx.stop())
+
+
+def test_deploy_name_loads_from_settings(yaml_settings_ms_ctx):
+    assert yaml_settings_ms_ctx.deploy_name
+    assert yaml_settings_ms_ctx.deploy_name == "test_tamarco"
+
+
+class MicroserviceTestTaskContext(MicroserviceContext):
+    __init_time_stamp_exec = []
+
+    def __init__(self):
+        super().__init__()
+        self.time_stamp_exec = []
+        self.check_pass = False
+        self.settings.update_internal(
+            {"system": {"deploy_name": "test", "logging": {"profile": "DEVELOP", "stdout": True}}}
+        )
+
+
+class TaskCheckedDecorator(MicroserviceTestTaskContext):
+    """
+    Checks the @task decorator.
+    """
+
+    name = "TaskCheckedTaskDecorator"
+
+    @task
+    async def check_decorator(self):
+        self.check_pass = True
+        self.time_stamp_exec.append(time.time())
+
+
+class TaskTimerPeriodic(MicroserviceTestTaskContext):
+    """
+    Checks the periodic task decorator.
+    """
+
+    name = "TaskTimerPeriodic"
+
+    @task_timer(interval=500, one_shot=False, autostart=False)
+    async def task_timer_periodic(self):
+        self.check_pass = True
+        self.time_stamp_exec.append(time.time())
+
+
+class TaskExecuteBeforePeriodicTime(MicroserviceTestTaskContext):
+    """
+    Checks a periodic task that, with autostart, also runs once immediately at start.
+    """
+
+    name = "TaskExecuteBeforePeriodicTime"
+
+    @task_timer(interval=500, one_shot=False, autostart=True)
+    async def task_exec_before_periodic_time(self):
+        self.check_pass = True
+        self.time_stamp_exec.append(time.time())
+
+
+class TaskOneShot(MicroserviceTestTaskContext):
+    """
+    Checks a one-shot task that runs once after the interval.
+    """
+
+    name = "TaskOneShot"
+
+    @task_timer(interval=500, one_shot=True, autostart=False)
+    async def task_task_one_shot(self):
+        self.check_pass = True
+        self.time_stamp_exec.append(time.time())
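+
+
+# task_timer intervals are expressed in milliseconds: interval=500 fires roughly
+# every 0.5 s, which is why the tests below sleep for about a second and then
+# assert on the recorded timestamps.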
+class TaskMultipleTaskAndTimerTask(MicroserviceTestTaskContext):
+    """
+    Checks several tasks running together:
+        * a plain task
+        * a periodic task
+        * a one-shot task
+        * a periodic task that also runs at start
+    """
+
+    name = "TaskMultipleTaskAndTimerTask"
+
+    check_pass_one_shot = False
+    time_stamp_exec_one_shot = []
+
+    check_pass_task = False
+    time_stamp_exec_task = []
+
+    check_pass_task_periodic = False
+    time_stamp_exec_task_periodic = []
+
+    check_pass_task_execute_before_periodic = False
+    time_stamp_exec_task_execute_before_periodic = []
+
+    exception_task_times = 0
+
+    # Task executed once after 1500 milliseconds
+    @task_timer(interval=1500, one_shot=True, autostart=False)
+    async def task_multiple_task_one_shot(self):
+        self.check_pass_one_shot = True
+        self.time_stamp_exec_one_shot.append(time.time())
+
+    # Task executed immediately and then every 1000 milliseconds
+    @task_timer(interval=1000, one_shot=False, autostart=True)
+    async def task_multiple_task_execute_before_periodic(self):
+        self.check_pass_task_execute_before_periodic = True
+        self.time_stamp_exec_task_execute_before_periodic.append(time.time())
+
+    # Task executed every 500 milliseconds
+    @task_timer(interval=500, one_shot=False, autostart=False)
+    async def task_multiple_task_periodic(self):
+        self.check_pass_task_periodic = True
+        self.time_stamp_exec_task_periodic.append(time.time())
+
+    # Task raises an exception and is stopped
+    @task_timer(interval=1000)
+    async def task_with_exception(self):
+        self.exception_task_times += 1
+        raise Exception
+
+    # Plain task
+    @task
+    async def task_checked_decorated(self):
+        self.check_pass_task = True
+        self.time_stamp_exec_task.append(time.time())
+
+
+@pytest.mark.asyncio
+async def test_task_checked_decorator(event_loop):
+    test_microservice_task_decorator = TaskCheckedDecorator()
+    test_microservice_task_decorator.loop = event_loop
+
+    await test_microservice_task_decorator.start()
+    await asyncio.sleep(1)
+
+    assert test_microservice_task_decorator.check_pass, "Task was not executed"
+    assert len(test_microservice_task_decorator.time_stamp_exec) == 1, "Task should run exactly once"
+    await test_microservice_task_decorator.stop()
+
+
+@pytest.mark.asyncio
+async def test_task_timer_periodic(event_loop):
+    test_microservice_periodic = TaskTimerPeriodic()
+    test_microservice_periodic.loop = event_loop
+    time_start = time.time()
+
+    await test_microservice_periodic.start()
+    await asyncio.sleep(1)
+
+    assert test_microservice_periodic.check_pass, "Periodic timer task was not executed"
+    assert len(test_microservice_periodic.time_stamp_exec) >= 1, "Expected at least one execution per interval"
+    assert (
+        test_microservice_periodic.time_stamp_exec[0] - time_start >= 0.5
+    ), "First execution should not happen before the 0.5 s interval has elapsed"
+    await test_microservice_periodic.stop()
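+
+
+# The timing windows below are deliberately loose (for example 0.45-0.65 s for a
+# 0.5 s interval) because wall-clock assertions can drift when the event loop is
+# busy.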
+@pytest.mark.asyncio
+async def test_task_timer_periodic_execute_before_sleep(event_loop):
+    test_microservice_periodic_execute_before_sleep = TaskExecuteBeforePeriodicTime()
+    test_microservice_periodic_execute_before_sleep.loop = event_loop
+    time_start = time.time()
+
+    await test_microservice_periodic_execute_before_sleep.start()
+    await asyncio.sleep(1)
+
+    assert test_microservice_periodic_execute_before_sleep.check_pass, "Periodic timer task was not executed"
+    assert (
+        len(test_microservice_periodic_execute_before_sleep.time_stamp_exec) >= 2
+    ), "Expected at least 2 executions: one at start plus one per interval"
+    assert (
+        test_microservice_periodic_execute_before_sleep.time_stamp_exec[0] - time_start < 1
+    ), "First execution should happen immediately, before the interval elapses"
+    assert (
+        0.5 <= test_microservice_periodic_execute_before_sleep.time_stamp_exec[1] - time_start <= 3
+    ), "Second execution should happen one interval after the start"
+    await test_microservice_periodic_execute_before_sleep.stop()
+
+
+@pytest.mark.asyncio
+async def test_task_timer_oneshot(event_loop):
+    test_microservice_task_one_shot = TaskOneShot()
+    test_microservice_task_one_shot.loop = event_loop
+    time_start = time.time()
+
+    await test_microservice_task_one_shot.start()
+    await asyncio.sleep(1)
+
+    assert test_microservice_task_one_shot.check_pass, "One-shot timer task was not executed"
+    assert len(test_microservice_task_one_shot.time_stamp_exec) == 1, "One-shot task should run exactly once"
+    assert test_microservice_task_one_shot.time_stamp_exec[0] - time_start >= 0.5, "Executed before the interval elapsed"
+    await test_microservice_task_one_shot.stop()
+
+
+@pytest.mark.asyncio
+async def test_multiple_task(event_loop):
+    test_microservice_multiple_task = TaskMultipleTaskAndTimerTask()
+    test_microservice_multiple_task.loop = event_loop
+    time_start = time.time()
+
+    await test_microservice_multiple_task.start()
+    await asyncio.sleep(4)
+
+    # The failing task runs once, raises and is not rescheduled
+    assert test_microservice_multiple_task.exception_task_times == 1
+
+    # Check the plain task
+    assert test_microservice_multiple_task.check_pass_task, "Plain task was not executed"
+    assert len(test_microservice_multiple_task.time_stamp_exec_task) == 1, "Plain task should run exactly once"
+    assert (
+        test_microservice_multiple_task.time_stamp_exec_task[0] - time_start < 1
+    ), "Plain task should run right after the start"
+
+    # Check the periodic task
+    assert test_microservice_multiple_task.check_pass_task_periodic, "Periodic task was not executed"
+    assert len(test_microservice_multiple_task.time_stamp_exec_task_periodic) > 1, "Expected more than one execution"
+    assert (
+        test_microservice_multiple_task.time_stamp_exec_task_periodic[0] - time_start >= 0.5
+    ), "First execution came before the interval elapsed"
+    for i in range(1, len(test_microservice_multiple_task.time_stamp_exec_task_periodic)):
+        diff_time = (
+            test_microservice_multiple_task.time_stamp_exec_task_periodic[i]
+            - test_microservice_multiple_task.time_stamp_exec_task_periodic[i - 1]
+        )
+        assert 0.45 < diff_time < 0.65, "Periodicity out of the expected range"
+
+    # Check the periodic task that also runs at start
+    assert (
+        test_microservice_multiple_task.check_pass_task_execute_before_periodic
+    ), "Autostarted periodic task was not executed"
+    assert (
+        len(test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic) > 1
+    ), "Expected more than one execution"
+    assert (
+        test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic[0] - time_start < 1
+    ), "First execution should happen right after the start"
+    for i in range(1, len(test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic)):
+        diff_time = (
+            test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic[i]
+            - test_microservice_multiple_task.time_stamp_exec_task_execute_before_periodic[i - 1]
+        )
+        assert 0.95 < diff_time < 1.5, "Periodicity out of the expected range"
+
+    # Check the one-shot task
+    assert test_microservice_multiple_task.check_pass_one_shot, "One-shot task was not executed"
+    assert
len(test_microservice_multiple_task.time_stamp_exec_one_shot) == 1, "Not executed task once" + assert test_microservice_multiple_task.time_stamp_exec_one_shot[0] - time_start >= 1.45, "Not executed the first" diff --git a/tests/unit/core/test_signals.py b/tests/unit/core/test_signals.py new file mode 100644 index 0000000..30e1a5a --- /dev/null +++ b/tests/unit/core/test_signals.py @@ -0,0 +1,62 @@ +import asyncio +import signal +import threading +from time import sleep +from unittest import mock + +import pytest + +from tamarco.core.signals import SignalsManager, signal_handler + + +@pytest.mark.asyncio +async def test_signal_manager(event_loop): + signals_manager = SignalsManager() + signals_manager.set_loop(event_loop) + flag1 = False + flag2 = False + + def sigalrm_handler(signum, frame): + nonlocal flag1 + flag1 = True + + async def sigint_handler(signum, frame): + nonlocal flag2 + flag2 = True + + def exception_handler(signum, frame): + raise Exception + + signals_manager.register_signal(sigalrm_handler, signal.SIGALRM) + signals_manager.register_signal(sigint_handler, signal.SIGALRM) + signals_manager.register_signal(exception_handler, signal.SIGQUIT) + + assert sigalrm_handler == signals_manager.handlers[signal.SIGALRM][0] + assert sigint_handler == signals_manager.handlers[signal.SIGALRM][1] + assert exception_handler == signals_manager.handlers[signal.SIGQUIT][0] + + def use_alarm(): + signal.alarm(1) + + alarm_thread = threading.Thread(target=use_alarm, name="alarm_thread") + alarm_thread.start() + sleep(1.1) + alarm_thread.join() + assert flag1 + + # Allow asyncio to execute queued tasks, in this case the asynchronous signal_number handler + await asyncio.sleep(0) + await asyncio.sleep(0) + assert flag2 + + with mock.patch("tamarco.core.signals.logger") as mock_logger: + signals_manager._dispatch_signal(signal.SIGQUIT, None) + assert mock_logger.warning.called + + +def test_signal_handler(): + @signal_handler(signal.SIGALRM) + def handler(): + pass + + assert handler in SignalsManager().handlers[signal.SIGALRM] diff --git a/tests/unit/core/test_tasks.py b/tests/unit/core/test_tasks.py new file mode 100755 index 0000000..afb8553 --- /dev/null +++ b/tests/unit/core/test_tasks.py @@ -0,0 +1,82 @@ +import asyncio +import time +from threading import current_thread + +import pytest + +from tamarco.core.tasks import TasksManager + + +@pytest.fixture +def tasks_manager(event_loop): + tasks_manager = TasksManager() + tasks_manager.set_loop(event_loop) + + yield tasks_manager + + tasks_manager.stop_all() + + +@pytest.mark.asyncio +async def test_task_handle(event_loop, tasks_manager): + tasks_manager.task_limit = 2 + + assert len(tasks_manager.tasks_coros) == 0 + assert len(tasks_manager.tasks) == 0 + + async def sleeper(): + while True: + await asyncio.sleep(0.1) + + tasks_manager.register_task(name="sleeper", task_coro=sleeper) + + assert len(tasks_manager.tasks_coros) == 1 + assert len(tasks_manager.tasks) == 0 + + tasks_manager.start_all() + + assert len(tasks_manager.tasks) == 1 + assert len(tasks_manager.tasks_coros) == 0 + + tasks_manager.stop_all() + + assert len(tasks_manager.tasks) == 0 + assert len(tasks_manager.tasks_coros) == 0 + + await tasks_manager.wait_for_start_task(name="1", task_coro=sleeper()) + await tasks_manager.wait_for_start_task(name="2", task_coro=sleeper()) + coro = tasks_manager.wait_for_start_task(name="3", task_coro=sleeper()) + asyncio.ensure_future(coro, loop=event_loop) + await asyncio.sleep(0.15, loop=event_loop) + tasks_manager.stop_all() + await 
asyncio.sleep(0.15, loop=event_loop) + assert len(tasks_manager.tasks) == 1 + + tasks_manager.stop_all() + + assert len(tasks_manager.tasks) == 0 + + +@pytest.mark.asyncio +async def test_thread_handle(event_loop, tasks_manager): + assert len(tasks_manager.threads_fns) == 0 + assert len(tasks_manager.threads) == 0 + + def sleeper(): + while not current_thread().stop: + time.sleep(0.1) + + tasks_manager.register_thread(name="sleeper", thread_fn=sleeper) + + assert len(tasks_manager.threads_fns) == 1 + assert len(tasks_manager.threads) == 0 + + tasks_manager.start_all() + + assert len(tasks_manager.threads) == 1 + assert len(tasks_manager.threads_fns) == 0 + + tasks_manager.stop_all() + + assert len(tasks_manager.threads) == 0 + assert len(tasks_manager.threads_fns) == 0 diff --git a/tests/unit/core/test_utils.py b/tests/unit/core/test_utils.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/resources/__init__.py b/tests/unit/resources/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/unit/resources/basic/__init__.py b/tests/unit/resources/basic/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/unit/resources/basic/metrics/__init__.py b/tests/unit/resources/basic/metrics/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/unit/resources/basic/metrics/conftest.py b/tests/unit/resources/basic/metrics/conftest.py new file mode 100755 index 0000000..977f81a --- /dev/null +++ b/tests/unit/resources/basic/metrics/conftest.py @@ -0,0 +1,47 @@ +from random import random + +import pytest + +from tamarco.resources.basic.metrics.meters import Counter, Gauge, Summary + + +@pytest.fixture +def sample_metrics(): + return [ + ("index.requests", 45, "requests", 126874812.0), + ("main.requests", 60, "requests", 126874812.0), + ("response.time", 12.64, "s", 126874812.0), + ] + + +@pytest.fixture +def clean_flyweights(): + Counter._Flyweight__instances = {} + Gauge._Flyweight__instances = {} + Summary._Flyweight__instances = {} + + Counter._ExtendedFlyweight__instances = {} + Gauge._ExtendedFlyweight__instances = {} + Summary._ExtendedFlyweight__instances = {} + + +@pytest.fixture +def sample_meters(clean_flyweights): + cat_counter = Counter("cat", "cats") + cat_counter.value = 5 + + cat_weight_gauge = Gauge("cat_weight", "kg") + cat_weight_gauge.value = 2.785 + + meow_time_summary_doraemon = Summary("meow_time", "seconds", labels={"name": "Doraemon"}) + meow_time_summary_doraemon.values = [random() for _ in range(100)] + + meow_time_summary_garfield = Summary("meow_time", "seconds", labels={"name": "Garfield"}) + meow_time_summary_garfield.values = [random() for _ in range(100)] + + return [cat_counter, cat_weight_gauge, meow_time_summary_doraemon, meow_time_summary_garfield] + + +@pytest.fixture +def sample_one_metric(): + return [("index.requests", 45, "requests", 126874812.0)] diff --git a/tests/unit/resources/basic/metrics/meters/__init__.py b/tests/unit/resources/basic/metrics/meters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/resources/basic/metrics/meters/test_base.py b/tests/unit/resources/basic/metrics/meters/test_base.py new file mode 100644 index 0000000..aa0e9fa --- /dev/null +++ b/tests/unit/resources/basic/metrics/meters/test_base.py @@ -0,0 +1,94 @@ +import asyncio +import time + +import pytest + +from tamarco.core.patterns import Flyweight, FlyweightWithLabels +from tamarco.resources.basic.metrics.meters.base import Timer + + +def test_flyweight(): + class 
Cat(metaclass=Flyweight): + def __init__(self, name): + self.name = name + + garfield_1 = Cat("Garfield") + garfield_2 = Cat("Garfield") + doraemon = Cat("Doraemon") + + assert garfield_1 == garfield_2 + assert garfield_1 != doraemon and garfield_2 != doraemon + assert hasattr(Cat, "_Flyweight__instances") + assert "Garfield" in Cat._Flyweight__instances + + +def test_extended_flyweight(): + # Behavior of flyweight + class Cat(metaclass=FlyweightWithLabels): + def __init__(self, name): + self.name = name + + garfield_1 = Cat("Garfield") + garfield_2 = Cat("Garfield") + doraemon = Cat("Doraemon") + + assert garfield_1 == garfield_2 + assert garfield_1 != doraemon and garfield_2 != doraemon + assert hasattr(Cat, "_Flyweight__instances") + assert "Garfield" in Cat._Flyweight__instances + + class Animal(metaclass=FlyweightWithLabels): + def __init__(self, name, labels=None): + self.name = name + + # Behavior of the extended flyweight + cat = Animal("cat") + dog = Animal("dog") + cat_garfield_1 = Animal("cat", labels={"name": "garfield"}) + cat_garfield_2 = Animal("cat", labels={"name": "garfield"}) + dog_garfield = Animal("dog", labels={"name": "garfield"}) + cat_doraemon = Animal("cat", labels={"name": "doraemon"}) + + assert cat != dog + assert cat_garfield_1 == cat_garfield_2 + assert cat_garfield_1 != cat_doraemon and cat_garfield_2 != cat_doraemon + assert dog_garfield != cat_garfield_1 and dog_garfield != cat_garfield_2 + + # Test more than one labels + cat_garfield_sleep = Animal("cat", labels={"name": "garfield", "power": "sleep"}) + cat_garfield = Animal("cat", labels={"name": "garfield"}) + cat_garfield_sleep_same_instance = Animal("cat", labels={"name": "garfield", "power": "sleep"}) + + assert cat_garfield_sleep != cat_garfield + assert cat_garfield_sleep == cat_garfield_sleep_same_instance + + +def test_timer_context_manager(): + timer_value = [] + timer = Timer(lambda time: timer_value.append(time)) + with timer: + time.sleep(0.01) + assert 0.0075 < timer_value.pop() < 0.1 + + +def test_timer_decorator(): + timer_value = [] + + @Timer(lambda time: timer_value.append(time)) + def time_me(): + time.sleep(0.01) + + time_me() + assert 0.0075 < timer_value.pop() < 0.012 + + +@pytest.mark.asyncio +async def test_timer_async_decorator(): + timer_value = [] + + @Timer(lambda time: timer_value.append(time)) + async def time_me(): + await asyncio.sleep(0.01) + + await time_me() + assert 0.0075 < timer_value.pop() < 0.012 diff --git a/tests/unit/resources/basic/metrics/meters/test_counter.py b/tests/unit/resources/basic/metrics/meters/test_counter.py new file mode 100644 index 0000000..8421fe6 --- /dev/null +++ b/tests/unit/resources/basic/metrics/meters/test_counter.py @@ -0,0 +1,103 @@ +import asyncio + +import pytest + +from tamarco.resources.basic.metrics.meters import Counter + + +class MyException(Exception): + pass + + +@pytest.mark.asyncio +async def test_counter(): + counter = Counter("test.count", "request/s") + assert counter.counter == 0 + counter.inc() + assert counter.counter == 1 + + @Counter("test.count", "request/s") + def count_a_request(): + pass + + count_a_request() + assert counter.counter == 2 + + @Counter("test.count", "request/s") + async def count_a_async_request(): + pass + + assert asyncio.iscoroutinefunction(count_a_async_request) + assert count_a_async_request.__name__ == "count_a_async_request" + + await count_a_async_request() + assert counter.counter == 3 + + collected_data = counter._collect_metrics()[0] + + assert collected_data.id == counter.metric_id + 
assert counter.metric_type == "counter"
+    assert collected_data.units == counter.measurement_unit
+
+
+def test_counter_invalid_value():
+    counter = Counter("test.count.invalid_value", "request/s")
+
+    with pytest.raises(AssertionError):
+        counter.inc(-0.1)
+
+
+@pytest.mark.asyncio
+async def test_exception_counter_decorator():
+    ex_counter = Counter("test.exceptions.count", "exception/s")
+    assert ex_counter.counter == 0
+    ex_counter.inc()
+    assert ex_counter.counter == 1
+
+    @ex_counter.count_exceptions()
+    def exception_count():
+        raise MyException
+
+    with pytest.raises(MyException):
+        exception_count()
+
+    assert ex_counter.counter == 2
+
+    @ex_counter.count_exceptions()
+    async def async_exception_count():
+        raise MyException
+
+    with pytest.raises(MyException):
+        await async_exception_count()
+
+    assert ex_counter.counter == 3
+
+    collected_data = ex_counter._collect_metrics()[0]
+    assert collected_data.id == ex_counter.metric_id
+    assert ex_counter.metric_type == "counter"
+    assert collected_data.units == ex_counter.measurement_unit
+
+
+@pytest.mark.asyncio
+async def test_exception_counter_context_manager():
+    ex_counter = Counter("test.exceptions.context_manager", "exception/s")
+    assert ex_counter.current_value() == 0
+
+    with pytest.raises(MyException):
+        with ex_counter.count_exceptions():
+            raise MyException("testing exception")
+
+    assert ex_counter.current_value() == 1
+
+    with ex_counter.count_exceptions():
+        pass
+
+    assert ex_counter.current_value() == 1
+
+
+@pytest.mark.asyncio
+async def test_counter_new_labels():
+    counter = Counter("test.counter.new_labels", "test", labels={1: 1})
+    counter_new_label = counter.new_labels({2: 2})
+    assert 1 in counter.labels and 2 not in counter.labels
+    assert 1 in counter_new_label.labels and 2 in counter_new_label.labels
diff --git a/tests/unit/resources/basic/metrics/meters/test_custom.py b/tests/unit/resources/basic/metrics/meters/test_custom.py
new file mode 100644
index 0000000..5a67aee
--- /dev/null
+++ b/tests/unit/resources/basic/metrics/meters/test_custom.py
@@ -0,0 +1,81 @@
+import pytest
+from sanic.response import HTTPResponse
+
+from tamarco.resources.basic.metrics.meters.counter import HTTPCounter, HTTPCounterHeaderMap, HeaderToLabel
+
+
+@pytest.mark.asyncio
+async def test_http_counter():
+    http_counter = HTTPCounter("viewers")
+
+    @http_counter
+    async def endpoint_handler(request=None):
+        return HTTPResponse(status=500)
+
+    assert http_counter.exception_counter.current_value() == 0
+
+    await endpoint_handler()
+
+    assert http_counter.current_value() == 1
+    assert (
+        (id(http_counter) != id(http_counter.errors_counter))
+        and (id(http_counter) != id(http_counter.exception_counter))
+        and (id(http_counter.errors_counter) != id(http_counter.exception_counter))
+    )
+    assert http_counter.errors_counter.current_value() == 1
+    assert http_counter.exception_counter.current_value() == 0
+
+    @http_counter
+    async def endpoint_handler(request=None):
+        raise Exception()
+
+    with pytest.raises(Exception):
+        await endpoint_handler()
+
+    assert http_counter.current_value() == 2
+    assert http_counter.errors_counter.current_value() == 1
+    assert http_counter.exception_counter.current_value() == 1
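+
+
+# HeaderToLabel maps a request header onto a metric label (falling back to
+# default_header_value when the header is missing); HTTPCounterHeaderMap applies
+# those mappings together with the path, method and status_code labels per request.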
path = "/my_header_test/meow" + method = "PUT" + + prometheus_labels = { + "my_header_label": "my_header_value", + "status_code": 500, + "path": Request.path, + "method": Request.method, + } + + @http_counter + async def endpoint_handler(request=None): + return HTTPResponse(status=500) + + await endpoint_handler(Request) + + assert http_counter.new_labels(prometheus_labels).current_value() == 1 + assert ( + (id(http_counter) != id(http_counter.new_labels(prometheus_labels).errors_counter)) + and (id(http_counter) != id(http_counter.new_labels(prometheus_labels).exception_counter)) + and (id(http_counter.errors_counter) != id(http_counter.exception_counter)) + ) + assert http_counter.exception_counter.new_labels(prometheus_labels).current_value() == 0 + + @http_counter + async def endpoint_handler(request=None): + raise Exception() + + with pytest.raises(Exception): + await endpoint_handler(Request) + + prometheus_labels.pop("status_code") + assert http_counter.exception_counter.new_labels(prometheus_labels).current_value() == 1 diff --git a/tests/unit/resources/basic/metrics/meters/test_gauge.py b/tests/unit/resources/basic/metrics/meters/test_gauge.py new file mode 100644 index 0000000..7640b7e --- /dev/null +++ b/tests/unit/resources/basic/metrics/meters/test_gauge.py @@ -0,0 +1,71 @@ +import asyncio +import time + +import pytest + +from tamarco.resources.basic.metrics.meters.gauge import Gauge + + +def test_gauge(): + gauge = Gauge("test_gauge", "requests") + + gauge.inc() + assert gauge.value == 1 + gauge.dec() + assert gauge.value == 0 + + gauge.inc(2) + assert gauge.value == 2 + gauge.dec(2) + assert gauge.value == 0 + + gauge.set(15.5) + assert gauge.value == 15.5 + + gauge.set_to_current_time() + assert time.time() - gauge.value < 0.01 + + +def test_gauge_invalid_values(): + gauge = Gauge("test_gauge_invalid_values", "requests") + + with pytest.raises(AssertionError): + gauge.inc("invalid value") + + with pytest.raises(AssertionError): + gauge.dec("invalid value") + + with pytest.raises(AssertionError): + gauge.set("invalid value") + + +def test_gauge_timeit(): + gauge = Gauge("test_gauge_timeit", "function_calls") + + @gauge.timeit() + def count_time(): + time.sleep(0.01) + + count_time() + assert 0.0075 < gauge.value < 0.030 + + +@pytest.mark.asyncio +async def test_gauge_async_timeit(): + gauge = Gauge("test_gauge_async_timeit", "function_calls") + + @gauge.timeit() + async def count_time(): + await asyncio.sleep(0.01) + + await count_time() + + assert 0.0075 < gauge.value < 0.012 + + +@pytest.mark.asyncio +async def test_gauge_new_labels(): + gauge = Gauge("test.gauge.new_labels", "test", labels={1: 1}) + gauge_new_label = gauge.new_labels({2: 2}) + assert 1 in gauge.labels and 2 not in gauge.labels + assert 2 in gauge_new_label.labels and 2 in gauge_new_label.labels diff --git a/tests/unit/resources/basic/metrics/meters/test_summary.py b/tests/unit/resources/basic/metrics/meters/test_summary.py new file mode 100644 index 0000000..13efff7 --- /dev/null +++ b/tests/unit/resources/basic/metrics/meters/test_summary.py @@ -0,0 +1,66 @@ +import asyncio + +import pytest + +from tamarco.resources.basic.metrics.meters import Summary +from tamarco.resources.basic.metrics.meters.summary import DEFAULT_SUMMARY_QUANTILES + + +def test_summary(): + meow_summary = Summary("meow_time", "cats") + + [meow_summary.observe(n) for n in range(100)] + + assert all(n in meow_summary.values for n in range(100)) + + metrics = meow_summary._collect_metrics() + + assert len(metrics) == 7 + + assert 
metrics[0].id == "meow_time_sum" + assert metrics[0].value == sum(range(100)) + + assert metrics[1].id == "meow_time_count" + assert metrics[1].value == 100 + + for quantile_metric, quantile in zip(metrics[2:], DEFAULT_SUMMARY_QUANTILES): + assert quantile_metric.id == "meow_time" + assert quantile_metric.labels.pop("quantile") == quantile + assert quantile_metric.value == round(quantile * 100) - 1 + + +def test_summary_invalid_values(): + meow_summary = Summary("meow_time", "cats") + + with pytest.raises(AssertionError): + meow_summary.observe("invalid_value") + + +@pytest.mark.asyncio +async def test_summary_timeit_values(event_loop): + meow_time_summary = Summary("meow_timeit_values", "cats") + + @meow_time_summary.timeit() + async def meow(): + await asyncio.sleep(0.01) + + # opening 100 concurrent meows() + futures = [asyncio.ensure_future(meow(), loop=event_loop) for _ in range(10)] + # waiting for the 100 futures to complete + await asyncio.wait(fs=futures, loop=event_loop, timeout=1) + + metrics = meow_time_summary._collect_metrics() + + # The time have less than a 10% of error + assert 0.007 * 10 < metrics[0].value < 0.013 * 10 + assert metrics[1].value == 10 + assert 0.007 < metrics[2].value < 0.013 + assert len(metrics) == 2 + len(DEFAULT_SUMMARY_QUANTILES) + + +@pytest.mark.asyncio +async def test_summary_new_labels(): + summary = Summary("test.summary.new_labels", "test", labels={1: 1}) + summary_new_label = summary.new_labels({2: 2}) + assert 1 in summary.labels and 2 not in summary.labels + assert 2 in summary_new_label.labels and 2 in summary_new_label.labels diff --git a/tests/unit/resources/basic/metrics/reporters/__init__.py b/tests/unit/resources/basic/metrics/reporters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/resources/basic/metrics/reporters/test_base.py b/tests/unit/resources/basic/metrics/reporters/test_base.py new file mode 100644 index 0000000..f790dd5 --- /dev/null +++ b/tests/unit/resources/basic/metrics/reporters/test_base.py @@ -0,0 +1,35 @@ +from tamarco.resources.basic.metrics.reporters.base import CarbonBaseHandler + + +def test_base_handler_creation_no_metric_prefix(): + base_handler = CarbonBaseHandler() + assert base_handler.metric_prefix == "" + + +def test_base_handler_creation_metric_prefix(): + base_handler = CarbonBaseHandler("test") + assert base_handler.metric_prefix == "test." 
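+
+
+# CarbonBaseHandler renders every meter in Carbon's plaintext format, one
+# "<metric.path> <value> <timestamp>" triplet per line; check_metric_str()
+# below validates that three-field layout.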
+ + +def test_carbon_base_format(sample_meters): + base_handler = CarbonBaseHandler("test") + + metrics_str = base_handler.format_metrics(meters=sample_meters) + check_metric_str(metrics_str) + + +def check_metric_str(metrics_str): + metrics_by_line = metrics_str.split("\n")[:-1] + assert all(len(metric.split(" ")) == 3 for metric in metrics_by_line) + + metrics_names = [metric.split(" ")[0] for metric in metrics_by_line] + assert all("test" in name for name in metrics_names) + assert all("__" in name for name in metrics_names) + assert "cat_counter" in metrics_names[0] + assert "cat_weight" in metrics_names[1] + assert all("meow_time" in name for name in metrics_names[2:]) + + metrics_values = [metric.split(" ")[1] for metric in metrics_by_line] + metrics_timestamp = [metric.split(" ")[2] for metric in metrics_by_line] + assert [float(value) for value in metrics_values] + assert [float(timestamp) for timestamp in metrics_timestamp] diff --git a/tests/unit/resources/basic/metrics/reporters/test_carbon.py b/tests/unit/resources/basic/metrics/reporters/test_carbon.py new file mode 100644 index 0000000..460b0a9 --- /dev/null +++ b/tests/unit/resources/basic/metrics/reporters/test_carbon.py @@ -0,0 +1,22 @@ +import socket +from unittest import mock + +from tamarco.resources.basic.metrics.reporters import CarbonHandler +from tests.unit.resources.basic.metrics.reporters.test_base import check_metric_str + + +def test_carbon(sample_meters): + with mock.patch("socket.socket") as socket_mock: + handler = CarbonHandler(metric_prefix="test") + handler.write(sample_meters) + socket_call = socket_mock.mock_calls[2][1][0] + check_metric_str(socket_call.decode()) + + +def test_carbon_socket_error(sample_meters): + with mock.patch("socket.socket"): + handler = CarbonHandler() + handler.socket.send = mock.Mock(side_effect=socket.timeout) + handler.write(sample_meters) + handler.socket.connect.assert_called_with((handler.host, handler.port)) + handler.socket.send.assert_called() diff --git a/tests/unit/resources/basic/metrics/reporters/test_file.py b/tests/unit/resources/basic/metrics/reporters/test_file.py new file mode 100644 index 0000000..04edfe0 --- /dev/null +++ b/tests/unit/resources/basic/metrics/reporters/test_file.py @@ -0,0 +1,12 @@ +from unittest import mock + +from tamarco.resources.basic.metrics.reporters import FileHandler +from tests.unit.resources.basic.metrics.reporters.test_base import check_metric_str + + +def test_file(sample_meters): + with mock.patch("tamarco.resources.basic.metrics.reporters.file.open", mock.mock_open(), create=True) as open_mock: + handler = FileHandler(metric_prefix="test") + handler.write(sample_meters) + file_mock = open_mock.mock_calls[1][1][0] + check_metric_str(file_mock) diff --git a/tests/unit/resources/basic/metrics/reporters/test_prometheus.py b/tests/unit/resources/basic/metrics/reporters/test_prometheus.py new file mode 100644 index 0000000..b46bd58 --- /dev/null +++ b/tests/unit/resources/basic/metrics/reporters/test_prometheus.py @@ -0,0 +1,17 @@ +from tamarco.resources.basic.metrics.reporters.prometheus import PrometheusHandler + + +def test_prometheus_handler_parse_labels(): + labels = {"protocol": "http", "method": "get"} + parsed_labels = PrometheusHandler.parse_labels(labels) + assert parsed_labels == '{protocol="http",method="get"}' + + +def test_prometheus_handler_parse_line(): + type_line = PrometheusHandler.parse_type_line("http_request_time", "counter") + assert type_line == "# TYPE http_request_time counter\n" + + +def 
test_prometheus_handler_format_metrics(sample_meters):
+    http_body = PrometheusHandler().format_metrics(sample_meters)
+    assert http_body
diff --git a/tests/unit/resources/basic/metrics/reporters/test_stdout.py b/tests/unit/resources/basic/metrics/reporters/test_stdout.py
new file mode 100644
index 0000000..5f4d7f3
--- /dev/null
+++ b/tests/unit/resources/basic/metrics/reporters/test_stdout.py
@@ -0,0 +1,15 @@
+from unittest import mock
+
+from tamarco.resources.basic.metrics.reporters import StdoutHandler
+from tests.unit.resources.basic.metrics.reporters.test_base import check_metric_str
+
+
+def test_stdout(sample_meters):
+    handler = StdoutHandler("test")
+    with mock.patch("builtins.print") as print_mock:
+        handler.write(sample_meters)
+        stdout = print_mock.call_args[0][0]
+        check_metric_str(stdout)
diff --git a/tests/unit/resources/basic/metrics/test_collector.py b/tests/unit/resources/basic/metrics/test_collector.py
new file mode 100755
index 0000000..5eaea57
--- /dev/null
+++ b/tests/unit/resources/basic/metrics/test_collector.py
@@ -0,0 +1,26 @@
+from tamarco.resources.basic.metrics.collector import CollectorThread, MetricsCollector
+from tamarco.resources.basic.metrics.meters import Counter
+from tamarco.resources.basic.metrics.reporters import FileHandler
+from tamarco.resources.basic.metrics.reporters.base import CarbonBaseHandler
+
+
+def test_class_metrics_collector_add_handler():
+    MetricsCollector.handlers.clear()
+    MetricsCollector.add_handler(CarbonBaseHandler())
+    assert isinstance(MetricsCollector.handlers[0], CarbonBaseHandler)
+
+
+def test_class_metrics_collector():
+    MetricsCollector.handlers.clear()
+    MetricsCollector.add_handler(FileHandler())
+    MetricsCollector.collect_period = 2
+    MetricsCollector.meters = []
+    counter = Counter("test_name", "test_unit")
+    counter.inc()
+    collector_thread = CollectorThread()
+    assert isinstance(MetricsCollector.handlers[0], FileHandler)
+    collector_thread.start()
+    if not isinstance(MetricsCollector.meters[0], Counter):
+        collector_thread.stop = True
+        assert False, "type(MetricsCollector.meters[0]) is not Counter"  # noqa: B011
+    collector_thread.stop = True
diff --git a/tests/unit/resources/basic/metrics/test_manager.py b/tests/unit/resources/basic/metrics/test_manager.py
new file mode 100755
index 0000000..79773b3
--- /dev/null
+++ b/tests/unit/resources/basic/metrics/test_manager.py
@@ -0,0 +1,11 @@
+from tamarco.resources.basic.metrics import MetersManager
+from tamarco.resources.basic.metrics.collector import MetricsCollector
+from tamarco.resources.basic.metrics.reporters import FileHandler
+
+
+def test_configure_meters_manager():
+    MetricsCollector.handlers.clear()
+    MetricsCollector.collect_period = 2
+    MetersManager.configure({"handlers": [{"handler": FileHandler, "file_path": "/tmp/metrics"}], "collect_period": 2})
+    assert isinstance(MetricsCollector.handlers[0], FileHandler)
+    MetersManager.thread.stop = True
diff --git a/tests/unit/resources/basic/registry/__init__.py b/tests/unit/resources/basic/registry/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/unit/resources/basic/registry/test_resource.py b/tests/unit/resources/basic/registry/test_resource.py
new file mode 100644
index 0000000..34084a9
--- /dev/null
+++ b/tests/unit/resources/basic/registry/test_resource.py
@@ -0,0 +1,42 @@
+import asyncio
+import uuid
+from unittest.mock import MagicMock
+
+import pytest
+
+from tamarco.resources.basic.registry.resource import Registry
+from
tamarco.resources.basic.registry.settings import ETCD_REGISTRY_TTL_SECONDS +from tests.utils import AsyncMock + + +@pytest.fixture +def instance_id(): + return uuid.uuid4() + + +@pytest.fixture +def registry(instance_id): + microservice = MagicMock() + microservice.instance_id = instance_id + microservice.name = "test_registry" + registry = Registry() + registry.microservice = microservice + return registry + + +@pytest.mark.asyncio +async def test_registry_post_stop(registry, event_loop): + registry.register_task = asyncio.ensure_future(asyncio.Future(), loop=event_loop) + await registry.post_stop() + + +@pytest.mark.asyncio +async def test_register_in_etcd(registry): + registry.etcd_client = AsyncMock() + registry.etcd_client.set = AsyncMock() + await registry.register_in_etcd("key") + registry.etcd_client.set.assert_called_once_with(key="key", value=registry.own_ip, ttl=ETCD_REGISTRY_TTL_SECONDS) + + +def test_get_register_key(registry, instance_id): + assert registry.get_register_key("test") == f"test/test_registry/{instance_id}" diff --git a/tests/unit/resources/basic/status/__init__.py b/tests/unit/resources/basic/status/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/unit/resources/basic/status/test_status.py b/tests/unit/resources/basic/status/test_status.py new file mode 100755 index 0000000..1c58b50 --- /dev/null +++ b/tests/unit/resources/basic/status/test_status.py @@ -0,0 +1,144 @@ +import ast +import asyncio +from unittest import mock +from unittest.mock import Mock + +import pytest +import sanic.response + +from tamarco.core.microservice import Microservice +from tamarco.resources.basic.status.resource import StatusResource, get_global_status +from tamarco.resources.basic.status.status_codes import StatusCodes +from tamarco.resources.io.http.resource import HTTPServerResource +from tests.utils import AsyncMock + + +@pytest.fixture +def status_resource(): + status_resource = StatusResource() + status_resource.resources_to_restart_on_failure = ["status"] + status_resource.critical_resources = ["status"] + return status_resource + + +@pytest.mark.asyncio +async def test_get_status(status_resource): + status_response = await status_resource.status() + assert isinstance(status_response, dict) + assert "status" in status_response + assert isinstance(status_response["status"], int) + + +@pytest.mark.parametrize( + "resources_states,global_status", + [ + ({"r1": {"status": StatusCodes.STARTED}, "r2": {"status": StatusCodes.STARTED, "foo": "bar"}}, 200), + ({"r1": {"status": StatusCodes.STARTED}, "r2": {"status": StatusCodes.FAILED}}, 500), + ({"r1": {"status": StatusCodes.STARTED}, "r2": {"status": StatusCodes.STOPPED}}, 500), + ({"r1": {"status": StatusCodes.STOPPED}, "r2": {"status": StatusCodes.FAILED}}, 500), + ({"r1": {"status": StatusCodes.NOT_STARTED}, "r2": {"status": StatusCodes.STARTED}}, 102), + ({"r1": {"status": StatusCodes.CONNECTING}, "r2": {"status": StatusCodes.STARTED}}, 102), + ({"r1": {"status": StatusCodes.NOT_STARTED}, "r2": {"status": StatusCodes.FAILED}}, 500), + ({"r1": {"status": StatusCodes.CONNECTING}, "r2": {"status": StatusCodes.FAILED}}, 500), + ({"r1": {"status": StatusCodes.NOT_STARTED}, "r2": {"status": StatusCodes.STOPPED}}, 500), + ({"r1": {"status": StatusCodes.CONNECTING}, "r2": {"status": StatusCodes.STOPPED}}, 500), + ({"r1": {"status": StatusCodes.NOT_STARTED}, "r2": {"status": StatusCodes.CONNECTING}}, 102), + ], +) +def test_get_global_status(resources_states, global_status): + assert 
get_global_status(resources_states) == global_status + + +@pytest.mark.asyncio +async def test_request_status_endpoint(): + from tamarco.resources.basic.status.resource import sanic_status_endpoint + + with mock.patch("sanic.request.Request") as mock_request: + StatusResource().microservice = Mock() + StatusResource().microservice.resources = {"http_server": HTTPServerResource()} + response = await sanic_status_endpoint(mock_request) + assert isinstance(response, sanic.response.HTTPResponse) + end_response = ast.literal_eval(response.body.decode("utf-8")) + assert "status" in end_response["http_server"] + assert isinstance(end_response["http_server"]["status"], int) + + +class StatusMicroservice(Microservice): + name = "test" + http_server = HTTPServerResource() + + def __init__(self): + super().__init__() + self.settings.update_internal( + { + "system": { + "deploy_name": "test", + "logging": {"profile": "DEVELOP", "stdout": True}, + "resources": { + "status": { + "restart_policy": { + "resources": { + "restart_microservice_on_failure": ["http_server"], + "restart_resource_on_failure": [], + } + } + }, + "http_server": {"host": "127.0.0.1", "port": 8080, "debug": True}, + }, + } + } + ) + + +@pytest.mark.asyncio +async def test_check_status_with_failed_status(status_resource): + with mock.patch( + "tamarco.resources.basic.status.resource.StatusResource.status", + new_callable=AsyncMock, + return_value={"status": StatusCodes.FAILED}, + ), mock.patch("tamarco.core.microservice.Microservice.stop_gracefully", new_callable=AsyncMock) as stop_g_mock: + status_resource.microservice = StatusMicroservice() + await status_resource._restart_microservice_on_failure() + stop_g_mock.assert_called() + + +@pytest.mark.asyncio +async def test_check_status_with_wrong_status(status_resource): + with mock.patch( + "tamarco.resources.basic.status.resource.StatusResource.status", + new_callable=AsyncMock, + return_value={"wrong": "bad_status"}, + ), mock.patch("tamarco.core.microservice.Microservice.stop_gracefully", new_callable=AsyncMock) as stop_g_mock: + status_resource.microservice = StatusMicroservice() + await status_resource._restart_microservice_on_failure() + stop_g_mock.assert_called() + + +@pytest.mark.asyncio +async def test_stop_check_status(status_resource, event_loop): + with mock.patch( + "tamarco.resources.basic.status.resource.StatusResource._check_status_repeatedly", new_callable=AsyncMock + ), mock.patch("tamarco.resources.io.http.resource.HTTPServerResource.start", new_callable=AsyncMock): + status_task = asyncio.ensure_future(status_resource._check_status_repeatedly(), loop=event_loop) + status_resource._status = status_resource.status_codes.STOPPED + await asyncio.sleep(0.5) + assert status_task.done() + + +@pytest.mark.asyncio +async def test_restart_resource_on_failure(status_resource): + with mock.patch( + "tamarco.resources.basic.status.resource.StatusResource.status", + new_callable=AsyncMock, + return_value={"status": StatusCodes.FAILED}, + ): + with mock.patch( + "tamarco.resources.basic.status.resource.StatusResource.start", new_callable=AsyncMock + ) as start_mock: + with mock.patch( + "tamarco.resources.basic.status.resource.StatusResource.stop", new_callable=AsyncMock + ) as stop_mock: + status_resource.microservice = StatusMicroservice() + await status_resource._restart_resource_on_failure() + start_mock.assert_called() + stop_mock.assert_called() diff --git a/tests/unit/resources/debug/__init__.py b/tests/unit/resources/debug/__init__.py new file mode 100644 index 
0000000..e69de29 diff --git a/tests/unit/resources/debug/test_profiler.py b/tests/unit/resources/debug/test_profiler.py new file mode 100644 index 0000000..6c5df4b --- /dev/null +++ b/tests/unit/resources/debug/test_profiler.py @@ -0,0 +1,85 @@ +import glob +import os +from collections import UserDict + +import pytest + +from tamarco.resources.debug.profiler import ProfilerResource + + +class AsyncDict(UserDict): + async def get(self, *args, **kwargs): + return super().get(*args, **kwargs) + + +class Microservice: + name = "test_profiler" + loop = None + + +@pytest.fixture +def profiler_file_path(): + return "/tmp/test_profiler_profile" + + +def remove_file(file_path): + try: + os.remove(file_path) + except FileNotFoundError: + pass + + +@pytest.fixture +def clean_fixture_file(profiler_file_path): + remove_file(profiler_file_path) + yield + remove_file(profiler_file_path) + + +@pytest.mark.asyncio +async def test_is_profiler_enabled(): + profiler_resource = ProfilerResource() + profiler_resource.microservice = Microservice + + settings = AsyncDict(microservices_with_profiler=[]) + profiler_resource.settings = settings + assert not await profiler_resource.is_profiler_enabled() + + settings = AsyncDict(microservices_with_profiler=["test_profiler"]) + profiler_resource.settings = settings + assert await profiler_resource.is_profiler_enabled() + + +@pytest.mark.asyncio +async def test_start_and_stop(event_loop): + profiler_resource = ProfilerResource() + profiler_resource.microservice = Microservice + profiler_resource.microservice.loop = event_loop + settings = AsyncDict(microservices_with_profiler=["test_profiler"]) + profiler_resource.settings = settings + + assert not profiler_resource.profiler + assert not profiler_resource.cpu_watcher_task + + await profiler_resource.start() + + assert profiler_resource.profiler + assert profiler_resource.cpu_watcher_task + assert not profiler_resource.cpu_watcher_task.cancelled() + + await profiler_resource.stop() + + assert not profiler_resource.profiler + assert profiler_resource.cpu_watcher_task._must_cancel + + +@pytest.mark.asyncio +async def test_save_profile_snapshot_to_file(event_loop, profiler_file_path, clean_fixture_file): + profiler_resource = ProfilerResource() + profiler_resource.profiler_file_path = profiler_file_path + assert not glob.glob(profiler_file_path) + + profiler_resource._initialize_profiler() + profiler_resource.save_profile_snapshot_to_file() + + assert glob.glob(profiler_file_path) diff --git a/tests/unit/resources/io/__init__.py b/tests/unit/resources/io/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/tests/unit/resources/io/http/test_http.py b/tests/unit/resources/io/http/test_http.py new file mode 100644 index 0000000..0ee410a --- /dev/null +++ b/tests/unit/resources/io/http/test_http.py @@ -0,0 +1,65 @@ +import asyncio + +import pytest +from sanic.response import HTTPResponse + +from tamarco.resources.io.http.resource import HTTPCacheMiddleware, HTTPErrorCacheMiddlewareEnabled, HTTPServerResource + + +@pytest.mark.asyncio +async def test_http_cache_middleware_1(): + m = HTTPCacheMiddleware(maxsize=1_000_000, ttl=60) + assert m is not None + assert m.maxsize == 1_000_000 + assert m.ttl == 60 + assert m.maxsize == m.cache.maxsize + assert m.ttl == m.cache.ttl + + +@pytest.mark.asyncio +async def test_http_cache_middleware_2(): + class RequestMock: + url = None + + m = HTTPCacheMiddleware(maxsize=1_000_000, ttl=2) + request = RequestMock() + request.url = "http:127.0.0.1:9090/v1/test1" + response = 
HTTPResponse()
+    response.body = {"data": "datatatata"}
+    response.headers = {"custom_header_name": "custom_header_value"}
+    assert await m.middleware_request(request) is None
+    await m.middleware_response(request, response)
+    assert response.headers.get("x-cache") == "MISS"
+    response_check = await m.middleware_request(request)
+    assert response_check
+    assert response_check.body == response.body
+    assert response_check.headers["x-cache"] == "HIT"
+    del response.headers["x-cache"]
+    del response_check.headers["x-cache"]
+    assert response_check.headers == response.headers
+    assert response_check.status == response.status
+    # After the 2 s TTL expires the entry is evicted and the request misses again
+    await asyncio.sleep(2)
+    assert await m.middleware_request(request) is None
+
+
+@pytest.mark.asyncio
+async def test_http_cache_resource_1():
+    http_resource = HTTPServerResource()
+
+    maxsize_cache_old = http_resource.middleware_cache.maxsize
+    ttl_cache_old = http_resource.middleware_cache.ttl
+    http_resource.set_cache_middleware(666, 2)
+    assert maxsize_cache_old != http_resource.middleware_cache.maxsize
+    assert http_resource.middleware_cache.maxsize == 666
+    assert ttl_cache_old != http_resource.middleware_cache.ttl
+    assert http_resource.middleware_cache.ttl == 2
+
+
+@pytest.mark.asyncio
+async def test_http_cache_resource_2():
+    http_resource = HTTPServerResource()
+
+    try:
+        http_resource.enable_cache_middleware()
+    except HTTPErrorCacheMiddlewareEnabled:
+        pytest.fail("enable_cache_middleware() raised HTTPErrorCacheMiddlewareEnabled unexpectedly")
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 0000000..2071404
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,6 @@
+from unittest import mock
+
+
+class AsyncMock(mock.MagicMock):
+    async def __call__(self, *args, **kwargs):
+        return super().__call__(*args, **kwargs)
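+
+
+# Illustrative usage: AsyncMock stands in for any awaited callable, e.g.
+#     client.set = AsyncMock()
+#     await client.set(key="k")
+#     client.set.assert_called_once_with(key="k")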