From 8e5a1ec2fb8d072dc9725be700fce3c570d51de3 Mon Sep 17 00:00:00 2001 From: Ran Ziv Date: Tue, 28 Mar 2017 12:17:46 +0300 Subject: [PATCH] ARIA-48 Revamped ARIA CLI This is a large commit that revolves mostly around creating the new CLI, but it also ties ARIA's various components together for the first time - allowing a complete run of the "hello-world" example and more. This commit introduces a few other important modules: - aria/core.py - used for managing service templates and services. - aria/orchestrator/workflow_runner.py - used for managing a workflow execution on a service. - aria/orchestrator/workflows/executor/dry.py - a "dry executor", used for dry-executing workflows and printing the tasks that would run. Other fixes that were required for successful end-to-end usage of ARIA have also been introduced in this commit, but there are too many to list; review the commit for more info. --- aria/.pylintrc | 2 +- aria/__init__.py | 2 +- aria/cli/args_parser.py | 269 --------- aria/cli/cli.py | 113 ---- aria/cli/commands.py | 546 ------------------ aria/cli/commands/__init__.py | 26 + aria/cli/commands/executions.py | 172 ++++++ aria/cli/commands/logs.py | 65 +++ aria/cli/commands/node_templates.py | 93 +++ aria/cli/commands/nodes.py | 87 +++ aria/cli/commands/plugins.py | 99 ++++ aria/cli/commands/reset.py | 40 ++ aria/cli/commands/service_templates.py | 208 +++++++ aria/cli/commands/services.py | 179 ++++++ aria/cli/commands/workflows.py | 100 ++++ aria/cli/config/__init__.py | 14 + aria/cli/config/config.py | 73 +++ aria/cli/config/config_template.yaml | 12 + aria/cli/core/__init__.py | 14 + aria/cli/core/aria.py | 429 ++++++++++++++ aria/cli/csar.py | 13 +- aria/cli/defaults.py | 20 + aria/cli/dry.py | 93 --- aria/cli/env.py | 124 ++++ aria/cli/exceptions.py | 54 +- aria/cli/helptexts.py | 49 ++ aria/cli/inputs.py | 118 ++++ aria/cli/logger.py | 114 ++++ aria/cli/main.py | 58 ++ aria/cli/service_template_utils.py | 121 ++++ aria/cli/storage.py | 95 --- aria/cli/table.py | 116 ++++ aria/cli/utils.py | 115 ++++ aria/core.py | 124 ++++ aria/exceptions.py | 29 + aria/logger.py | 12 + aria/modeling/__init__.py | 2 + aria/modeling/exceptions.py | 25 + aria/modeling/models.py | 9 +- aria/modeling/orchestration.py | 21 +- aria/modeling/service_changes.py | 10 +- aria/modeling/service_common.py | 15 +- aria/modeling/service_instance.py | 16 +- aria/modeling/service_template.py | 73 ++- aria/modeling/utils.py | 92 ++- aria/orchestrator/context/common.py | 43 +- aria/orchestrator/context/operation.py | 2 - aria/orchestrator/context/workflow.py | 20 +- aria/orchestrator/exceptions.py | 28 + .../execution_plugin/ctx_proxy/server.py | 3 +- .../execution_plugin/instantiation.py | 2 +- aria/orchestrator/plugin.py | 27 +- aria/orchestrator/runner.py | 101 ---- aria/orchestrator/workflow_runner.py | 161 ++++++ aria/orchestrator/workflows/api/task.py | 96 ++- .../workflows/builtin/__init__.py | 1 + .../workflows/builtin/execute_operation.py | 16 +- aria/orchestrator/workflows/builtin/utils.py | 82 ++- aria/orchestrator/workflows/core/engine.py | 6 +- aria/orchestrator/workflows/core/task.py | 2 - aria/orchestrator/workflows/exceptions.py | 10 +- .../orchestrator/workflows/executor/celery.py | 2 +- aria/orchestrator/workflows/executor/dry.py | 51 ++ .../workflows/executor/process.py | 2 +- .../orchestrator/workflows/executor/thread.py | 3 +- aria/parser/consumption/__init__.py | 20 +- aria/parser/consumption/modeling.py | 3 +- aria/storage/core.py | 6 +- aria/storage/exceptions.py | 4 +
aria/storage/instrumentation.py | 7 +- aria/storage/sql_mapi.py | 30 +- aria/utils/application.py | 294 ---------- aria/utils/archive.py | 63 ++ aria/utils/exceptions.py | 11 + aria/utils/file.py | 13 + aria/utils/formatting.py | 28 + aria/utils/http.py | 62 ++ aria/utils/threading.py | 24 + aria/utils/type.py | 61 ++ .../use-cases/block-storage-1/inputs.yaml | 3 + .../use-cases/block-storage-2/inputs.yaml | 3 + .../use-cases/block-storage-3/inputs.yaml | 2 + .../use-cases/block-storage-4/inputs.yaml | 2 + .../use-cases/block-storage-5/inputs.yaml | 3 + .../use-cases/block-storage-6/inputs.yaml | 3 + .../use-cases/compute-1/inputs.yaml | 1 + .../use-cases/multi-tier-1/inputs.yaml | 1 + .../use-cases/network-1/inputs.yaml | 1 + .../use-cases/network-2/inputs.yaml | 1 + .../use-cases/network-3/inputs.yaml | 1 + .../use-cases/object-storage-1/inputs.yaml | 1 + .../software-component-1/inputs.yaml | 1 + .../simple_v1_0/modeling/__init__.py | 3 +- requirements.in | 9 + requirements.txt | 22 +- setup.py | 2 +- tests/.pylintrc | 2 +- tests/cli/__init__.py | 14 + tests/cli/base_test.py | 77 +++ tests/cli/runner.py | 27 + tests/cli/test_node_templates.py | 133 +++++ tests/cli/test_nodes.py | 101 ++++ tests/cli/test_service_templates.py | 246 ++++++++ tests/cli/test_services.py | 205 +++++++ tests/cli/utils.py | 101 ++++ tests/conftest.py | 14 +- tests/end2end/test_orchestrator.py | 63 -- tests/fixtures.py | 70 +++ tests/mock/context.py | 7 +- tests/mock/models.py | 135 ++++- tests/mock/topology.py | 8 +- tests/mock/workflow.py | 26 + tests/modeling/test_models.py | 17 +- tests/orchestrator/context/test_operation.py | 45 +- .../context/test_resource_render.py | 12 +- tests/orchestrator/context/test_serialize.py | 13 +- tests/orchestrator/context/test_toolbelt.py | 11 +- tests/orchestrator/context/test_workflow.py | 10 +- .../execution_plugin/test_local.py | 15 +- .../orchestrator/execution_plugin/test_ssh.py | 48 +- tests/orchestrator/test_runner.py | 74 --- tests/orchestrator/test_workflow_runner.py | 292 ++++++++++ tests/orchestrator/workflows/api/test_task.py | 18 +- .../workflows/core/test_engine.py | 10 +- ...> test_task_graph_into_execution_graph.py} | 0 .../executor/test_process_executor.py | 34 +- ...ocess_executor_concurrent_modifications.py | 3 +- .../test_process_executor_extension.py | 3 +- .../test_process_executor_tracked_changes.py | 6 +- tests/parser/service_templates.py | 6 +- .../test_tosca_simple_v1_0.py | 2 +- tests/utils/test_plugin.py | 29 +- .../utils/test_threading.py | 35 +- tox.ini | 2 +- 134 files changed, 5123 insertions(+), 2195 deletions(-) delete mode 100644 aria/cli/args_parser.py delete mode 100644 aria/cli/cli.py delete mode 100644 aria/cli/commands.py create mode 100644 aria/cli/commands/__init__.py create mode 100644 aria/cli/commands/executions.py create mode 100644 aria/cli/commands/logs.py create mode 100644 aria/cli/commands/node_templates.py create mode 100644 aria/cli/commands/nodes.py create mode 100644 aria/cli/commands/plugins.py create mode 100644 aria/cli/commands/reset.py create mode 100644 aria/cli/commands/service_templates.py create mode 100644 aria/cli/commands/services.py create mode 100644 aria/cli/commands/workflows.py create mode 100644 aria/cli/config/__init__.py create mode 100644 aria/cli/config/config.py create mode 100644 aria/cli/config/config_template.yaml create mode 100644 aria/cli/core/__init__.py create mode 100644 aria/cli/core/aria.py create mode 100644 aria/cli/defaults.py delete mode 100644 aria/cli/dry.py create mode 100644 
aria/cli/env.py create mode 100644 aria/cli/helptexts.py create mode 100644 aria/cli/inputs.py create mode 100644 aria/cli/logger.py create mode 100644 aria/cli/main.py create mode 100644 aria/cli/service_template_utils.py delete mode 100644 aria/cli/storage.py create mode 100644 aria/cli/table.py create mode 100644 aria/cli/utils.py create mode 100644 aria/core.py delete mode 100644 aria/orchestrator/runner.py create mode 100644 aria/orchestrator/workflow_runner.py create mode 100644 aria/orchestrator/workflows/executor/dry.py delete mode 100644 aria/utils/application.py create mode 100644 aria/utils/archive.py create mode 100644 aria/utils/http.py create mode 100644 aria/utils/type.py create mode 100644 examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml create mode 100644 examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml create mode 100644 tests/cli/__init__.py create mode 100644 tests/cli/base_test.py create mode 100644 tests/cli/runner.py create mode 100644 tests/cli/test_node_templates.py create mode 100644 tests/cli/test_nodes.py create mode 100644 tests/cli/test_service_templates.py create mode 100644 tests/cli/test_services.py create mode 100644 tests/cli/utils.py delete mode 100644 tests/end2end/test_orchestrator.py create mode 100644 tests/fixtures.py create mode 100644 tests/mock/workflow.py delete mode 100644 tests/orchestrator/test_runner.py create mode 100644 tests/orchestrator/test_workflow_runner.py rename tests/orchestrator/workflows/core/{test_task_graph_into_exececution_graph.py => test_task_graph_into_execution_graph.py} (100%) rename tests/{end2end => parser}/test_tosca_simple_v1_0.py (97%) rename aria/cli/config.py => tests/utils/test_threading.py (51%) diff --git a/aria/.pylintrc b/aria/.pylintrc index 72226053..7da8c56f 100644 --- a/aria/.pylintrc +++ b/aria/.pylintrc @@ -77,7 +77,7 @@ confidence= # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,logging-format-interpolation,import-error,redefined-variable-type,broad-except,protected-access,global-statement,no-member +disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,logging-format-interpolation,import-error,redefined-variable-type,broad-except,protected-access,global-statement,no-member,unused-argument [REPORTS] diff --git a/aria/__init__.py b/aria/__init__.py index b9251d58..df75b1e7 100644 --- a/aria/__init__.py +++ b/aria/__init__.py @@ -84,6 +84,6 @@ def application_resource_storage(api, api_kwargs=None, initiator=None, initiator return storage.ResourceStorage(api_cls=api, api_kwargs=api_kwargs, - items=['blueprint', 'deployment', 'plugin'], + items=['service_template', 'service', 'plugin'], initiator=initiator, initiator_kwargs=initiator_kwargs) diff --git a/aria/cli/args_parser.py b/aria/cli/args_parser.py deleted file mode 100644 index 81ee513c..00000000 --- a/aria/cli/args_parser.py +++ /dev/null @@ -1,269 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Argument parsing configuration and functions -""" - -import argparse -from functools import partial - -from ..utils.argparse import ArgumentParser - -NO_VERBOSE = 0 - - -class SmartFormatter(argparse.HelpFormatter): - """ - TODO: what is this? - """ - def _split_lines(self, text, width): - if text.startswith('R|'): - return text[2:].splitlines() - return super(SmartFormatter, self)._split_lines(text, width) - - -def sub_parser_decorator(func=None, **parser_settings): - """ - Decorated for sub_parser argument definitions - """ - if not func: - return partial(sub_parser_decorator, **parser_settings) - - def _wrapper(parser): - sub_parser = parser.add_parser(**parser_settings) - sub_parser.add_argument( - '-v', '--verbose', - dest='verbosity', - action='count', - default=NO_VERBOSE, - help='Set verbosity level (can be passed multiple times)') - func(sub_parser) - return sub_parser - return _wrapper - - -def config_parser(parser=None): - """ - Top level argparse configuration - """ - parser = parser or ArgumentParser( - prog='ARIA', - description="ARIA's Command Line Interface", - formatter_class=SmartFormatter) - parser.add_argument('-v', '--version', action='version') - sub_parser = parser.add_subparsers(title='Commands', dest='command') - add_init_parser(sub_parser) - add_execute_parser(sub_parser) - add_parse_parser(sub_parser) - add_workflow_parser(sub_parser) - add_spec_parser(sub_parser) - add_csar_create_parser(sub_parser) - add_csar_open_parser(sub_parser) - add_csar_validate_parser(sub_parser) - return parser - - -@sub_parser_decorator( - name='parse', - help='Parse a blueprint', - formatter_class=SmartFormatter) -def add_parse_parser(parse): - """ - ``parse`` command parser configuration - """ - parse.add_argument( - 'uri', - help='URI or file path to service template') - parse.add_argument( - 'consumer', - nargs='?', - default='validate', - help='"validate" (default), "presentation", "template", "types", "instance", or consumer ' - 'class name (full class path or short name)') - parse.add_argument( - '--loader-source', - default='aria.parser.loading.DefaultLoaderSource', - help='loader source class for the parser') - parse.add_argument( - '--reader-source', - default='aria.parser.reading.DefaultReaderSource', - help='reader source class for the parser') - parse.add_argument( - '--presenter-source', - default='aria.parser.presentation.DefaultPresenterSource', - help='presenter source class for the parser') - parse.add_argument( - '--presenter', - help='force use of this presenter class in parser') - parse.add_argument( - '--prefix', nargs='*', - help='prefixes for imports') - parse.add_flag_argument( - 'debug', - help_true='print debug info', - help_false='don\'t print debug info') - parse.add_flag_argument( - 'cached-methods', - help_true='enable cached methods', - help_false='disable cached methods', - default=True) - - -@sub_parser_decorator( - name='workflow', - help='Run a workflow on a blueprint', - formatter_class=SmartFormatter) -def add_workflow_parser(workflow): - """ - ``workflow`` command parser configuration - """ - workflow.add_argument( - 'uri', - help='URI or file path to service template') - workflow.add_argument( - '-w', '--workflow', - default='install', - help='The workflow name') - workflow.add_flag_argument( - 'dry', - default=True, - help_true='dry run', - help_false='wet run') - - -@sub_parser_decorator( - name='init', - help='Initialize 
environment', - formatter_class=SmartFormatter) -def add_init_parser(init): - """ - ``init`` command parser configuration - """ - init.add_argument( - '-d', '--deployment-id', - required=True, - help='A unique ID for the deployment') - init.add_argument( - '-p', '--blueprint-path', - dest='blueprint_path', - required=True, - help='The path to the desired blueprint') - init.add_argument( - '-i', '--inputs', - dest='input', - action='append', - help='R|Inputs for the local workflow creation \n' - '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files, \n' - 'a JSON string or as "key1=value1;key2=value2"). \n' - 'This argument can be used multiple times') - init.add_argument( - '-b', '--blueprint-id', - dest='blueprint_id', - required=True, - help='The blueprint ID' - ) - - -@sub_parser_decorator( - name='execute', - help='Execute a workflow', - formatter_class=SmartFormatter) -def add_execute_parser(execute): - """ - ``execute`` command parser configuration - """ - execute.add_argument( - '-d', '--deployment-id', - required=True, - help='A unique ID for the deployment') - execute.add_argument( - '-w', '--workflow', - dest='workflow_id', - help='The workflow to execute') - execute.add_argument( - '-p', '--parameters', - dest='parameters', - action='append', - help='R|Parameters for the workflow execution\n' - '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files,\n' - 'a JSON string or as "key1=value1;key2=value2").\n' - 'This argument can be used multiple times.') - execute.add_argument( - '--task-retries', - dest='task_retries', - type=int, - help='How many times should a task be retried in case of failure') - execute.add_argument( - '--task-retry-interval', - dest='task_retry_interval', - default=1, - type=int, - help='How many seconds to wait before each task is retried') - - -@sub_parser_decorator( - name='csar-create', - help='Create a CSAR file from a TOSCA service template directory', - formatter_class=SmartFormatter) -def add_csar_create_parser(parse): - parse.add_argument( - 'source', - help='Service template directory') - parse.add_argument( - 'entry', - help='Entry definition file relative to service template directory') - parse.add_argument( - '-d', '--destination', - help='Output CSAR zip destination', - required=True) - - -@sub_parser_decorator( - name='csar-open', - help='Extracts a CSAR file to a TOSCA service template directory', - formatter_class=SmartFormatter) -def add_csar_open_parser(parse): - parse.add_argument( - 'source', - help='CSAR file location') - parse.add_argument( - '-d', '--destination', - help='Output directory to extract the CSAR into', - required=True) - - -@sub_parser_decorator( - name='csar-validate', - help='Validates a CSAR file', - formatter_class=SmartFormatter) -def add_csar_validate_parser(parse): - parse.add_argument( - 'source', - help='CSAR file location') - - -@sub_parser_decorator( - name='spec', - help='Specification tool', - formatter_class=SmartFormatter) -def add_spec_parser(spec): - """ - ``spec`` command parser configuration - """ - spec.add_argument( - '--csv', - action='store_true', - help='output as CSV') diff --git a/aria/cli/cli.py b/aria/cli/cli.py deleted file mode 100644 index 8d014b32..00000000 --- a/aria/cli/cli.py +++ /dev/null @@ -1,113 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -CLI Entry point -""" - -import os -import logging -import tempfile - -from .. import install_aria_extensions -from ..logger import ( - create_logger, - create_console_log_handler, - create_file_log_handler, - LoggerMixin, -) -from ..utils.exceptions import print_exception -from .args_parser import config_parser -from .commands import ( - ParseCommand, - WorkflowCommand, - InitCommand, - ExecuteCommand, - CSARCreateCommand, - CSAROpenCommand, - CSARValidateCommand, - SpecCommand, -) - -__version__ = '0.1.0' - - -class AriaCli(LoggerMixin): - """ - Context manager based class that enables proper top level error handling - """ - - def __init__(self, *args, **kwargs): - super(AriaCli, self).__init__(*args, **kwargs) - self.commands = { - 'parse': ParseCommand.with_logger(base_logger=self.logger), - 'workflow': WorkflowCommand.with_logger(base_logger=self.logger), - 'init': InitCommand.with_logger(base_logger=self.logger), - 'execute': ExecuteCommand.with_logger(base_logger=self.logger), - 'csar-create': CSARCreateCommand.with_logger(base_logger=self.logger), - 'csar-open': CSAROpenCommand.with_logger(base_logger=self.logger), - 'csar-validate': CSARValidateCommand.with_logger(base_logger=self.logger), - 'spec': SpecCommand.with_logger(base_logger=self.logger), - } - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """ - Here we will handle errors - :param exc_type: - :param exc_val: - :param exc_tb: - :return: - """ - # todo: error handling - # todo: cleanup if needed - # TODO: user message if needed - pass - - def run(self): - """ - Parses user arguments and run the appropriate command - """ - parser = config_parser() - args, unknown_args = parser.parse_known_args() - - command_handler = self.commands[args.command] - self.logger.debug('Running command: {args.command} handler: {0}'.format( - command_handler, args=args)) - try: - command_handler(args, unknown_args) - except Exception as e: - print_exception(e) - - -def main(): - """ - CLI entry point - """ - install_aria_extensions() - create_logger( - handlers=[ - create_console_log_handler(), - create_file_log_handler(file_path=os.path.join(tempfile.gettempdir(), 'aria_cli.log')), - ], - level=logging.INFO) - with AriaCli() as aria: - aria.run() - - -if __name__ == '__main__': - main() diff --git a/aria/cli/commands.py b/aria/cli/commands.py deleted file mode 100644 index ee329e7a..00000000 --- a/aria/cli/commands.py +++ /dev/null @@ -1,546 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -CLI various commands implementation -""" - -import json -import os -import sys -import csv -import shutil -import tempfile -from glob import glob -from importlib import import_module - -from ruamel import yaml # @UnresolvedImport - -from .. import extension -from ..logger import LoggerMixin -from ..parser import iter_specifications -from ..parser.consumption import ( - ConsumptionContext, - ConsumerChain, - Read, - Validate, - ServiceTemplate, - Types, - Inputs, - ServiceInstance -) -from ..parser.loading import LiteralLocation, UriLocation -from ..utils.application import StorageManager -from ..utils.caching import cachedmethod -from ..utils.console import (puts, Colored, indent) -from ..utils.imports import (import_fullname, import_modules) -from ..utils.collections import OrderedDict -from ..orchestrator import WORKFLOW_DECORATOR_RESERVED_ARGUMENTS -from ..orchestrator.runner import Runner -from ..orchestrator.workflows.builtin import BUILTIN_WORKFLOWS -from .dry import convert_to_dry - -from .exceptions import ( - AriaCliFormatInputsError, - AriaCliYAMLInputsError, - AriaCliInvalidInputsError -) -from . import csar - - -class BaseCommand(LoggerMixin): - """ - Base class for CLI commands. - """ - - def __repr__(self): - return 'AriaCli({cls.__name__})'.format(cls=self.__class__) - - def __call__(self, args_namespace, unknown_args): - """ - __call__ method is called when running command - :param args_namespace: - """ - pass - - def parse_inputs(self, inputs): - """ - Returns a dictionary of inputs `resources` can be: - - A list of files. - - A single file - - A directory containing multiple input files - - A key1=value1;key2=value2 pairs string. - - Wildcard based string (e.g. *-inputs.yaml) - """ - - parsed_dict = {} - - def _format_to_dict(input_string): - self.logger.info('Processing inputs source: {0}'.format(input_string)) - try: - input_string = input_string.strip() - try: - parsed_dict.update(json.loads(input_string)) - except BaseException: - parsed_dict.update((i.split('=') - for i in input_string.split(';') - if i)) - except Exception as exc: - raise AriaCliFormatInputsError(str(exc), inputs=input_string) - - def _handle_inputs_source(input_path): - self.logger.info('Processing inputs source: {0}'.format(input_path)) - try: - with open(input_path) as input_file: - content = yaml.safe_load(input_file) - except yaml.YAMLError as exc: - raise AriaCliYAMLInputsError( - '"{0}" is not a valid YAML. 
{1}'.format(input_path, str(exc))) - if isinstance(content, dict): - parsed_dict.update(content) - return - if content is None: - return - raise AriaCliInvalidInputsError('Invalid inputs', inputs=input_path) - - for input_string in inputs if isinstance(inputs, list) else [inputs]: - if os.path.isdir(input_string): - for input_file in os.listdir(input_string): - _handle_inputs_source(os.path.join(input_string, input_file)) - continue - input_files = glob(input_string) - if input_files: - for input_file in input_files: - _handle_inputs_source(input_file) - continue - _format_to_dict(input_string) - return parsed_dict - - -class ParseCommand(BaseCommand): - """ - :code:`parse` command. - - Given a blueprint, emits information in human-readable, JSON, or YAML format from various phases - of the ARIA parser. - """ - - def __call__(self, args_namespace, unknown_args): - super(ParseCommand, self).__call__(args_namespace, unknown_args) - - if args_namespace.prefix: - for prefix in args_namespace.prefix: - extension.parser.uri_loader_prefix().append(prefix) - - cachedmethod.ENABLED = args_namespace.cached_methods - - context = ParseCommand.create_context_from_namespace(args_namespace) - context.args = unknown_args - - consumer = ConsumerChain(context, (Read, Validate)) - - consumer_class_name = args_namespace.consumer - dumper = None - if consumer_class_name == 'validate': - dumper = None - elif consumer_class_name == 'presentation': - dumper = consumer.consumers[0] - elif consumer_class_name == 'template': - consumer.append(ServiceTemplate) - elif consumer_class_name == 'types': - consumer.append(ServiceTemplate, Types) - elif consumer_class_name == 'instance': - consumer.append(ServiceTemplate, Inputs, ServiceInstance) - else: - consumer.append(ServiceTemplate, Inputs, ServiceInstance) - consumer.append(import_fullname(consumer_class_name)) - - if dumper is None: - # Default to last consumer - dumper = consumer.consumers[-1] - - consumer.consume() - - if not context.validation.dump_issues(): - dumper.dump() - exit(1) - - @staticmethod - def create_context_from_namespace(namespace, **kwargs): - args = vars(namespace).copy() - args.update(kwargs) - return ParseCommand.create_context(**args) - - @staticmethod - def create_context(uri, - loader_source, - reader_source, - presenter_source, - presenter, - debug, - **kwargs): - context = ConsumptionContext() - context.loading.loader_source = import_fullname(loader_source)() - context.reading.reader_source = import_fullname(reader_source)() - context.presentation.location = UriLocation(uri) if isinstance(uri, basestring) else uri - context.presentation.presenter_source = import_fullname(presenter_source)() - context.presentation.presenter_class = import_fullname(presenter) - context.presentation.print_exceptions = debug - return context - - -class WorkflowCommand(BaseCommand): - """ - :code:`workflow` command. 
- """ - - WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies') - - def __call__(self, args_namespace, unknown_args): - super(WorkflowCommand, self).__call__(args_namespace, unknown_args) - - context = self._parse(args_namespace.uri) - workflow_fn, inputs = self._get_workflow(context, args_namespace.workflow) - self._dry = args_namespace.dry - self._run(context, args_namespace.workflow, workflow_fn, inputs) - - def _parse(self, uri): - # Parse - context = ConsumptionContext() - context.presentation.location = UriLocation(uri) - consumer = ConsumerChain(context, (Read, Validate, ServiceTemplate, Inputs, - ServiceInstance)) - consumer.consume() - - if context.validation.dump_issues(): - exit(1) - - return context - - def _get_workflow(self, context, workflow_name): - if workflow_name in BUILTIN_WORKFLOWS: - workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.{0}'.format( - workflow_name)) - inputs = {} - else: - workflow = context.modeling.instance.policies.get(workflow_name) - if workflow is None: - raise AttributeError('workflow policy does not exist: "{0}"'.format(workflow_name)) - if workflow.type.role != 'workflow': - raise AttributeError('policy is not a workflow: "{0}"'.format(workflow_name)) - - sys.path.append(os.path.dirname(str(context.presentation.location))) - - workflow_fn = import_fullname(workflow.properties['implementation'].value) - - for k in workflow.properties: - if k in WORKFLOW_DECORATOR_RESERVED_ARGUMENTS: - raise AttributeError('workflow policy "{0}" defines a reserved property: "{1}"' - .format(workflow_name, k)) - - inputs = OrderedDict([ - (k, v.value) for k, v in workflow.properties.iteritems() - if k not in WorkflowCommand.WORKFLOW_POLICY_INTERNAL_PROPERTIES - ]) - - return workflow_fn, inputs - - def _run(self, context, workflow_name, workflow_fn, inputs): - # Storage - def _initialize_storage(model_storage): - if self._dry: - convert_to_dry(context.modeling.instance) - context.modeling.store(model_storage) - - # Create runner - runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage, - lambda: context.modeling.instance.id) - - # Run - runner.run() - - -class InitCommand(BaseCommand): - """ - :code:`init` command. - - Broken. Currently maintained for reference. 
- """ - - _IN_VIRTUAL_ENV = hasattr(sys, 'real_prefix') - - def __call__(self, args_namespace, unknown_args): - super(InitCommand, self).__call__(args_namespace, unknown_args) - self._workspace_setup() - inputs = self.parse_inputs(args_namespace.input) if args_namespace.input else None - plan, deployment_plan = self._parse_blueprint(args_namespace.blueprint_path, inputs) - self._create_storage( - blueprint_plan=plan, - blueprint_path=args_namespace.blueprint_path, - deployment_plan=deployment_plan, - blueprint_id=args_namespace.blueprint_id, - deployment_id=args_namespace.deployment_id, - main_file_name=os.path.basename(args_namespace.blueprint_path)) - self.logger.info('Initiated {0}'.format(args_namespace.blueprint_path)) - self.logger.info( - 'If you make changes to the blueprint, ' - 'run `aria local init -p {0}` command again to apply them'.format( - args_namespace.blueprint_path)) - - def _workspace_setup(self): - try: - create_user_space() - self.logger.debug( - 'created user space path in: {0}'.format(user_space())) - except IOError: - self.logger.debug( - 'user space path already exist - {0}'.format(user_space())) - try: - create_local_storage() - self.logger.debug( - 'created local storage path in: {0}'.format(local_storage())) - except IOError: - self.logger.debug( - 'local storage path already exist - {0}'.format(local_storage())) - return local_storage() - - def _parse_blueprint(self, blueprint_path, inputs=None): - # TODO - pass - - @staticmethod - def _create_storage( - blueprint_path, - blueprint_plan, - deployment_plan, - blueprint_id, - deployment_id, - main_file_name=None): - resource_storage = application_resource_storage( - FileSystemResourceDriver(local_resource_storage())) - model_storage = application_model_storage( - FileSystemModelDriver(local_model_storage())) - resource_storage.setup() - model_storage.setup() - storage_manager = StorageManager( - model_storage=model_storage, - resource_storage=resource_storage, - blueprint_path=blueprint_path, - blueprint_id=blueprint_id, - blueprint_plan=blueprint_plan, - deployment_id=deployment_id, - deployment_plan=deployment_plan - ) - storage_manager.create_blueprint_storage( - blueprint_path, - main_file_name=main_file_name - ) - storage_manager.create_nodes_storage() - storage_manager.create_deployment_storage() - storage_manager.create_node_instances_storage() - - -class ExecuteCommand(BaseCommand): - """ - :code:`execute` command. - - Broken. Currently maintained for reference. - """ - - def __call__(self, args_namespace, unknown_args): - super(ExecuteCommand, self).__call__(args_namespace, unknown_args) - parameters = (self.parse_inputs(args_namespace.parameters) - if args_namespace.parameters else {}) - resource_storage = application_resource_storage( - FileSystemResourceDriver(local_resource_storage())) - model_storage = application_model_storage( - FileSystemModelDriver(local_model_storage())) - deployment = model_storage.service_instance.get(args_namespace.deployment_id) - - try: - workflow = deployment.workflows[args_namespace.workflow_id] - except KeyError: - raise ValueError( - '{0} workflow does not exist. 
existing workflows are: {1}'.format( - args_namespace.workflow_id, - deployment.workflows.keys())) - - workflow_parameters = self._merge_and_validate_execution_parameters( - workflow, - args_namespace.workflow_id, - parameters - ) - workflow_context = WorkflowContext( - name=args_namespace.workflow_id, - model_storage=model_storage, - resource_storage=resource_storage, - deployment_id=args_namespace.deployment_id, - workflow_id=args_namespace.workflow_id, - parameters=workflow_parameters, - ) - workflow_function = self._load_workflow_handler(workflow['operation']) - tasks_graph = workflow_function(workflow_context, **workflow_context.parameters) - executor = ProcessExecutor() - workflow_engine = Engine(executor=executor, - workflow_context=workflow_context, - tasks_graph=tasks_graph) - workflow_engine.execute() - executor.close() - - @staticmethod - def _merge_and_validate_execution_parameters( - workflow, - workflow_name, - execution_parameters): - merged_parameters = {} - workflow_parameters = workflow.get('parameters', {}) - missing_mandatory_parameters = set() - - for name, param in workflow_parameters.iteritems(): - if 'default' not in param: - if name not in execution_parameters: - missing_mandatory_parameters.add(name) - continue - merged_parameters[name] = execution_parameters[name] - continue - merged_parameters[name] = (execution_parameters[name] if name in execution_parameters - else param['default']) - - if missing_mandatory_parameters: - raise ValueError( - 'Workflow "{0}" must be provided with the following ' - 'parameters to execute: {1}'.format( - workflow_name, ','.join(missing_mandatory_parameters))) - - custom_parameters = dict( - (k, v) for (k, v) in execution_parameters.iteritems() - if k not in workflow_parameters) - - if custom_parameters: - raise ValueError( - 'Workflow "{0}" does not have the following parameters declared: {1}. 
' - 'Remove these parameters'.format( - workflow_name, ','.join(custom_parameters.keys()))) - - return merged_parameters - - @staticmethod - def _load_workflow_handler(handler_path): - module_name, spec_handler_name = handler_path.rsplit('.', 1) - try: - module = import_module(module_name) - return getattr(module, spec_handler_name) - except ImportError: - # TODO: exception handler - raise - except AttributeError: - # TODO: exception handler - raise - - -class BaseCSARCommand(BaseCommand): - @staticmethod - def _parse_and_dump(reader): - context = ConsumptionContext() - context.loading.prefixes += [os.path.join(reader.destination, 'definitions')] - context.presentation.location = LiteralLocation(reader.entry_definitions_yaml) - chain = ConsumerChain(context, (Read, Validate, Model, Instance)) - chain.consume() - if context.validation.dump_issues(): - raise RuntimeError('Validation failed') - dumper = chain.consumers[-1] - dumper.dump() - - def _read(self, source, destination): - reader = csar.read( - source=source, - destination=destination, - logger=self.logger) - self.logger.info( - 'Path: {r.destination}\n' - 'TOSCA meta file version: {r.meta_file_version}\n' - 'CSAR Version: {r.csar_version}\n' - 'Created By: {r.created_by}\n' - 'Entry definitions: {r.entry_definitions}' - .format(r=reader)) - self._parse_and_dump(reader) - - def _validate(self, source): - workdir = tempfile.mkdtemp() - try: - self._read( - source=source, - destination=workdir) - finally: - shutil.rmtree(workdir, ignore_errors=True) - - -class CSARCreateCommand(BaseCSARCommand): - def __call__(self, args_namespace, unknown_args): - super(CSARCreateCommand, self).__call__(args_namespace, unknown_args) - csar.write( - source=args_namespace.source, - entry=args_namespace.entry, - destination=args_namespace.destination, - logger=self.logger) - self._validate(args_namespace.destination) - - -class CSAROpenCommand(BaseCSARCommand): - def __call__(self, args_namespace, unknown_args): - super(CSAROpenCommand, self).__call__(args_namespace, unknown_args) - self._read( - source=args_namespace.source, - destination=args_namespace.destination) - - -class CSARValidateCommand(BaseCSARCommand): - def __call__(self, args_namespace, unknown_args): - super(CSARValidateCommand, self).__call__(args_namespace, unknown_args) - self._validate(args_namespace.source) - - -class SpecCommand(BaseCommand): - """ - :code:`spec` command. - - Emits all uses of :code:`@dsl_specification` in the codebase, in human-readable or CSV format. 
- """ - - def __call__(self, args_namespace, unknown_args): - super(SpecCommand, self).__call__(args_namespace, unknown_args) - - # Make sure that all @dsl_specification decorators are processed - for pkg in extension.parser.specification_package(): - import_modules(pkg) - - # TODO: scan YAML documents as well - - if args_namespace.csv: - writer = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL) - writer.writerow(('Specification', 'Section', 'Code', 'URL')) - for spec, sections in iter_specifications(): - for section, details in sections: - writer.writerow((spec, section, details['code'], details['url'])) - - else: - for spec, sections in iter_specifications(): - puts(Colored.cyan(spec)) - with indent(2): - for section, details in sections: - puts(Colored.blue(section)) - with indent(2): - for k, v in details.iteritems(): - puts('%s: %s' % (Colored.magenta(k), v)) diff --git a/aria/cli/commands/__init__.py b/aria/cli/commands/__init__.py new file mode 100644 index 00000000..a01a029f --- /dev/null +++ b/aria/cli/commands/__init__.py @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import ( + executions, + logs, + node_templates, + nodes, + plugins, + reset, + service_templates, + services, + workflows +) diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py new file mode 100644 index 00000000..e100f0d3 --- /dev/null +++ b/aria/cli/commands/executions.py @@ -0,0 +1,172 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from .. import helptexts +from .. import table +from .. 
import utils +from ..core import aria +from ...modeling.models import Execution +from ...orchestrator.workflow_runner import WorkflowRunner +from ...orchestrator.workflows.executor.dry import DryExecutor +from ...utils import formatting +from ...utils import threading + +EXECUTION_COLUMNS = ['id', 'workflow_name', 'status', 'service_name', + 'created_at', 'error'] + + +@aria.group(name='executions') +@aria.options.verbose() +def executions(): + """Handle workflow executions + """ + pass + + +@executions.command(name='show', + short_help='Show execution information') +@aria.argument('execution-id') +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def show(execution_id, model_storage, logger): + """Show information for a specific execution + + `EXECUTION_ID` is the id of the execution to get information on. + """ + logger.info('Showing execution {0}'.format(execution_id)) + execution = model_storage.execution.get(execution_id) + + table.print_data(EXECUTION_COLUMNS, execution, 'Execution:', col_max_width=50) + + # print execution inputs + logger.info('Execution Inputs:') + if execution.inputs: + # TODO: check this section, it hasn't been tested yet + execution_inputs = [ei.to_dict() for ei in execution.inputs] + for input_name, input_value in formatting.decode_dict( + execution_inputs).iteritems(): + logger.info('\t{0}: \t{1}'.format(input_name, input_value)) + else: + logger.info('\tNo inputs') + + +@executions.command(name='list', + short_help='List service executions') +@aria.options.service_name(required=False) +@aria.options.sort_by() +@aria.options.descending +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def list(service_name, + sort_by, + descending, + model_storage, + logger): + """List executions + + If `SERVICE_NAME` is provided, list executions for that service. + Otherwise, list executions for all services. + """ + if service_name: + logger.info('Listing executions for service {0}...'.format( + service_name)) + service = model_storage.service.get_by_name(service_name) + filters = dict(service=service) + else: + logger.info('Listing all executions...') + filters = {} + + executions_list = model_storage.execution.list( + filters=filters, + sort=utils.storage_sort_param(sort_by, descending)).items + + table.print_data(EXECUTION_COLUMNS, executions_list, 'Executions:') + + +@executions.command(name='start', + short_help='Execute a workflow') +@aria.argument('workflow-name') +@aria.options.service_name(required=True) +@aria.options.inputs(help=helptexts.EXECUTION_INPUTS) +@aria.options.dry_execution +@aria.options.task_max_attempts() +@aria.options.task_retry_interval() +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_resource_storage +@aria.pass_plugin_manager +@aria.pass_logger +def start(workflow_name, + service_name, + inputs, + dry, + task_max_attempts, + task_retry_interval, + model_storage, + resource_storage, + plugin_manager, + logger): + """Execute a workflow + + `WORKFLOW_NAME` is the name of the workflow to execute (e.g.
`uninstall`) + """ + service = model_storage.service.get_by_name(service_name) + executor = DryExecutor() if dry else None # use WorkflowRunner's default executor + + workflow_runner = \ + WorkflowRunner(workflow_name, service.id, inputs, + model_storage, resource_storage, plugin_manager, + executor, task_max_attempts, task_retry_interval) + + execution_thread_name = '{0}_{1}'.format(service_name, workflow_name) + execution_thread = threading.ExceptionThread(target=workflow_runner.execute, + name=execution_thread_name) + execution_thread.daemon = True # allows force-cancel to exit immediately + + logger.info('Starting {0}execution. Press Ctrl+C to cancel'.format('dry ' if dry else '')) + execution_thread.start() + try: + while execution_thread.is_alive(): + # using join without a timeout blocks and ignores KeyboardInterrupt + execution_thread.join(1) + except KeyboardInterrupt: + _cancel_execution(workflow_runner, execution_thread, logger) + + # raise any errors from the execution thread (note these are not workflow execution errors) + execution_thread.raise_error_if_exists() + + execution = workflow_runner.execution + logger.info('Execution has ended with "{0}" status'.format(execution.status)) + if execution.status == Execution.FAILED and execution.error: + logger.info('Execution error:{0}{1}'.format(os.linesep, execution.error)) + + if dry: + # remove traces of the dry execution (including tasks, logs, inputs, etc.) + model_storage.execution.delete(execution) + + +def _cancel_execution(workflow_runner, execution_thread, logger): + logger.info('Cancelling execution. Press Ctrl+C again to force-cancel') + try: + workflow_runner.cancel() + while execution_thread.is_alive(): + execution_thread.join(1) + except KeyboardInterrupt: + logger.info('Force-cancelling execution') + # TODO handle execution (update status etc.) and exit process diff --git a/aria/cli/commands/logs.py b/aria/cli/commands/logs.py new file mode 100644 index 00000000..6c83347f --- /dev/null +++ b/aria/cli/commands/logs.py @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ..
import utils +from ..core import aria + + +@aria.group(name='logs') +@aria.options.verbose() +def logs(): + """Show logs from workflow executions + """ + pass + + +@logs.command(name='list', + short_help='List execution logs') +@aria.argument('execution-id') +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def list(execution_id, + model_storage, + logger): + """Display logs for an execution + + `EXECUTION_ID` is the execution to list logs for. + """ + logger.info('Listing logs for execution id {0}'.format(execution_id)) + logs_list = model_storage.log.list(filters=dict(execution_fk=execution_id), + sort=utils.storage_sort_param('created_at', False)) + # TODO: print logs nicely + if logs_list: + for log in logs_list: + logger.info(log) + else: + logger.info('\tNo logs') + + +@logs.command(name='delete', + short_help='Delete execution logs') +@aria.argument('execution-id') +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def delete(execution_id, model_storage, logger): + """Delete logs of an execution + + `EXECUTION_ID` is the execution whose logs will be deleted. + """ + logger.info('Deleting logs for execution id {0}'.format(execution_id)) + logs_list = model_storage.log.list(filters=dict(execution_fk=execution_id)) + for log in logs_list: + model_storage.log.delete(log) + logger.info('Deleted logs for execution id {0}'.format(execution_id)) diff --git a/aria/cli/commands/node_templates.py b/aria/cli/commands/node_templates.py new file mode 100644 index 00000000..50c755e3 --- /dev/null +++ b/aria/cli/commands/node_templates.py @@ -0,0 +1,93 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import table +from .. import utils +from ..core import aria + + +NODE_TEMPLATE_COLUMNS = ['id', 'name', 'description', 'service_template_name', 'type_name'] + + +@aria.group(name='node-templates') +@aria.options.verbose() +def node_templates(): + """Handle a service template's node templates + """ + pass + + +@node_templates.command(name='show', + short_help='Show node template information') +@aria.argument('node-template-id') +# @aria.options.service_template_name(required=True) +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def show(node_template_id, model_storage, logger): + """Show information for a specific node template of a specific service template + + `NODE_TEMPLATE_ID` is the id of the node template to get information on.
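+ +    Example (the `aria` entry point and the id are illustrative): `aria node-templates show 1`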
+ """ + logger.info('Showing node template {0}'.format(node_template_id)) + node_template = model_storage.node_template.get(node_template_id) + + table.print_data(NODE_TEMPLATE_COLUMNS, node_template, 'Node template:', col_max_width=50) + + # print node template properties + logger.info('Node template properties:') + if node_template.properties: + logger.info(utils.get_parameter_templates_as_string(node_template.properties)) + else: + logger.info('\tNo properties') + + # print node names + nodes = node_template.nodes + logger.info('Nodes:') + if nodes: + for node in nodes: + logger.info('\t{0}'.format(node.name)) + else: + logger.info('\tNo nodes') + + +@node_templates.command(name='list', + short_help='List node templates for a service template') +@aria.options.service_template_name() +@aria.options.sort_by('service_template_name') +@aria.options.descending +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def list(service_template_name, sort_by, descending, model_storage, logger): + """List node templates + + If `SERVICE_TEMPLATE_NAME` is provided, list node templates for that service template. + Otherwise, list node templates for all service templates. + """ + if service_template_name: + logger.info('Listing node templates for service template {0}...'.format( + service_template_name)) + service_template = model_storage.service_template.get_by_name(service_template_name) + filters = dict(service_template=service_template) + else: + logger.info('Listing all node templates...') + filters = {} + + node_templates_list = model_storage.node_template.list( + filters=filters, + sort=utils.storage_sort_param(sort_by, descending)) + + table.print_data(NODE_TEMPLATE_COLUMNS, node_templates_list, 'Node templates:') diff --git a/aria/cli/commands/nodes.py b/aria/cli/commands/nodes.py new file mode 100644 index 00000000..e43493fd --- /dev/null +++ b/aria/cli/commands/nodes.py @@ -0,0 +1,87 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import table +from .. import utils +from ..core import aria + + +NODE_COLUMNS = ['id', 'name', 'service_name', 'node_template_name', 'state'] + + +@aria.group(name='nodes') +@aria.options.verbose() +def nodes(): + """Handle a service's nodes + """ + pass + + +@nodes.command(name='show', + short_help='Show node information') +@aria.argument('node_id') +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def show(node_id, model_storage, logger): + """Show information for a specific node + + `NODE_ID` is the id of the node to get information on.
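+ +    Example (the `aria` entry point and the id are illustrative): `aria nodes show 5`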
+ """ + logger.info('Showing node {0}'.format(node_id)) + node = model_storage.node.get(node_id) + + table.print_data(NODE_COLUMNS, node, 'Node:', col_max_width=50) + + # print node attributes + logger.info('Node attributes:') + if node.runtime_properties: + for prop_name, prop_value in node.runtime_properties.iteritems(): + logger.info('\t{0}: {1}'.format(prop_name, prop_value)) + else: + logger.info('\tNo attributes') + + +@nodes.command(name='list', + short_help='List nodes for a service') +@aria.options.service_name(required=False) +@aria.options.sort_by('service_name') +@aria.options.descending +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def list(service_name, + sort_by, + descending, + model_storage, + logger): + """List nodes + + If `SERVICE_NAME` is provided, list nodes for that service. + Otherwise, list nodes for all services. + """ + if service_name: + logger.info('Listing nodes for service {0}...'.format(service_name)) + service = model_storage.service.get_by_name(service_name) + filters = dict(service=service) + else: + logger.info('Listing all nodes...') + filters = {} + + nodes_list = model_storage.node.list( + filters=filters, + sort=utils.storage_sort_param(sort_by, descending)) + + table.print_data(NODE_COLUMNS, nodes_list, 'Nodes:') diff --git a/aria/cli/commands/plugins.py b/aria/cli/commands/plugins.py new file mode 100644 index 00000000..670288e9 --- /dev/null +++ b/aria/cli/commands/plugins.py @@ -0,0 +1,99 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import table +from .. import utils +from ..core import aria + + +PLUGIN_COLUMNS = ['id', 'package_name', 'package_version', 'supported_platform', + 'distribution', 'distribution_release', 'uploaded_at'] + + +@aria.group(name='plugins') +@aria.options.verbose() +def plugins(): + """Handle plugins + """ + pass + + +@plugins.command(name='validate', + short_help='Validate a plugin') +@aria.argument('plugin-path') +@aria.options.verbose() +@aria.pass_plugin_manager +@aria.pass_logger +def validate(plugin_path, plugin_manager, logger): + """Validate a plugin archive + + A valid plugin is a wagon (http://github.com/cloudify-cosmo/wagon) + in the zip format (suffix may also be .wgn). + + `PLUGIN_PATH` is the path to the wagon archive to validate. + """ + logger.info('Validating plugin {0}...'.format(plugin_path)) + plugin_manager.validate_plugin(plugin_path) + logger.info('Plugin validated successfully') + + +@plugins.command(name='install', + short_help='Install a plugin') +@aria.argument('plugin-path') +@aria.options.verbose() +@aria.pass_context +@aria.pass_plugin_manager +@aria.pass_logger +def install(ctx, plugin_path, plugin_manager, logger): + """Install a plugin + + `PLUGIN_PATH` is the path to the wagon archive to install.
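+ +    Example (the `aria` entry point and the archive path are illustrative): `aria plugins install ./my-plugin.wgn`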
+ """ + ctx.invoke(validate, plugin_path=plugin_path) + logger.info('Installing plugin {0}...'.format(plugin_path)) + plugin = plugin_manager.install(plugin_path) + logger.info("Plugin installed. The plugin's id is {0}".format(plugin.id)) + + +@plugins.command(name='show', + short_help='show plugin information') +@aria.argument('plugin-id') +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def show(plugin_id, model_storage, logger): + """Show information for a specific plugin + + `PLUGIN_ID` is the id of the plugin to show information on. + """ + logger.info('Showing plugin {0}...'.format(plugin_id)) + plugin = model_storage.plugin.get(plugin_id) + table.print_data(PLUGIN_COLUMNS, plugin, 'Plugin:') + + +@plugins.command(name='list', + short_help='List plugins') +@aria.options.sort_by('uploaded_at') +@aria.options.descending +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def list(sort_by, descending, model_storage, logger): + """List all plugins on the manager + """ + logger.info('Listing all plugins...') + plugins_list = model_storage.plugin.list( + sort=utils.storage_sort_param(sort_by, descending)).items + table.print_data(PLUGIN_COLUMNS, plugins_list, 'Plugins:') diff --git a/aria/cli/commands/reset.py b/aria/cli/commands/reset.py new file mode 100644 index 00000000..1fe07148 --- /dev/null +++ b/aria/cli/commands/reset.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import helptexts +from ..core import aria +from ..env import env +from ..exceptions import AriaCliError + + +@aria.command(name='reset', + short_help="Reset ARIA's working directory") +@aria.options.force(help=helptexts.FORCE_RESET) +@aria.options.reset_config +@aria.pass_logger +@aria.options.verbose() +def reset(force, reset_config, logger): + """ + Reset ARIA working directory + Resetting the working directory will result in the deletion of all state in ARIA; The user + configuration will remain intact, unless the `reset_config` flag has been set as well, in + which case the entire ARIA working directory shall be removed. + """ + if not force: + raise AriaCliError("To reset the ARIA's working directory, you must also provide the force" + " flag ('-f'/'--force').") + + env.reset(reset_config=reset_config) + logger.info("ARIA's working directory has been reset") diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py new file mode 100644 index 00000000..97367c24 --- /dev/null +++ b/aria/cli/commands/service_templates.py @@ -0,0 +1,208 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+
+from .. import csar
+from .. import service_template_utils
+from .. import table
+from .. import utils
+from ..core import aria
+from ...core import Core
+from ...storage import exceptions as storage_exceptions
+
+
+DESCRIPTION_FIELD_LENGTH_LIMIT = 20
+SERVICE_TEMPLATE_COLUMNS = \
+    ['id', 'name', 'description', 'main_file_name', 'created_at', 'updated_at']
+
+
+@aria.group(name='service-templates')
+@aria.options.verbose()
+def service_templates():
+    """Handle service templates
+    """
+    pass
+
+
+@service_templates.command(name='show',
+                           short_help='Show service template information')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(service_template_name, model_storage, logger):
+    """Show information for a specific service template
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to show information on.
+    """
+    logger.info('Showing service template {0}...'.format(service_template_name))
+    service_template = model_storage.service_template.get_by_name(service_template_name)
+    service_template_dict = service_template.to_dict()
+    service_template_dict['#services'] = len(service_template.services)
+
+    column_formatters = \
+        dict(description=table.trim_formatter_generator(DESCRIPTION_FIELD_LENGTH_LIMIT))
+    columns = SERVICE_TEMPLATE_COLUMNS + ['#services']
+    table.print_data(columns, service_template_dict, 'Service-template:',
+                     column_formatters=column_formatters, col_max_width=50)
+
+    if service_template_dict['description'] is not None:
+        logger.info('Description:')
+        logger.info('{0}{1}'.format(service_template_dict['description'].encode('UTF-8') or '',
+                                    os.linesep))
+
+    if service_template.services:
+        logger.info('Existing services:')
+        for service in service_template.services:
+            logger.info('\t{0}'.format(service.name))
+
+
+@service_templates.command(name='list',
+                           short_help='List service templates')
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(sort_by, descending, model_storage, logger):
+    """List all service templates
+    """
+    logger.info('Listing all service templates...')
+    service_templates_list = model_storage.service_template.list(
+        sort=utils.storage_sort_param(sort_by, descending))
+
+    column_formatters = \
+        dict(description=table.trim_formatter_generator(DESCRIPTION_FIELD_LENGTH_LIMIT))
+    table.print_data(SERVICE_TEMPLATE_COLUMNS, service_templates_list, 'Service templates:',
+                     column_formatters=column_formatters)
+
+
+@service_templates.command(name='store',
+                           short_help='Store a service template')
+@aria.argument('service-template-path')
+@aria.argument('service-template-name')
+@aria.options.service_template_filename
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def store(service_template_path, service_template_name,
+          service_template_filename,
+          model_storage, resource_storage, plugin_manager, logger):
+    """Store a service template
+
+    `SERVICE_TEMPLATE_PATH` is the path of the service template to store.
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to store.
+    """
+    logger.info('Storing service template {0}...'.format(service_template_name))
+
+    service_template_path = service_template_utils.get(service_template_path,
+                                                       service_template_filename)
+    core = Core(model_storage, resource_storage, plugin_manager)
+    try:
+        core.create_service_template(service_template_path,
+                                     os.path.dirname(service_template_path),
+                                     service_template_name)
+    except storage_exceptions.StorageError as e:
+        utils.check_overriding_storage_exceptions(e, 'service template', service_template_name)
+        raise
+    logger.info('Service template {0} stored'.format(service_template_name))
+
+
+@service_templates.command(name='delete',
+                           short_help='Delete a service template')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def delete(service_template_name, model_storage, resource_storage, plugin_manager, logger):
+    """Delete a service template
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to delete.
+    """
+    logger.info('Deleting service template {0}...'.format(service_template_name))
+    service_template = model_storage.service_template.get_by_name(service_template_name)
+    core = Core(model_storage, resource_storage, plugin_manager)
+    core.delete_service_template(service_template.id)
+    logger.info('Service template {0} deleted'.format(service_template_name))
+
+
+@service_templates.command(name='inputs',
+                           short_help='Show service template inputs')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def inputs(service_template_name, model_storage, logger):
+    """Show inputs for a specific service template
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to show inputs for.
+    """
+    logger.info('Showing inputs for service template {0}...'.format(service_template_name))
+    print_service_template_inputs(model_storage, service_template_name, logger)
+
+
+@service_templates.command(name='validate',
+                           short_help='Validate a service template')
+@aria.argument('service-template')
+@aria.options.service_template_filename
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def validate(service_template, service_template_filename,
+             model_storage, resource_storage, plugin_manager, logger):
+    """Validate a service template
+
+    `SERVICE_TEMPLATE` is the path or URL of the service template or archive to validate.
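The `show` and `list` commands above trim the description column through `table.trim_formatter_generator(DESCRIPTION_FIELD_LENGTH_LIMIT)`; the real formatter lives in aria/cli/table.py, outside this hunk. A hedged sketch of what such a generator would plausibly look like:

    def trim_formatter_generator(max_length):
        # Return a per-cell formatter that truncates long values
        # (the exact truncation marker is an assumption)
        def trim_formatter(value):
            value = str(value)
            if len(value) > max_length:
                value = value[:max_length - 2] + '..'
            return value
        return trim_formatter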
+ """ + logger.info('Validating service template: {0}'.format(service_template)) + service_template_path = service_template_utils.get(service_template, service_template_filename) + core = Core(model_storage, resource_storage, plugin_manager) + core.validate_service_template(service_template_path) + logger.info('Service template validated successfully') + + +@service_templates.command(name='create-archive', + short_help='Create a csar archive') +@aria.argument('service-template-path') +@aria.argument('destination') +@aria.options.verbose() +@aria.pass_logger +def create_archive(service_template_path, destination, logger): + """Create a csar archive + + `service_template_path` is the path of the service template to create the archive from + `destination` is the path of the output csar archive + """ + logger.info('Creating a csar archive') + csar.write(os.path.dirname(service_template_path), service_template_path, destination, logger) + logger.info('Csar archive created at {0}'.format(destination)) + + +def print_service_template_inputs(model_storage, service_template_name, logger): + service_template = model_storage.service_template.get_by_name(service_template_name) + + logger.info('Service template inputs:') + if service_template.inputs: + logger.info(utils.get_parameter_templates_as_string(service_template.inputs)) + else: + logger.info('\tNo inputs') diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py new file mode 100644 index 00000000..50b530ae --- /dev/null +++ b/aria/cli/commands/services.py @@ -0,0 +1,179 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import os +from StringIO import StringIO + +from . import service_templates +from .. import helptexts +from .. import table +from .. import utils +from ..core import aria +from ...core import Core +from ...modeling import exceptions as modeling_exceptions +from ...storage import exceptions as storage_exceptions + + +SERVICE_COLUMNS = ['id', 'name', 'service_template_name', 'created_at', 'updated_at'] + + +@aria.group(name='services') +@aria.options.verbose() +def services(): + """Handle services + """ + pass + + +@services.command(name='list', short_help='List services') +@aria.options.service_template_name() +@aria.options.sort_by() +@aria.options.descending +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def list(service_template_name, + sort_by, + descending, + model_storage, + logger): + """List services + + If `--service-template-name` is provided, list services for that service template. + Otherwise, list services for all service templates. 
+ """ + if service_template_name: + logger.info('Listing services for service template {0}...'.format( + service_template_name)) + service_template = model_storage.service_template.get_by_name(service_template_name) + filters = dict(service_template=service_template) + else: + logger.info('Listing all services...') + filters = {} + + services_list = model_storage.service.list( + sort=utils.storage_sort_param(sort_by=sort_by, descending=descending), + filters=filters) + table.print_data(SERVICE_COLUMNS, services_list, 'Services:') + + +@services.command(name='create', + short_help='Create a services') +@aria.argument('service-name', required=False) +@aria.options.service_template_name(required=True) +@aria.options.inputs(help=helptexts.SERVICE_INPUTS) +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_resource_storage +@aria.pass_plugin_manager +@aria.pass_logger +def create(service_template_name, + service_name, + inputs, # pylint: disable=redefined-outer-name + model_storage, + resource_storage, + plugin_manager, + logger): + """Create a service + + `SERVICE_NAME` is the name of the service you'd like to create. + + """ + logger.info('Creating new service from service template {0}...'.format( + service_template_name)) + core = Core(model_storage, resource_storage, plugin_manager) + service_template = model_storage.service_template.get_by_name(service_template_name) + + try: + service = core.create_service(service_template.id, inputs, service_name) + except storage_exceptions.StorageError as e: + utils.check_overriding_storage_exceptions(e, 'service', service_name) + raise + except modeling_exceptions.InputsException: + service_templates.print_service_template_inputs(model_storage, service_template_name, + logger) + raise + logger.info("Service created. The service's name is {0}".format(service.name)) + + +@services.command(name='delete', + short_help='Delete a service') +@aria.argument('service-name') +@aria.options.force(help=helptexts.IGNORE_AVAILABLE_NODES) +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_resource_storage +@aria.pass_plugin_manager +@aria.pass_logger +def delete(service_name, force, model_storage, resource_storage, plugin_manager, logger): + """Delete a service + + `SERVICE_NAME` is the name of the service to delete. + """ + logger.info('Deleting service {0}...'.format(service_name)) + service = model_storage.service.get_by_name(service_name) + core = Core(model_storage, resource_storage, plugin_manager) + core.delete_service(service.id, force=force) + logger.info('Service {0} deleted'.format(service_name)) + + +@services.command(name='outputs', + short_help='Show service outputs') +@aria.argument('service-name') +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def outputs(service_name, model_storage, logger): + """Show outputs for a specific service + + `SERVICE_NAME` is the name of the service to print outputs for. + """ + logger.info('Showing outputs for service {0}...'.format(service_name)) + service = model_storage.service.get_by_name(service_name) + #TODO fix this section.. 
+    outputs_def = service.outputs
+    response = model_storage.service.outputs.get(service_name)
+    outputs_ = StringIO()
+    for output_name, output in response.outputs.iteritems():
+        outputs_.write(' - "{0}":{1}'.format(output_name, os.linesep))
+        description = outputs_def[output_name].get('description', '')
+        outputs_.write('     Description: {0}{1}'.format(description,
+                                                         os.linesep))
+        outputs_.write('     Value: {0}{1}'.format(output, os.linesep))
+    logger.info(outputs_.getvalue())
+
+
+@services.command(name='inputs',
+                  short_help='Show service inputs')
+@aria.argument('service-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def inputs(service_name, model_storage, logger):
+    """Show inputs for a specific service
+
+    `SERVICE_NAME` is the name of the service to print inputs for.
+    """
+    logger.info('Showing inputs for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    if service.inputs:
+        inputs_string = StringIO()
+        for input_name, input_ in service.inputs.iteritems():
+            inputs_string.write(' - "{0}":{1}'.format(input_name, os.linesep))
+            inputs_string.write('     Value: {0}{1}'.format(input_.value, os.linesep))
+        logger.info(inputs_string.getvalue())
+    else:
+        logger.info('\tNo inputs')
diff --git a/aria/cli/commands/workflows.py b/aria/cli/commands/workflows.py
new file mode 100644
index 00000000..221dbc45
--- /dev/null
+++ b/aria/cli/commands/workflows.py
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import table
+from ..core import aria
+from ..exceptions import AriaCliError
+
+WORKFLOW_COLUMNS = ['name', 'service_template_name', 'service_name']
+
+
+@aria.group(name='workflows')
+def workflows():
+    """Handle service workflows
+    """
+    pass
+
+
+@workflows.command(name='show',
+                   short_help='Show workflow information')
+@aria.argument('workflow-name')
+@aria.options.service_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(workflow_name, service_name, model_storage, logger):
+    """Show information for a specific workflow of a specific service
+
+    `WORKFLOW_NAME` is the name of the workflow to get information on.
+ """ + logger.info('Retrieving workflow {0} for service {1}'.format( + workflow_name, service_name)) + service = model_storage.service.get_by_name(service_name) + workflow = next((wf for wf in service.workflows.values() if + wf.name == workflow_name), None) + if not workflow: + raise AriaCliError( + 'Workflow {0} not found for service {1}'.format(workflow_name, service_name)) + + defaults = { + 'service_template_name': service.service_template_name, + 'service_name': service.name + } + table.print_data(WORKFLOW_COLUMNS, workflow, 'Workflows:', defaults=defaults) + + # print workflow inputs + required_inputs = dict() + optional_inputs = dict() + for input_name, input in workflow.inputs.iteritems(): + inputs_group = optional_inputs if input.value is not None else required_inputs + inputs_group[input_name] = input + + logger.info('Workflow Inputs:') + logger.info('\tMandatory Inputs:') + for input_name, input in required_inputs.iteritems(): + if input.description is not None: + logger.info('\t\t{0}\t({1})'.format(input_name, + input.description)) + else: + logger.info('\t\t{0}'.format(input_name)) + + logger.info('\tOptional Inputs:') + for input_name, input in optional_inputs.iteritems(): + if input.description is not None: + logger.info('\t\t{0}: \t{1}\t({2})'.format( + input_name, input.value, input.description)) + else: + logger.info('\t\t{0}: \t{1}'.format(input_name, + input.value)) + + +@workflows.command(name='list', + short_help='List workflows for a service') +@aria.options.service_name(required=True) +@aria.options.verbose() +@aria.pass_model_storage +@aria.pass_logger +def list(service_name, model_storage, logger): + """List all workflows of a specific service + """ + logger.info('Listing workflows for service {0}...'.format(service_name)) + service = model_storage.service.get_by_name(service_name) + workflows_list = sorted(service.workflows.values(), key=lambda w: w.name) + + defaults = { + 'service_template_name': service.service_template_name, + 'service_name': service.name + } + table.print_data(WORKFLOW_COLUMNS, workflows_list, 'Workflows:', defaults=defaults) diff --git a/aria/cli/config/__init__.py b/aria/cli/config/__init__.py new file mode 100644 index 00000000..ae1e83ee --- /dev/null +++ b/aria/cli/config/__init__.py @@ -0,0 +1,14 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/aria/cli/config/config.py b/aria/cli/config/config.py new file mode 100644 index 00000000..99f46ca2 --- /dev/null +++ b/aria/cli/config/config.py @@ -0,0 +1,73 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import os +import yaml +import pkg_resources + +from jinja2.environment import Template + + +CONFIG_FILE_NAME = 'config.yaml' + + +class CliConfig(object): + + def __init__(self, config_path): + with open(config_path) as f: + self._config = yaml.safe_load(f.read()) + + @classmethod + def create_config(cls, workdir): + config_path = os.path.join(workdir, CONFIG_FILE_NAME) + if not os.path.isfile(config_path): + config_template = pkg_resources.resource_string( + __package__, + 'config_template.yaml') + + default_values = { + 'log_path': os.path.join(workdir, 'cli.log'), + 'enable_colors': True + } + + template = Template(config_template) + rendered = template.render(**default_values) + with open(config_path, 'w') as f: + f.write(rendered) + f.write(os.linesep) + + return cls(config_path) + + @property + def colors(self): + return self._config.get('colors', False) + + @property + def logging(self): + return self.Logging(self._config.get('logging')) + + class Logging(object): + + def __init__(self, logging): + self._logging = logging or {} + + @property + def filename(self): + return self._logging.get('filename') + + @property + def loggers(self): + return self._logging.get('loggers', {}) diff --git a/aria/cli/config/config_template.yaml b/aria/cli/config/config_template.yaml new file mode 100644 index 00000000..13f2cf9c --- /dev/null +++ b/aria/cli/config/config_template.yaml @@ -0,0 +1,12 @@ +colors: {{ enable_colors }} + +logging: + + # path to a file where cli logs will be saved. + filename: {{ log_path }} + + # configuring level per logger + loggers: + + # main logger of the cli. provides basic descriptions for executed operations. + aria.cli.main: info diff --git a/aria/cli/core/__init__.py b/aria/cli/core/__init__.py new file mode 100644 index 00000000..ae1e83ee --- /dev/null +++ b/aria/cli/core/__init__.py @@ -0,0 +1,14 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/aria/cli/core/aria.py b/aria/cli/core/aria.py new file mode 100644 index 00000000..ed7c4904 --- /dev/null +++ b/aria/cli/core/aria.py @@ -0,0 +1,429 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import sys
+import difflib
+import StringIO
+import traceback
+from functools import wraps
+
+import click
+
+from ..env import (
+    env,
+    logger
+)
+from .. import defaults
+from .. import helptexts
+from ..inputs import inputs_to_dict
+from ... import __version__
+from ...utils.exceptions import get_exception_as_string
+
+
+CLICK_CONTEXT_SETTINGS = dict(
+    help_option_names=['-h', '--help'],
+    token_normalize_func=lambda param: param.lower())
+
+
+class MutuallyExclusiveOption(click.Option):
+    """Makes options mutually exclusive. The option must pass a `cls` argument
+    with this class, and a `mutually_exclusive` argument with a list of
+    argument names it is mutually exclusive with.
+
+    NOTE: All mutually exclusive options must use this. It's not enough to
+    use it in just one of the options.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.mutually_exclusive = set(kwargs.pop('mutually_exclusive', []))
+        self.mutuality_error_message = \
+            kwargs.pop('mutuality_error_message',
+                       helptexts.DEFAULT_MUTUALITY_MESSAGE)
+        self.mutuality_string = ', '.join(self.mutually_exclusive)
+        if self.mutually_exclusive:
+            help = kwargs.get('help', '')
+            kwargs['help'] = (
+                '{0}. This argument is mutually exclusive with '
+                'arguments: [{1}] ({2})'.format(
+                    help,
+                    self.mutuality_string,
+                    self.mutuality_error_message))
+        super(MutuallyExclusiveOption, self).__init__(*args, **kwargs)
+
+    def handle_parse_result(self, ctx, opts, args):
+        if self.mutually_exclusive.intersection(opts) and self.name in opts:
+            raise click.UsageError(
+                'Illegal usage: `{0}` is mutually exclusive with '
+                'arguments: [{1}] ({2}).'.format(
+                    self.name,
+                    self.mutuality_string,
+                    self.mutuality_error_message))
+        return super(MutuallyExclusiveOption, self).handle_parse_result(
+            ctx, opts, args)
+
+
+def _format_version_data(version,
+                         prefix=None,
+                         suffix=None,
+                         infix=None):
+    all_data = dict(version=version)
+    all_data['prefix'] = prefix or ''
+    all_data['suffix'] = suffix or ''
+    all_data['infix'] = infix or ''
+    output = StringIO.StringIO()
+    output.write('{prefix}{version}'.format(**all_data))
+    output.write('{suffix}'.format(**all_data))
+    return output.getvalue()
+
+
+def show_version(ctx, param, value):
+    if not value:
+        return
+
+    cli_version = _format_version_data(
+        __version__,
+        prefix='ARIA CLI ',
+        infix=' ' * 5,
+        suffix='')
+
+    logger.info(cli_version)
+    ctx.exit()
+
+
+def inputs_callback(ctx, param, value):
+    """Allows passing any inputs provided to a command as processed inputs,
+    instead of having to call `inputs_to_dict` inside the command.
+
+    `@aria.options.inputs` already calls this callback so that
+    every time you use the option it returns the inputs as a
+    dictionary.
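For illustration, this is how a command would opt into the `MutuallyExclusiveOption` class defined above; per its docstring, every option in the group must use it. The option names here are made up:

    import click

    @click.command()
    @click.option('--json-output', is_flag=True, cls=MutuallyExclusiveOption,
                  mutually_exclusive=['plain_output'])
    @click.option('--plain-output', is_flag=True, cls=MutuallyExclusiveOption,
                  mutually_exclusive=['json_output'])
    def dump(json_output, plain_output):
        click.echo('json' if json_output else 'plain')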
+ """ + if not value: + return {} + + return inputs_to_dict(value) + + +def set_verbosity_level(ctx, param, value): + if not value: + return + + env.logging.verbosity_level = value + + +def set_cli_except_hook(): + + def recommend(possible_solutions): + logger.info('Possible solutions:') + for solution in possible_solutions: + logger.info(' - {0}'.format(solution)) + + def new_excepthook(tpe, value, trace): + if env.logging.is_high_verbose_level(): + # log error including traceback + logger.error(get_exception_as_string(tpe, value, trace)) + else: + # write the full error to the log file + with open(env.logging.log_file, 'a') as log_file: + traceback.print_exception( + etype=tpe, + value=value, + tb=trace, + file=log_file) + # print only the error message + print value + + if hasattr(value, 'possible_solutions'): + recommend(getattr(value, 'possible_solutions')) + + sys.excepthook = new_excepthook + + +def pass_logger(func): + """Simply passes the logger to a command. + """ + # Wraps here makes sure the original docstring propagates to click + @wraps(func) + def wrapper(*args, **kwargs): + return func(logger=logger, *args, **kwargs) + + return wrapper + + +def pass_plugin_manager(func): + """Simply passes the plugin manager to a command. + """ + # Wraps here makes sure the original docstring propagates to click + @wraps(func) + def wrapper(*args, **kwargs): + return func(plugin_manager=env.plugin_manager, *args, **kwargs) + + return wrapper + + +def pass_model_storage(func): + """Simply passes the model storage to a command. + """ + # Wraps here makes sure the original docstring propagates to click + @wraps(func) + def wrapper(*args, **kwargs): + return func(model_storage=env.model_storage, *args, **kwargs) + + return wrapper + + +def pass_resource_storage(func): + """Simply passes the resource storage to a command. + """ + # Wraps here makes sure the original docstring propagates to click + @wraps(func) + def wrapper(*args, **kwargs): + return func(resource_storage=env.resource_storage, *args, **kwargs) + + return wrapper + + +def pass_context(func): + """Make click context ARIA specific + + This exists purely for aesthetic reasons, otherwise + Some decorators are called `@click.something` instead of + `@aria.something` + """ + return click.pass_context(func) + + +class AliasedGroup(click.Group): + def __init__(self, *args, **kwargs): + self.max_suggestions = kwargs.pop("max_suggestions", 3) + self.cutoff = kwargs.pop("cutoff", 0.5) + super(AliasedGroup, self).__init__(*args, **kwargs) + + def get_command(self, ctx, cmd_name): + cmd = click.Group.get_command(self, ctx, cmd_name) + if cmd is not None: + return cmd + matches = \ + [x for x in self.list_commands(ctx) if x.startswith(cmd_name)] + if not matches: + return None + elif len(matches) == 1: + return click.Group.get_command(self, ctx, matches[0]) + ctx.fail('Too many matches: {0}'.format(', '.join(sorted(matches)))) + + def resolve_command(self, ctx, args): + """Override clicks ``resolve_command`` method + and appends *Did you mean ...* suggestions + to the raised exception message. 
+ """ + try: + return super(AliasedGroup, self).resolve_command(ctx, args) + except click.exceptions.UsageError as error: + error_msg = str(error) + original_cmd_name = click.utils.make_str(args[0]) + matches = difflib.get_close_matches( + original_cmd_name, + self.list_commands(ctx), + self.max_suggestions, + self.cutoff) + if matches: + error_msg += '{0}{0}Did you mean one of these?{0} {1}'.format( + os.linesep, + '{0} '.format(os.linesep).join(matches, )) + raise click.exceptions.UsageError(error_msg, error.ctx) + + +def group(name): + """Allow to create a group with a default click context + and a cls for click's `didyoueamn` without having to repeat + it for every group. + """ + return click.group( + name=name, + context_settings=CLICK_CONTEXT_SETTINGS, + cls=AliasedGroup) + + +def command(*args, **kwargs): + """Make Click commands ARIA specific + + This exists purely for aesthetical reasons, otherwise + Some decorators are called `@click.something` instead of + `@aria.something` + """ + return click.command(*args, **kwargs) + + +def argument(*args, **kwargs): + """Make Click arguments ARIA specific + + This exists purely for aesthetic reasons, otherwise + Some decorators are called `@click.something` instead of + `@aria.something` + """ + return click.argument(*args, **kwargs) + + +class Options(object): + def __init__(self): + """The options api is nicer when you use each option by calling + `@aria.options.some_option` instead of `@aria.some_option`. + + Note that some options are attributes and some are static methods. + The reason for that is that we want to be explicit regarding how + a developer sees an option. It it can receive arguments, it's a + method - if not, it's an attribute. + """ + self.version = click.option( + '--version', + is_flag=True, + callback=show_version, + expose_value=False, + is_eager=True, + help=helptexts.VERSION) + + self.json_output = click.option( + '--json-output', + is_flag=True, + help=helptexts.JSON_OUTPUT) + + self.dry_execution = click.option( + '--dry', + is_flag=True, + help=helptexts.DRY_EXECUTION) + + self.reset_config = click.option( + '--reset-config', + is_flag=True, + help=helptexts.RESET_CONFIG) + + self.descending = click.option( + '--descending', + required=False, + is_flag=True, + default=defaults.SORT_DESCENDING, + help=helptexts.DESCENDING) + + self.service_template_filename = click.option( + '-n', + '--service-template-filename', + default=defaults.SERVICE_TEMPLATE_FILENAME, + help=helptexts.SERVICE_TEMPLATE_FILENAME) + + @staticmethod + def verbose(expose_value=False): + return click.option( + '-v', + '--verbose', + count=True, + callback=set_verbosity_level, + expose_value=expose_value, + is_eager=True, + help=helptexts.VERBOSE) + + @staticmethod + def inputs(help): + return click.option( + '-i', + '--inputs', + multiple=True, + callback=inputs_callback, + help=help) + + @staticmethod + def force(help): + return click.option( + '-f', + '--force', + is_flag=True, + help=help) + + @staticmethod + def task_max_attempts(default=defaults.TASK_MAX_ATTEMPTS): + return click.option( + '--task-max-attempts', + type=int, + default=default, + help=helptexts.TASK_MAX_ATTEMPTS.format(default)) + + @staticmethod + def sort_by(default='created_at'): + return click.option( + '--sort-by', + required=False, + default=default, + help=helptexts.SORT_BY) + + @staticmethod + def task_retry_interval(default=defaults.TASK_RETRY_INTERVAL): + return click.option( + '--task-retry-interval', + type=int, + default=default, + 
help=helptexts.TASK_RETRY_INTERVAL.format(default)) + + @staticmethod + def service_id(required=False): + return click.option( + '-s', + '--service-id', + required=required, + help=helptexts.SERVICE_ID) + + @staticmethod + def execution_id(required=False): + return click.option( + '-e', + '--execution-id', + required=required, + help=helptexts.EXECUTION_ID) + + @staticmethod + def service_template_id(required=False): + return click.option( + '-t', + '--service-template-id', + required=required, + help=helptexts.SERVICE_TEMPLATE_ID) + + @staticmethod + def service_template_path(required=False): + return click.option( + '-p', + '--service-template-path', + required=required, + type=click.Path(exists=True)) + + @staticmethod + def service_name(required=False): + return click.option( + '-s', + '--service-name', + required=required, + help=helptexts.SERVICE_ID) + + @staticmethod + def service_template_name(required=False): + return click.option( + '-t', + '--service-template-name', + required=required, + help=helptexts.SERVICE_ID) + + +options = Options() diff --git a/aria/cli/csar.py b/aria/cli/csar.py index b185f463..5bc35acb 100644 --- a/aria/cli/csar.py +++ b/aria/cli/csar.py @@ -14,12 +14,13 @@ # limitations under the License. import os +import logging import pprint import tempfile import zipfile import requests -from ruamel import yaml # @UnresolvedImport +from ruamel import yaml META_FILE = 'TOSCA-Metadata/TOSCA.meta' @@ -135,7 +136,7 @@ def _read_metadata(self): self.logger.debug('Attempting to parse CSAR metadata YAML') with open(csar_metafile) as f: self.metadata.update(yaml.load(f)) - self.logger.debug('CSAR metadata:\n{0}'.format(pprint.pformat(self.metadata))) + self.logger.debug('CSAR metadata:{0}{1}'.format(os.linesep, pprint.pformat(self.metadata))) def _validate(self): def validate_key(key, expected=None): @@ -167,5 +168,11 @@ def _download(self, url, target): f.write(chunk) -def read(source, destination, logger): +def read(source, destination=None, logger=None): + destination = destination or tempfile.mkdtemp() + logger = logger or logging.getLogger('dummy') return _CSARReader(source=source, destination=destination, logger=logger) + + +def is_csar_archive(source): + return source.endswith('.csar') diff --git a/aria/cli/defaults.py b/aria/cli/defaults.py new file mode 100644 index 00000000..5c169389 --- /dev/null +++ b/aria/cli/defaults.py @@ -0,0 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
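With the new defaults in `csar.read` above, callers no longer need to pick an extraction directory or wire in a logger. Illustrative usage - the .csar path is hypothetical:

    from aria.cli import csar

    reader = csar.read(source='/path/to/my-app.csar')
    print(reader.destination)         # a fresh temp directory by default
    print(reader.entry_definitions)   # main template, per TOSCA-Metadata/TOSCA.meta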
+ + +SERVICE_TEMPLATE_FILENAME = 'service_template.yaml' +TASK_MAX_ATTEMPTS = 30 +TASK_RETRY_INTERVAL = 30 +SORT_DESCENDING = False diff --git a/aria/cli/dry.py b/aria/cli/dry.py deleted file mode 100644 index fc6c0c5c..00000000 --- a/aria/cli/dry.py +++ /dev/null @@ -1,93 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from threading import RLock - -from ..modeling import models -from ..orchestrator.decorators import operation -from ..utils.collections import OrderedDict -from ..utils.console import puts, Colored -from ..utils.formatting import safe_repr - - -_TERMINAL_LOCK = RLock() - - -def convert_to_dry(service): - """ - Converts all operations on the service (on workflows, node interfaces, and relationship - interfaces) to run dryly. - """ - - for workflow in service.workflows.itervalues(): - convert_operation_to_dry(workflow) - - for node in service.nodes.itervalues(): - for interface in node.interfaces.itervalues(): - for oper in interface.operations.itervalues(): - convert_operation_to_dry(oper) - for relationship in node.outbound_relationships: - for interface in relationship.interfaces.itervalues(): - for oper in interface.operations.itervalues(): - convert_operation_to_dry(oper) - - for group in service.groups.itervalues(): - for interface in group.interfaces.itervalues(): - for oper in interface.operations.itervalues(): - convert_operation_to_dry(oper) - - -def convert_operation_to_dry(oper): - """ - Converts a single :class:`Operation` to run dryly. - """ - - plugin = oper.plugin.name \ - if oper.plugin is not None else None - if oper.inputs is None: - oper.inputs = OrderedDict() - oper.inputs['_implementation'] = models.Parameter(name='_implementation', - type_name='string', - value=oper.implementation) - oper.inputs['_plugin'] = models.Parameter(name='_plugin', - type_name='string', - value=plugin) - oper.implementation = '{0}.{1}'.format(__name__, 'dry_operation') - oper.plugin_specification = None - - -@operation -def dry_operation(ctx, _plugin, _implementation, **kwargs): - """ - The dry operation simply prints out information about the operation to the console. 
- """ - - with _TERMINAL_LOCK: - print ctx.name - if hasattr(ctx, 'relationship'): - puts('> Relationship: {0} -> {1}'.format( - Colored.red(ctx.relationship.source_node.name), - Colored.red(ctx.relationship.target_node.name))) - else: - puts('> Node: {0}'.format(Colored.red(ctx.node.name))) - puts(' Operation: {0}'.format(Colored.green(ctx.name))) - _dump_implementation(_plugin, _implementation) - - -def _dump_implementation(plugin, implementation): - if plugin: - puts(' Plugin: {0}'.format(Colored.magenta(plugin, bold=True))) - if implementation: - puts(' Implementation: {0}'.format(Colored.magenta(safe_repr(implementation)))) diff --git a/aria/cli/env.py b/aria/cli/env.py new file mode 100644 index 00000000..52a4ec68 --- /dev/null +++ b/aria/cli/env.py @@ -0,0 +1,124 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import os +import shutil + +from .config import config +from .logger import Logging +from .. import (application_model_storage, application_resource_storage) +from ..orchestrator.plugin import PluginManager +from ..storage.sql_mapi import SQLAlchemyModelAPI +from ..storage.filesystem_rapi import FileSystemResourceAPI + + +ARIA_DEFAULT_WORKDIR_NAME = '.aria' + + +class _Environment(object): + + def __init__(self, workdir): + + self._workdir = workdir + self._init_workdir() + + self._config = config.CliConfig.create_config(workdir) + self._logging = Logging(self._config) + + self._model_storage_dir = os.path.join(workdir, 'models') + self._resource_storage_dir = os.path.join(workdir, 'resources') + self._plugins_dir = os.path.join(workdir, 'plugins') + + # initialized lazily + self._model_storage = None + self._resource_storage = None + self._plugin_manager = None + + @property + def workdir(self): + return self._workdir + + @property + def config(self): + return self._config + + @property + def logging(self): + return self._logging + + @property + def model_storage(self): + if not self._model_storage: + self._model_storage = self._init_sqlite_model_storage() + return self._model_storage + + @property + def resource_storage(self): + if not self._resource_storage: + self._resource_storage = self._init_fs_resource_storage() + return self._resource_storage + + @property + def plugin_manager(self): + if not self._plugin_manager: + self._plugin_manager = self._init_plugin_manager() + return self._plugin_manager + + def reset(self, reset_config): + if reset_config: + shutil.rmtree(self._workdir) + else: + _, dirs, files = next(os.walk(self._workdir)) + files.remove(config.CONFIG_FILE_NAME) + + for dir_ in dirs: + shutil.rmtree(os.path.join(self._workdir, dir_)) + for file_ in files: + os.remove(os.path.join(self._workdir, file_)) + + def _init_workdir(self): + if not os.path.exists(self._workdir): + os.makedirs(self._workdir) + + def 
_init_sqlite_model_storage(self): + if not os.path.exists(self._model_storage_dir): + os.makedirs(self._model_storage_dir) + + initiator_kwargs = dict(base_dir=self._model_storage_dir) + return application_model_storage( + SQLAlchemyModelAPI, + initiator_kwargs=initiator_kwargs) + + def _init_fs_resource_storage(self): + if not os.path.exists(self._resource_storage_dir): + os.makedirs(self._resource_storage_dir) + + fs_kwargs = dict(directory=self._resource_storage_dir) + return application_resource_storage( + FileSystemResourceAPI, + api_kwargs=fs_kwargs) + + def _init_plugin_manager(self): + if not os.path.exists(self._plugins_dir): + os.makedirs(self._plugins_dir) + + return PluginManager(self.model_storage, self._plugins_dir) + + +env = _Environment(os.path.join( + os.environ.get('ARIA_WORKDIR', os.path.expanduser('~')), ARIA_DEFAULT_WORKDIR_NAME)) + +logger = env.logging.logger diff --git a/aria/cli/exceptions.py b/aria/cli/exceptions.py index 68977314..89cfacde 100644 --- a/aria/cli/exceptions.py +++ b/aria/cli/exceptions.py @@ -13,59 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -CLI various exception classes -""" +from ..exceptions import AriaError -class AriaCliError(Exception): - """ - General CLI Exception class - """ - pass - - -class AriaCliFormatInputsError(AriaCliError): - """ - Raised when provided inputs are malformed. - """ - - def __init__(self, message, inputs): - self.inputs = inputs - super(AriaCliFormatInputsError, self).__init__(message) - - def user_message(self): - """ - Describes the format error in detail. - """ - return ( - 'Invalid input format: {0}, ' - 'the expected format is: ' - 'key1=value1;key2=value2'.format(self.inputs)) - -class AriaCliYAMLInputsError(AriaCliError): - """ - Raised when an invalid yaml file is provided - """ +class AriaCliError(AriaError): pass - - -class AriaCliInvalidInputsError(AriaCliFormatInputsError): - """ - Raised when provided inputs are invalid. - """ - - def user_message(self): - """ - Describes the error in detail. - """ - return ( - 'Invalid input: {0}. input must represent a dictionary.\n' - 'Valid values can be one of:\n' - '- a path to a YAML file\n' - '- a path to a directory containing YAML files\n' - '- a single quoted wildcard based path (e.g. "*-inputs.yaml")\n' - '- a string formatted as JSON\n' - '- a string formatted as key1=value1;key2=value2'.format(self.inputs) - ) diff --git a/aria/cli/helptexts.py b/aria/cli/helptexts.py new file mode 100644 index 00000000..1a3f6c02 --- /dev/null +++ b/aria/cli/helptexts.py @@ -0,0 +1,49 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +DEFAULT_MUTUALITY_MESSAGE = 'Cannot be used simultaneously' +VERBOSE = \ + "Show verbose output. 
You can supply this up to three times (i.e. -vvv)"
+
+VERSION = "Display the version and exit"
+FORCE_RESET = "Confirmation for resetting ARIA's working directory"
+RESET_CONFIG = "Reset ARIA's user configuration"
+
+SERVICE_TEMPLATE_ID = "The unique identifier for the service template"
+SERVICE_ID = "The unique identifier for the service"
+EXECUTION_ID = "The unique identifier for the execution"
+
+SERVICE_TEMPLATE_PATH = "The path to the application's service template file"
+SERVICE_TEMPLATE_FILENAME = (
+    "The name of the archive's main service template file. "
+    "This is only relevant if uploading a (non-csar) archive")
+INPUTS_PARAMS_USAGE = (
+    '(Can be provided as wildcard based paths '
+    '(*.yaml, /my_inputs/, etc..) to YAML files, a JSON string or as '
+    'key1=value1;key2=value2). This argument can be used multiple times')
+SERVICE_INPUTS = "Inputs for the service {0}".format(INPUTS_PARAMS_USAGE)
+EXECUTION_INPUTS = "Inputs for the execution {0}".format(INPUTS_PARAMS_USAGE)
+
+TASK_RETRY_INTERVAL = \
+    "The minimal interval to wait between task retry attempts [default: {0}]"
+TASK_MAX_ATTEMPTS = \
+    "The maximal number of times a task will be attempted in case of failure [default: {0}]"
+DRY_EXECUTION = "Execute a workflow dry run (prints operation information without causing side " \
+                "effects)"
+IGNORE_AVAILABLE_NODES = "Delete the service even if it has available nodes"
+SORT_BY = "Key for sorting the list"
+DESCENDING = "Sort list in descending order [default: False]"
+JSON_OUTPUT = "Output logs in a consumable JSON format"
diff --git a/aria/cli/inputs.py b/aria/cli/inputs.py
new file mode 100644
index 00000000..0ff48dc0
--- /dev/null
+++ b/aria/cli/inputs.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import glob
+import yaml
+
+from .env import logger
+from .exceptions import AriaCliError
+
+
+def inputs_to_dict(resources):
+    """Returns a dictionary of inputs
+
+    `resources` can be:
+    - A list of files
+    - A single file
+    - A directory containing multiple input files
+    - A key1=value1;key2=value2 pairs string
+    - A string formatted as JSON/YAML
+    - A wildcard-based string (e.g. *-inputs.yaml)
+    """
+    if not resources:
+        return dict()
+
+    parsed_dict = {}
+
+    for resource in resources:
+        logger.debug('Processing inputs source: {0}'.format(resource))
+        # Workflow parameters always pass an empty dictionary; we ignore it
+        if isinstance(resource, basestring):
+            try:
+                parsed_dict.update(_parse_single_input(resource))
+            except AriaCliError:
+                raise AriaCliError(
+                    "Invalid input: {0}. It must represent a dictionary. "
+                    "Valid values can be one of:{1} "
+                    "- A path to a YAML file{1} "
+                    "- A path to a directory containing YAML files{1} "
+                    "- A single quoted wildcard based path "
+                    "(e.g. '*-inputs.yaml'){1} "
+                    "- A string formatted as JSON/YAML{1} "
+                    "- A string formatted as key1=value1;key2=value2".format(
+                        resource, os.linesep))
+    return parsed_dict
+
+
+def _parse_single_input(resource):
+    try:
+        # parse resource as string representation of a dictionary
+        return _plain_string_to_dict(resource)
+    except AriaCliError:
+        input_files = glob.glob(resource)
+        parsed_dict = dict()
+        if os.path.isdir(resource):
+            for input_file in os.listdir(resource):
+                parsed_dict.update(
+                    _parse_yaml_path(os.path.join(resource, input_file)))
+        elif input_files:
+            for input_file in input_files:
+                parsed_dict.update(_parse_yaml_path(input_file))
+        else:
+            parsed_dict.update(_parse_yaml_path(resource))
+    return parsed_dict
+
+
+def _parse_yaml_path(resource):
+
+    try:
+        # if resource is a path - parse as a yaml file
+        if os.path.isfile(resource):
+            with open(resource) as f:
+                content = yaml.load(f.read())
+        else:
+            # parse resource content as yaml
+            content = yaml.load(resource)
+    except yaml.error.YAMLError as e:
+        raise AriaCliError("'{0}' is not valid YAML. {1}".format(
+            resource, str(e)))
+
+    # Empty files return None
+    content = content or dict()
+    if not isinstance(content, dict):
+        raise AriaCliError()
+
+    return content
+
+
+def _plain_string_to_dict(input_string):
+    input_string = input_string.strip()
+    input_dict = {}
+    mapped_inputs = input_string.split(';')
+    for mapped_input in mapped_inputs:
+        mapped_input = mapped_input.strip()
+        if not mapped_input:
+            continue
+        split_mapping = mapped_input.split('=')
+        try:
+            key = split_mapping[0].strip()
+            value = split_mapping[1].strip()
+        except IndexError:
+            raise AriaCliError(
+                "Invalid input format: {0}, the expected format is: "
+                "key1=value1;key2=value2".format(input_string))
+        input_dict[key] = value
+    return input_dict
diff --git a/aria/cli/logger.py b/aria/cli/logger.py
new file mode 100644
index 00000000..1ffa9189
--- /dev/null
+++ b/aria/cli/logger.py
@@ -0,0 +1,114 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
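A quick sanity check of the `_plain_string_to_dict` contract above (illustrative usage, not part of the patch):

    assert _plain_string_to_dict('key1=value1; key2=value2') == \
        {'key1': 'value1', 'key2': 'value2'}
    assert _plain_string_to_dict('') == {}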
+ + +import os +import copy +import logging +from logutils import dictconfig + + +HIGH_VERBOSE = 3 +MEDIUM_VERBOSE = 2 +LOW_VERBOSE = 1 +NO_VERBOSE = 0 + +LOGGER_CONFIG_TEMPLATE = { + "version": 1, + "formatters": { + "file": { + "format": "%(asctime)s [%(levelname)s] %(message)s" + }, + "console": { + "format": "%(message)s" + } + }, + "handlers": { + "file": { + "class": "logging.handlers.RotatingFileHandler", + "formatter": "file", + "maxBytes": "5000000", + "backupCount": "20" + }, + "console": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + "formatter": "console" + } + }, + "disable_existing_loggers": False +} + + +class Logging(object): + + def __init__(self, config): + self._log_file = None + self._verbosity_level = NO_VERBOSE + self._all_loggers_names = [] + self._configure_loggers(config) + self._lgr = logging.getLogger('aria.cli.main') + + @property + def logger(self): + return self._lgr + + @property + def log_file(self): + return self._log_file + + @property + def verbosity_level(self): + return self._verbosity_level + + @verbosity_level.setter + def verbosity_level(self, level): + self._verbosity_level = level + if self.is_high_verbose_level(): + for logger_name in self._all_loggers_names: + logging.getLogger(logger_name).setLevel(logging.DEBUG) + + def is_high_verbose_level(self): + return self.verbosity_level == HIGH_VERBOSE + + def _configure_loggers(self, config): + loggers_config = config.logging.loggers + logfile = config.logging.filename + + logger_dict = copy.deepcopy(LOGGER_CONFIG_TEMPLATE) + if logfile: + # set filename on file handler + logger_dict['handlers']['file']['filename'] = logfile + logfile_dir = os.path.dirname(logfile) + if not os.path.exists(logfile_dir): + os.makedirs(logfile_dir) + self._log_file = logfile + else: + del logger_dict['handlers']['file'] + + # add handlers to all loggers + loggers = {} + for logger_name in loggers_config: + loggers[logger_name] = dict(handlers=list(logger_dict['handlers'].keys())) + self._all_loggers_names.append(logger_name) + logger_dict['loggers'] = loggers + + # set level for all loggers + for logger_name, logging_level in loggers_config.iteritems(): + log = logging.getLogger(logger_name) + level = logging._levelNames[logging_level.upper()] + log.setLevel(level) + + dictconfig.dictConfig(logger_dict) diff --git a/aria/cli/main.py b/aria/cli/main.py new file mode 100644 index 00000000..02cf0953 --- /dev/null +++ b/aria/cli/main.py @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from aria import install_aria_extensions +from aria.cli import commands +from aria.cli.core import aria + + +@aria.group(name='aria') +@aria.options.verbose() +@aria.options.version +def _aria(): + """ARIA's Command Line Interface + + To activate bash-completion. 
run: `eval "$(_ARIA_COMPLETE=source aria)"`
+
+    ARIA's working directory resides by default in ~/.aria. To change it, set
+    the environment variable `ARIA_WORKDIR` to something else (e.g. /tmp/).
+    """
+    aria.set_cli_except_hook()
+
+
+def _register_commands():
+    """
+    Register the CLI's commands.
+    """
+
+    _aria.add_command(commands.service_templates.service_templates)
+    _aria.add_command(commands.node_templates.node_templates)
+    _aria.add_command(commands.services.services)
+    _aria.add_command(commands.nodes.nodes)
+    _aria.add_command(commands.workflows.workflows)
+    _aria.add_command(commands.executions.executions)
+    _aria.add_command(commands.plugins.plugins)
+    _aria.add_command(commands.logs.logs)
+    _aria.add_command(commands.reset.reset)
+
+
+def main():
+    install_aria_extensions()
+    _register_commands()
+    _aria()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/aria/cli/service_template_utils.py b/aria/cli/service_template_utils.py
new file mode 100644
index 00000000..382cce1a
--- /dev/null
+++ b/aria/cli/service_template_utils.py
@@ -0,0 +1,121 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from urlparse import urlparse
+
+from . import csar
+from . import utils
+from .exceptions import AriaCliError
+from ..utils import archive as archive_utils
+
+
+def get(source, service_template_filename):
+    """Get a source and return a path to the main service template file.
+
+    The behavior, based on the source argument's content, is:
+        - local yaml file: return the file
+        - local archive:
+            extract it locally and return the path to the service template file
+        - URL:
+            - download and get the service template from the downloaded archive
+        - github repo:
+            - download and get the service template from the downloaded archive
+
+    Supported archive types are: csar, zip, tar, tar.gz and tar.bz2
+
+    :param source: Path/URL/github repo to an archive/service-template file
+    :type source: str
+    :param service_template_filename: Path to the service template (if source is an archive
+     [but not a csar archive - with CSAR archives, this is read from the metadata file])
+    :type service_template_filename: str
+    :return: Path to the main service template file
+    :rtype: str
+    """
+    if urlparse(source).scheme:
+        downloaded_file = utils.download_file(source)
+        return _get_service_template_file_from_archive(
+            downloaded_file, service_template_filename)
+    elif os.path.isfile(source):
+        if _is_archive(source):
+            return _get_service_template_file_from_archive(source, service_template_filename)
+        else:
+            # TODO: maybe check that the file is YAML
+            return source
+    elif len(source.split('/')) == 2:
+        url = _map_to_github_url(source)
+        downloaded_file = utils.download_file(url)
+        return _get_service_template_file_from_archive(
+            downloaded_file, service_template_filename)
+    else:
+        raise AriaCliError(
+            'You must provide either a path to a local file, a remote URL '
+            'or a GitHub `organization/repository[:tag/branch]`')
+
+
+def _get_service_template_file_from_archive(archive, service_template_filename):
+    """Extract the archive to a temporary location and get the path to the service template file.
+
+    :param archive: Path to the archive file
+    :type archive: str
+    :param service_template_filename: Path to the service template file, relative to the archive
+    :type service_template_filename: str
+    :return: Absolute path to the service template file
+    :rtype: str
+
+    """
+    if csar.is_csar_archive(archive):
+        service_template_file = _extract_csar_archive(archive)
+    else:
+        extract_directory = archive_utils.extract_archive(archive)
+        service_template_dir = os.path.join(
+            extract_directory,
+            os.listdir(extract_directory)[0],
+        )
+        service_template_file = os.path.join(service_template_dir, service_template_filename)
+
+    if not os.path.isfile(service_template_file):
+        raise AriaCliError(
+            'Could not find `{0}`. Please provide the name of the main '
+            'service template file by using the `-n/--service-template-filename` flag'
+            .format(service_template_filename))
+    return service_template_file
+
+
+def _map_to_github_url(source):
+    """Map a GitHub `org/repo[:tag/branch]` reference to the URL of its archive.
+
+    :param source: github repo in the format of `org/repo[:tag/branch]`
+    :type source: str
+    :return: URL of the archive file for the given repo on github
+    :rtype: str
+
+    """
+    source_parts = source.split(':', 1)
+    repo = source_parts[0]
+    tag = source_parts[1] if len(source_parts) == 2 else 'master'
+    url = 'https://github.com/{0}/archive/{1}.tar.gz'.format(repo, tag)
+    return url
+
+
+def _is_archive(source):
+    return archive_utils.is_archive(source) or csar.is_csar_archive(source)
+
+
+def _extract_csar_archive(archive):
+    reader = csar.read(source=archive)
+    main_service_template_file_name = os.path.basename(reader.entry_definitions)
+    return os.path.join(reader.destination,
+                        main_service_template_file_name)
diff --git a/aria/cli/storage.py b/aria/cli/storage.py
deleted file mode 100644
index fa1518b5..00000000
--- a/aria/cli/storage.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -""" -Filesystem related CLI storage location and configuration -""" - -import os -import getpass -from shutil import rmtree - -work_space_directory = '.aria' -storage_directory_name = 'local-storage' - - -def user_space(user_name=getpass.getuser()): - """ - Base work directory - """ - user_path = '~{0}'.format(user_name) - real_path = os.path.expanduser(user_path) - if os.path.exists(real_path): - return os.path.join(real_path, work_space_directory) - return os.path.join(os.getcwd(), work_space_directory) - - -def local_storage(user_name=getpass.getuser()): - """ - Base storage directory - """ - return os.path.join(user_space(user_name), storage_directory_name) - - -def local_model_storage(): - """ - Model storage directory - """ - return os.path.join(local_storage(), 'models') - - -def local_resource_storage(): - """ - Resource storage directory - """ - return os.path.join(local_storage(), 'resources') - - -def config_file_path(): - """ - Configuration file path - """ - path = os.path.join(user_space(), 'config.yaml') - if not os.path.exists(path): - open(path, 'w').close() - return path - - -def create_user_space(user_name=getpass.getuser(), override=False): - """ - Creates the base work directory - """ - path = user_space(user_name) - if os.path.exists(path): - if override: - rmtree(path, ignore_errors=True) - else: - raise IOError('user space {0} already exists'.format(path)) - os.mkdir(path) - return path - - -def create_local_storage(user_name=getpass.getuser(), override=False): - """ - Creates the base storage directory - """ - path = local_storage(user_name) - if os.path.exists(path): - if override: - rmtree(path, ignore_errors=True) - else: - raise IOError('local storage {0} already exists'.format(path)) - os.mkdir(path) - return path diff --git a/aria/cli/table.py b/aria/cli/table.py new file mode 100644 index 00000000..408f81e4 --- /dev/null +++ b/aria/cli/table.py @@ -0,0 +1,116 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from datetime import datetime + +from prettytable import PrettyTable + +from .env import logger + + +def print_data(columns, items, header_text, + column_formatters=None, col_max_width=None, defaults=None): + if items is None: + items = [] + elif not isinstance(items, list): + items = [items] + + pretty_table = _generate(columns, data=items, column_formatters=column_formatters, + defaults=defaults) + if col_max_width: + pretty_table.max_width = col_max_width + _log(header_text, pretty_table) + + +def _log(title, table): + logger.info('{0}{1}{0}{2}{0}'.format(os.linesep, title, table)) + + +def _generate(cols, data, column_formatters=None, defaults=None): + """ + Return a new PrettyTable instance representing the list. 
+
+    Arguments:
+
+        cols - An iterable of strings specifying the columns
+            of the table.
+
+            for example: ['id','name']
+
+        data - An iterable of dictionaries or objects; each element must
+            have keys or attributes corresponding to the items of cols.
+
+            for example: [{'id':'123', 'name':'Pete'}]
+
+        column_formatters - A dictionary mapping a column name to a formatter - a function that
+            may manipulate the string values printed for this column.
+            (See below for a few built-in formatter examples)
+
+            for example: {'created_at': timestamp_formatter}
+
+        defaults - A dictionary specifying default values for
+            keys that don't exist in the data itself.
+
+            for example: {'serviceId':'123'} will set the
+            serviceId value for all rows to '123'.
+
+    """
+    def get_values_per_column(column, row_data):
+        if hasattr(row_data, column) or (isinstance(row_data, dict) and column in row_data):
+            val = row_data[column] if isinstance(row_data, dict) else getattr(row_data, column)
+
+            if val and isinstance(val, list):
+                val = [str(element) for element in val]
+                val = ','.join(val)
+            elif val is None or isinstance(val, list):
+                # don't print `[]` or `None` (but do print `0`, `False`, etc.)
+                val = ''
+
+            if column in column_formatters:
+                # calling the user's column formatter to manipulate the value
+                val = column_formatters[column](val)
+
+            return val
+        else:
+            return defaults[column]
+
+    column_formatters = column_formatters or dict()
+    pretty_table = PrettyTable(list(cols))
+
+    for datum in data:
+        values_row = []
+        for col in cols:
+            values_row.append(get_values_per_column(col, datum))
+        pretty_table.add_row(values_row)
+
+    return pretty_table
+
+
+def timestamp_formatter(value):
+    try:
+        datetime.strptime(value[:10], '%Y-%m-%d')
+        return value.replace('T', ' ').replace('Z', ' ')
+    except ValueError:
+        # not a timestamp
+        return value
+
+
+def trim_formatter_generator(max_length):
+    def trim_formatter(value):
+        if len(value) >= max_length:
+            value = '{0}..'.format(value[:max_length - 2])
+        return value
+    return trim_formatter
diff --git a/aria/cli/utils.py b/aria/cli/utils.py
new file mode 100644
index 00000000..852f24de
--- /dev/null
+++ b/aria/cli/utils.py
@@ -0,0 +1,115 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
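+
+# Illustrative usage of the sorting helper defined below (the names here
+# are examples only, not part of any fixed schema):
+#
+#     storage_sort_param('created_at', descending=True)   # -> {'created_at': 'desc'}
+#     storage_sort_param('name', descending=False)        # -> {'name': 'asc'}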
+
+import os
+import sys
+from StringIO import StringIO
+
+from backports.shutil_get_terminal_size import get_terminal_size
+
+from .env import logger
+from .exceptions import AriaCliError
+from ..utils import http
+
+
+def storage_sort_param(sort_by, descending):
+    return {sort_by: 'desc' if descending else 'asc'}
+
+
+def get_parameter_templates_as_string(parameter_templates):
+    params_string = StringIO()
+
+    for param_name, param_template in parameter_templates.iteritems():
+        params_string.write('\t{0}:{1}'.format(param_name, os.linesep))
+        param_dict = param_template.to_dict()
+        del param_dict['id']  # not interested in printing the id
+        for k, v in param_dict.iteritems():
+            params_string.write('\t\t{0}: {1}{2}'.format(k, v, os.linesep))
+
+    params_string.write(os.linesep)
+    return params_string.getvalue()
+
+
+def check_overriding_storage_exceptions(e, model_class, name):
+    """
+    Checks whether the storage exception is of a known type for which we'd like to override
+    the exception message; if so, raises a new error with the clearer message, otherwise
+    simply returns.
+    """
+    assert isinstance(e, BaseException)
+    if 'UNIQUE constraint failed' in e.message:
+        new_message = \
+            'Could not store {model_class} `{name}`{linesep}' \
+            'There already exists a {model_class} with the same name' \
+            .format(model_class=model_class, name=name, linesep=os.linesep)
+        trace = sys.exc_info()[2]
+        raise type(e), type(e)(new_message), trace  # pylint: disable=raising-non-exception
+
+
+def download_file(url):
+    progress_bar = generate_progress_handler(url, 'Downloading')
+    try:
+        destination = http.download_file(url, logger=logger, progress_handler=progress_bar)
+    except Exception as e:
+        raise AriaCliError(
+            'Failed to download {0}. ({1})'.format(url, str(e)))
+    return destination
+
+
+def generate_progress_handler(file_path, action='', max_bar_length=80):
+    """Returns a function that prints a progress bar in the terminal.
+
+    :param file_path: The name of the file being transferred
+    :param action: Uploading/Downloading
+    :param max_bar_length: Maximum allowed length of the bar. Default: 80
+    :return: The configured print_progress function
+    """
+    # We want to limit the maximum line length to 80, but allow for a smaller
+    # terminal size. We also include the action string, and some extra chars
+    terminal_width = get_terminal_size().columns
+
+    # This takes care of the case where there is no terminal (e.g. unittest)
+    terminal_width = terminal_width or max_bar_length
+    bar_length = min(max_bar_length, terminal_width) - len(action) - 12
+
+    # Shorten the file name if it's too long
+    file_name = os.path.basename(file_path)
+    if len(file_name) > (bar_length / 4) + 3:
+        file_name = file_name[:bar_length / 4] + '...'
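+        # e.g. (illustrative numbers) with a computed bar_length of 40, any
+        # basename longer than 13 characters is cut to its first 10 plus '...'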
+
+    bar_length -= len(file_name)
+
+    def print_progress(read_bytes, total_bytes):
+        """Print upload/download progress on a single line.
+
+        Call this function in a loop to create a progress bar in the terminal.
+
+        :param read_bytes: Number of bytes already processed
+        :param total_bytes: Total number of bytes in the file
+        """
+
+        filled_length = min(bar_length, int(round(bar_length * read_bytes /
+                                                  float(total_bytes))))
+        percents = min(100.00, round(
+            100.00 * (read_bytes / float(total_bytes)), 2))
+        bar = '#' * filled_length + '-' * (bar_length - filled_length)  # pylint: disable=blacklisted-name
+
+        # The \r (carriage return) makes sure the cursor moves back to the
+        # beginning of the line
+        sys.stdout.write('\r{0} {1} |{2}| {3}%'.format(
+            action, file_name, bar, percents))
+        if read_bytes >= total_bytes:
+            sys.stdout.write(os.linesep)
+
+    return print_progress
diff --git a/aria/core.py b/aria/core.py
new file mode 100644
index 00000000..af1984a7
--- /dev/null
+++ b/aria/core.py
@@ -0,0 +1,124 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import exceptions
+from .parser import consumption
+from .parser.loading.location import UriLocation
+
+
+class Core(object):
+
+    def __init__(self,
+                 model_storage,
+                 resource_storage,
+                 plugin_manager):
+        self._model_storage = model_storage
+        self._resource_storage = resource_storage
+        self._plugin_manager = plugin_manager
+
+    @property
+    def model_storage(self):
+        return self._model_storage
+
+    @property
+    def resource_storage(self):
+        return self._resource_storage
+
+    @property
+    def plugin_manager(self):
+        return self._plugin_manager
+
+    def validate_service_template(self, service_template_path):
+        self._parse_service_template(service_template_path)
+
+    def create_service_template(self, service_template_path, service_template_dir,
+                                service_template_name):
+        context = self._parse_service_template(service_template_path)
+        service_template = context.modeling.template
+        service_template.name = service_template_name
+        self.model_storage.service_template.put(service_template)
+        self.resource_storage.service_template.upload(
+            entry_id=str(service_template.id), source=service_template_dir)
+
+    def delete_service_template(self, service_template_id):
+        service_template = self.model_storage.service_template.get(service_template_id)
+        if service_template.services:
+            raise exceptions.DependentServicesError(
+                "Can't delete service template {0} - service template has existing "
+                "services".format(service_template.name))
+
+        self.model_storage.service_template.delete(service_template)
+        self.resource_storage.service_template.delete(entry_id=str(service_template.id))
+
+    def create_service(self, service_template_id, inputs, service_name=None):
+
+        service_template = self.model_storage.service_template.get(service_template_id)
+
+        # create an empty ConsumptionContext, which also initiates the thread-local context
+        context = consumption.ConsumptionContext()
+
+        storage_session = self.model_storage._all_api_kwargs['session']
+        # disable autoflush for the duration of instantiation - this helps avoid dependency
+        # constraint violations while the objects are being set up
+        with storage_session.no_autoflush:
+            service = service_template.instantiate(None, self.model_storage, inputs=inputs)
+
+            consumption.ConsumerChain(
+                context,
+                (
+                    consumption.SatisfyRequirements,
+                    consumption.ValidateCapabilities,
+                    consumption.FindHosts,
+                    consumption.ConfigureOperations
+                )).consume()
+            if context.validation.dump_issues():
+                raise exceptions.InstantiationError('Failed to instantiate service template')
+
+        storage_session.flush()  # flush so that service.id gets auto-populated
+        service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
+        self.model_storage.service.put(service)
+        return service
+
+    def delete_service(self, service_id, force=False):
+        service = self.model_storage.service.get(service_id)
+
+        active_executions = [e for e in service.executions if e.is_active()]
+        if active_executions:
+            raise exceptions.DependentActiveExecutionsError(
+                "Can't delete service {0} - there is an active execution for this service. "
+                "Active execution id: {1}".format(service.name, active_executions[0].id))
+
+        if not force:
+            available_nodes = [str(n.id) for n in service.nodes.values() if n.is_available()]
+            if available_nodes:
+                raise exceptions.DependentAvailableNodesError(
+                    "Can't delete service {0} - there are available nodes for this service. "
" + "Available node ids: {1}".format(service.name, ', '.join(available_nodes))) + + self.model_storage.service.delete(service) + + @staticmethod + def _parse_service_template(service_template_path): + context = consumption.ConsumptionContext() + context.presentation.location = UriLocation(service_template_path) + consumption.ConsumerChain( + context, + ( + consumption.Read, + consumption.Validate, + consumption.ServiceTemplate + )).consume() + if context.validation.dump_issues(): + raise exceptions.ParsingError('Failed to parse service template') + return context diff --git a/aria/exceptions.py b/aria/exceptions.py index a180ce13..93987dcb 100644 --- a/aria/exceptions.py +++ b/aria/exceptions.py @@ -44,3 +44,32 @@ def __init__(self, message=None, cause=None, cause_traceback=None): # Make sure it's our traceback cause_traceback = traceback self.cause_traceback = cause_traceback + + +class DependentServicesError(AriaError): + """ + Raised when attempting to delete a service template which has existing services + """ + pass + + +class DependentActiveExecutionsError(AriaError): + """ + Raised when attempting to delete a service which has active executions + """ + pass + + +class DependentAvailableNodesError(AriaError): + """ + Raised when attempting to delete a service which has available nodes + """ + pass + + +class ParsingError(AriaError): + pass + + +class InstantiationError(AriaError): + pass diff --git a/aria/logger.py b/aria/logger.py index e3039f5f..dd542641 100644 --- a/aria/logger.py +++ b/aria/logger.py @@ -19,8 +19,20 @@ import logging from logging import handlers as logging_handlers +# NullHandler doesn't exist in < 27. this workaround is from +# http://docs.python.org/release/2.6/library/logging.html#configuring-logging-for-a-library +try: + from logging import NullHandler # pylint: disable=unused-import +except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass from datetime import datetime + +TASK_LOGGER_NAME = 'aria.executions.task' + + _base_logger = logging.getLogger('aria') diff --git a/aria/modeling/__init__.py b/aria/modeling/__init__.py index 4dfc39df..4ac79e73 100644 --- a/aria/modeling/__init__.py +++ b/aria/modeling/__init__.py @@ -19,6 +19,7 @@ mixins, types, models, + utils, service_template as _service_template_bases, service_instance as _service_instance_bases, service_changes as _service_changes_bases, @@ -45,4 +46,5 @@ 'types', 'models', 'model_bases', + 'utils' ) diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py index 6931c782..19fd9420 100644 --- a/aria/modeling/exceptions.py +++ b/aria/modeling/exceptions.py @@ -22,6 +22,13 @@ class ModelingException(AriaException): """ +class InputsException(ModelingException): + """ + ARIA inputs exception. + """ + pass + + class ValueFormatException(ModelingException): """ ARIA modeling exception: the value is in the wrong format. @@ -32,3 +39,21 @@ class CannotEvaluateFunctionException(ModelingException): """ ARIA modeling exception: cannot evaluate the function at this time. """ + + +class MissingRequiredInputsException(InputsException): + """ + ARIA modeling exception: Required inputs have been omitted. + """ + + +class InputsOfWrongTypeException(InputsException): + """ + ARIA modeling exception: Inputs of the wrong types have been provided. + """ + + +class UndeclaredInputsException(InputsException): + """ + ARIA modeling exception: Undeclared inputs have been provided. 
+ """ diff --git a/aria/modeling/models.py b/aria/modeling/models.py index 170efb25..584b8775 100644 --- a/aria/modeling/models.py +++ b/aria/modeling/models.py @@ -16,6 +16,10 @@ # pylint: disable=abstract-method from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import ( + Column, + Text +) from . import ( service_template, @@ -26,7 +30,6 @@ mixins, ) - aria_declarative_base = declarative_base(cls=mixins.ModelIDMixin) @@ -84,7 +87,7 @@ # region service template models class ServiceTemplate(aria_declarative_base, service_template.ServiceTemplateBase): - pass + name = Column(Text, index=True, unique=True) class NodeTemplate(aria_declarative_base, service_template.NodeTemplateBase): @@ -140,7 +143,7 @@ class PluginSpecification(aria_declarative_base, service_template.PluginSpecific # region service instance models class Service(aria_declarative_base, service_instance.ServiceBase): - pass + name = Column(Text, index=True, unique=True) class Node(aria_declarative_base, service_instance.NodeBase): diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py index b32a8a19..01ab2e87 100644 --- a/aria/modeling/orchestration.py +++ b/aria/modeling/orchestration.py @@ -39,7 +39,6 @@ from sqlalchemy.ext.declarative import declared_attr from ..orchestrator.exceptions import (TaskAbortException, TaskRetryException) -from .types import Dict from .mixins import ModelMixin from . import ( relationship, @@ -55,9 +54,7 @@ class ExecutionBase(ModelMixin): __tablename__ = 'execution' __private_fields__ = ['service_fk', - 'service_name', - 'service_template', - 'service_template_name'] + 'service_template'] TERMINATED = 'terminated' FAILED = 'failed' @@ -97,17 +94,14 @@ def validate_status(self, key, value): ended_at = Column(DateTime, nullable=True, index=True) error = Column(Text, nullable=True) is_system_workflow = Column(Boolean, nullable=False, default=False) - parameters = Column(Dict) status = Column(Enum(*STATES, name='execution_status'), default=PENDING) workflow_name = Column(Text) - @property def has_ended(self): return self.status in self.END_STATES - @property def is_active(self): - return not self.has_ended + return not self.has_ended() and self.status != self.PENDING @declared_attr def logs(cls): @@ -121,6 +115,10 @@ def service(cls): def tasks(cls): return relationship.one_to_many(cls, 'task') + @declared_attr + def inputs(cls): + return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name') + # region foreign keys @declared_attr @@ -264,10 +262,7 @@ class TaskBase(ModelMixin): __private_fields__ = ['node_fk', 'relationship_fk', 'plugin_fk', - 'execution_fk' - 'node_name', - 'relationship_name', - 'execution_name'] + 'execution_fk'] PENDING = 'pending' RETRYING = 'retrying' @@ -322,11 +317,9 @@ def inputs(cls): ended_at = Column(DateTime, default=None) retry_count = Column(Integer, default=0) - @property def has_ended(self): return self.status in (self.SUCCESS, self.FAILED) - @property def is_waiting(self): return self.status in (self.PENDING, self.RETRYING) diff --git a/aria/modeling/service_changes.py b/aria/modeling/service_changes.py index b1a75a24..1974424b 100644 --- a/aria/modeling/service_changes.py +++ b/aria/modeling/service_changes.py @@ -45,9 +45,7 @@ class ServiceUpdateBase(ModelMixin): __tablename__ = 'service_update' __private_fields__ = ['service_fk', - 'execution_fk', - 'execution_name', - 'service_name'] + 'execution_fk'] created_at = Column(DateTime, nullable=False, index=True) service_plan = Column(Dict, 
nullable=False) @@ -125,8 +123,7 @@ class ServiceUpdateStepBase(ModelMixin): __tablename__ = 'service_update_step' - __private_fields__ = ['service_update_fk', - 'service_update_name'] + __private_fields__ = ['service_update_fk'] _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY') ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify') @@ -222,8 +219,7 @@ class ServiceModificationBase(ModelMixin): __tablename__ = 'service_modification' - __private_fields__ = ['service_fk', - 'service_name'] + __private_fields__ = ['service_fk'] STARTED = 'started' FINISHED = 'finished' diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py index 1fcbc5f2..1188f342 100644 --- a/aria/modeling/service_common.py +++ b/aria/modeling/service_common.py @@ -87,6 +87,9 @@ def dump(self): if self.description: console.puts(context.style.meta(self.description)) + def unwrap(self): + return self.name, self.value + @classmethod def wrap(cls, name, value, description=None): """ @@ -98,13 +101,11 @@ def wrap(cls, name, value, description=None): :param description: Description (optional) :type description: basestring """ - - from . import models - return models.Parameter(name=name, - type_name=formatting.full_type_name(value) - if value is not None else None, - value=value, - description=description) + return cls(name=name, + type_name=formatting.full_type_name(value) + if value is not None else None, + value=value, + description=description) class TypeBase(InstanceModelMixin): diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py index 40d43faa..6d8f3fe1 100644 --- a/aria/modeling/service_instance.py +++ b/aria/modeling/service_instance.py @@ -88,8 +88,7 @@ class ServiceBase(InstanceModelMixin): __tablename__ = 'service' __private_fields__ = ['substitution_fk', - 'service_template_fk', - 'service_template_name'] + 'service_template_fk'] # region foreign keys @@ -371,8 +370,7 @@ class NodeBase(InstanceModelMixin): __private_fields__ = ['type_fk', 'host_fk', 'service_fk', - 'node_template_fk', - 'service_name'] + 'node_template_fk'] INITIAL = 'initial' CREATING = 'creating' @@ -417,7 +415,6 @@ def determine_state(cls, op_name, is_transitional): except KeyError: return None - @property def is_available(self): return self.state not in (self.INITIAL, self.DELETED, self.ERROR) @@ -452,6 +449,11 @@ def service_name(cls): """Required for use by SQLAlchemy queries""" return association_proxy('service', 'name') + @declared_attr + def node_template_name(cls): + """Required for use by SQLAlchemy queries""" + return association_proxy('node_template', 'name') + # endregion # region one_to_one relationships @@ -1183,9 +1185,7 @@ class RelationshipBase(InstanceModelMixin): 'target_node_fk', 'target_capability_fk', 'requirement_template_fk', - 'relationship_template_fk', - 'source_node_name', - 'target_node_name'] + 'relationship_template_fk'] # region foreign keys diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py index 51fea2fb..f1c2bcbe 100644 --- a/aria/modeling/service_template.py +++ b/aria/modeling/service_template.py @@ -280,7 +280,7 @@ def types_as_raw(self): ('interface_types', formatting.as_raw(self.interface_types)), ('artifact_types', formatting.as_raw(self.artifact_types)))) - def instantiate(self, container): + def instantiate(self, container, model_storage, inputs=None): # pylint: disable=arguments-differ from . 
import models context = ConsumptionContext.get_thread_local() now = datetime.now() @@ -288,13 +288,14 @@ def instantiate(self, container): updated_at=now, description=deepcopy_with_locators(self.description), service_template=self) - #service.name = '{0}_{1}'.format(self.name, service.id) - context.modeling.instance = service + service.inputs = utils.create_inputs(inputs or {}, self.inputs) + # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions + for plugin_specification in self.plugin_specifications.itervalues(): if plugin_specification.enabled: - if plugin_specification.resolve(): + if plugin_specification.resolve(model_storage): plugin = plugin_specification.plugin service.plugins[plugin.name] = plugin else: @@ -316,15 +317,8 @@ def instantiate(self, container): if self.substitution_template is not None: service.substitution = self.substitution_template.instantiate(container) - utils.instantiate_dict(self, service.inputs, self.inputs) utils.instantiate_dict(self, service.outputs, self.outputs) - for name, the_input in context.modeling.inputs.iteritems(): - if name not in service.inputs: - context.validation.report('input "{0}" is not supported'.format(name)) - else: - service.inputs[name].value = the_input - return service def validate(self): @@ -448,8 +442,7 @@ class NodeTemplateBase(TemplateModelMixin): __tablename__ = 'node_template' __private_fields__ = ['type_fk', - 'service_template_fk', - 'service_template_name'] + 'service_template_fk'] # region foreign_keys @@ -472,6 +465,11 @@ def service_template_name(cls): """Required for use by SQLAlchemy queries""" return association_proxy('service_template', 'name') + @declared_attr + def type_name(cls): + """Required for use by SQLAlchemy queries""" + return association_proxy('type', 'name') + # endregion # region one_to_one relationships @@ -558,6 +556,7 @@ def instantiate(self, container): type=self.type, description=deepcopy_with_locators(self.description), state=models.Node.INITIAL, + runtime_properties={}, node_template=self) utils.instantiate_dict(node, node.properties, self.properties) utils.instantiate_dict(node, node.interfaces, self.interface_templates) @@ -1238,7 +1237,8 @@ def find_target(self, source_node_template): # Find first node that matches the type elif self.target_node_type is not None: - for target_node_template in context.modeling.template.node_templates.itervalues(): + for target_node_template in \ + self.node_template.service_template.node_templates.values(): if self.target_node_type.get_descendant(target_node_template.type.name) is None: continue @@ -1865,16 +1865,22 @@ def as_raw(self): def instantiate(self, container): from . import models - if self.plugin_specification and self.plugin_specification.enabled: - plugin = self.plugin_specification.plugin - implementation = self.implementation if plugin is not None else None - # "plugin" would be none if a match was not found. In that case, a validation error - # should already have been reported in ServiceTemplateBase.instantiate, so we will - # continue silently here + if self.plugin_specification: + if self.plugin_specification.enabled: + plugin = self.plugin_specification.plugin + implementation = self.implementation if plugin is not None else None + # "plugin" would be none if a match was not found. 
In that case, a validation error + # should already have been reported in ServiceTemplateBase.instantiate, so we will + # continue silently here + else: + # If the plugin is disabled, the operation should be disabled, too + plugin = None + implementation = None else: - # If the plugin is disabled, the operation should be disabled, too + # using the execution plugin plugin = None - implementation = None + implementation = self.implementation + operation = models.Operation(name=self.name, description=deepcopy_with_locators(self.description), relationship_edge=self.relationship_edge, @@ -2120,25 +2126,16 @@ def as_raw(self): def coerce_values(self, container, report_issues): pass - def resolve(self): + def resolve(self, model_storage): # TODO: we are planning a separate "instantiation" module where this will be called or - # moved to. There, we will probably have a context with a storage manager. Until then, - # this is the only potentially available context, which of course will only be available - # if we're in a workflow. - from ..orchestrator import context - try: - workflow_context = context.workflow.current.get() - plugins = workflow_context.model.plugin.list() - except context.exceptions.ContextException: - plugins = None - + # moved to. + plugins = model_storage.plugin.list() matching_plugins = [] - if plugins: - for plugin in plugins: - # TODO: we need to use a version comparator - if (plugin.name == self.name) and \ + for plugin in plugins: + # TODO: we need to use a version comparator + if (plugin.name == self.name) and \ ((self.version is None) or (plugin.package_version >= self.version)): - matching_plugins.append(plugin) + matching_plugins.append(plugin) self.plugin = None if matching_plugins: # Return highest version of plugin diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py index 0b4015ce..91d7b9c0 100644 --- a/aria/modeling/utils.py +++ b/aria/modeling/utils.py @@ -13,12 +13,100 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os +from json import JSONEncoder +from StringIO import StringIO + +from . import exceptions from ..parser.consumption import ConsumptionContext from ..parser.exceptions import InvalidValueError from ..parser.presentation import Value from ..utils.collections import OrderedDict from ..utils.console import puts -from .exceptions import CannotEvaluateFunctionException +from ..utils.type import validate_value_type + + +class ModelJSONEncoder(JSONEncoder): + def default(self, o): # pylint: disable=method-hidden + from .mixins import ModelMixin + if isinstance(o, ModelMixin): + if hasattr(o, 'value'): + dict_to_return = o.to_dict(fields=('value',)) + return dict_to_return['value'] + else: + return o.to_dict() + else: + return JSONEncoder.default(self, o) + + +def create_inputs(inputs, template_inputs): + """ + :param inputs: key-value dict + :param template_inputs: parameter name to parameter object dict + :return: dict of parameter name to Parameter models + """ + merged_inputs = _merge_and_validate_inputs(inputs, template_inputs) + + from . 
import models + input_models = [] + for input_name, input_val in merged_inputs.iteritems(): + parameter = models.Parameter( + name=input_name, + type_name=template_inputs[input_name].type_name, + description=template_inputs[input_name].description, + value=input_val) + input_models.append(parameter) + + return dict((inp.name, inp) for inp in input_models) + + +def _merge_and_validate_inputs(inputs, template_inputs): + """ + :param inputs: key-value dict + :param template_inputs: parameter name to parameter object dict + :return: + """ + merged_inputs = inputs.copy() + + missing_inputs = [] + wrong_type_inputs = {} + for input_name, input_template in template_inputs.iteritems(): + if input_name not in inputs: + if input_template.value is not None: + merged_inputs[input_name] = input_template.value # apply default value + else: + missing_inputs.append(input_name) + else: + # Validate input type + try: + validate_value_type(inputs[input_name], input_template.type_name) + except ValueError: + wrong_type_inputs[input_name] = input_template.type_name + except RuntimeError: + # TODO: This error shouldn't be raised (or caught), but right now we lack support + # for custom data_types, which will raise this error. Skipping their validation. + pass + + if missing_inputs: + raise exceptions.MissingRequiredInputsException( + 'Required inputs {0} have not been specified - expected inputs: {1}' + .format(missing_inputs, template_inputs.keys())) + + if wrong_type_inputs: + error_message = StringIO() + for param_name, param_type in wrong_type_inputs.iteritems(): + error_message.write('Input "{0}" must be of type {1}{2}' + .format(param_name, param_type, os.linesep)) + raise exceptions.InputsOfWrongTypeException(error_message.getvalue()) + + undeclared_inputs = [input_name for input_name in inputs.keys() + if input_name not in template_inputs] + if undeclared_inputs: + raise exceptions.UndeclaredInputsException( + 'Undeclared inputs have been specified: {0}; Expected inputs: {1}' + .format(undeclared_inputs, template_inputs.keys())) + + return merged_inputs def coerce_value(container, value, report_issues=False): @@ -35,7 +123,7 @@ def coerce_value(container, value, report_issues=False): try: value = value._evaluate(context, container) value = coerce_value(container, value, report_issues) - except CannotEvaluateFunctionException: + except exceptions.CannotEvaluateFunctionException: pass except InvalidValueError as e: if report_issues: diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py index 127641fa..15843db0 100644 --- a/aria/orchestrator/context/common.py +++ b/aria/orchestrator/context/common.py @@ -19,7 +19,6 @@ import logging from contextlib import contextmanager -from datetime import datetime from functools import partial import jinja2 @@ -55,6 +54,7 @@ def __init__( self, name, service_id, + execution_id, model_storage, resource_storage, workdir=None, @@ -65,27 +65,17 @@ def __init__( self._model = model_storage self._resource = resource_storage self._service_id = service_id + self._execution_id = execution_id self._workdir = workdir self.logger = None - def _create_execution(self): - now = datetime.utcnow() - execution = self.model.execution.model_cls( - service_instance=self.service_instance, - workflow_name=self._workflow_name, - created_at=now, - parameters=self.parameters, - ) - self.model.execution.put(execution) - return execution.id - - def _register_logger(self, logger_name=None, level=None, task_id=None): - self.logger = 
self.PrefixedLogger(logging.getLogger(logger_name or self.__class__.__name__), - self.logging_id, - task_id=task_id) - self.logger.addHandler(aria_logger.create_console_log_handler()) - self.logger.addHandler(self._get_sqla_handler()) + def _register_logger(self, level=None, task_id=None): + self.logger = self.PrefixedLogger( + logging.getLogger(aria_logger.TASK_LOGGER_NAME), self.logging_id, task_id=task_id) self.logger.setLevel(level or logging.DEBUG) + if not self.logger.handlers: + self.logger.addHandler(aria_logger.create_console_log_handler()) + self.logger.addHandler(self._get_sqla_handler()) def _get_sqla_handler(self): api_kwargs = {} @@ -168,13 +158,13 @@ def download_resource(self, destination, path=None): Download a blueprint resource from the resource storage """ try: - self.resource.deployment.download(entry_id=str(self.service.id), - destination=destination, - path=path) + self.resource.service.download(entry_id=str(self.service.id), + destination=destination, + path=path) except exceptions.StorageError: - self.resource.blueprint.download(entry_id=str(self.service_template.id), - destination=destination, - path=path) + self.resource.service_template.download(entry_id=str(self.service_template.id), + destination=destination, + path=path) def download_resource_and_render(self, destination, path=None, variables=None): """ @@ -193,9 +183,10 @@ def get_resource(self, path=None): Read a deployment resource as string from the resource storage """ try: - return self.resource.deployment.read(entry_id=str(self.service.id), path=path) + return self.resource.service.read(entry_id=str(self.service.id), path=path) except exceptions.StorageError: - return self.resource.deployment.read(entry_id=str(self.service_template.id), path=path) + return self.resource.service_template.read(entry_id=str(self.service_template.id), + path=path) def get_resource_and_render(self, path=None, variables=None): """ diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py index cbd186c3..c7d82469 100644 --- a/aria/orchestrator/context/operation.py +++ b/aria/orchestrator/context/operation.py @@ -36,7 +36,6 @@ def __init__(self, service_id, task_id, actor_id, - execution_id, **kwargs): super(BaseOperationContext, self).__init__( name=name, @@ -47,7 +46,6 @@ def __init__(self, self._task_id = task_id self._actor_id = actor_id self._thread_local = threading.local() - self._execution_id = execution_id self._register_logger(task_id=self.task.id) def __repr__(self): diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py index 5f86d9d3..667d22fc 100644 --- a/aria/orchestrator/context/workflow.py +++ b/aria/orchestrator/context/workflow.py @@ -19,7 +19,6 @@ import threading from contextlib import contextmanager -from datetime import datetime from .exceptions import ContextException from .common import BaseContext @@ -35,36 +34,21 @@ def __init__(self, task_max_attempts=1, task_retry_interval=0, task_ignore_failure=False, - execution_id=None, *args, **kwargs): super(WorkflowContext, self).__init__(*args, **kwargs) self._workflow_name = workflow_name - self.parameters = parameters or {} + self._parameters = parameters or {} self._task_max_attempts = task_max_attempts self._task_retry_interval = task_retry_interval self._task_ignore_failure = task_ignore_failure - # TODO: execution creation should happen somewhere else - # should be moved there, when such logical place exists - self._execution_id = execution_id or self._create_execution() 
        self._register_logger()
 
     def __repr__(self):
         return (
             '{name}(deployment_id={self._service_id}, '
-            'workflow_name={self._workflow_name}'.format(
+            'workflow_name={self._workflow_name}, execution_id={self._execution_id})'.format(
                 name=self.__class__.__name__, self=self))
 
-    def _create_execution(self):
-        now = datetime.utcnow()
-        execution = self.model.execution.model_cls(
-            service=self.service,
-            workflow_name=self._workflow_name,
-            created_at=now,
-            parameters=self.parameters,
-        )
-        self.model.execution.put(execution)
-        return execution.id
-
     @property
     def logging_id(self):
         return '{0}[{1}]'.format(self._workflow_name, self._execution_id)
diff --git a/aria/orchestrator/exceptions.py b/aria/orchestrator/exceptions.py
index c00b66b0..8d3dcc6c 100644
--- a/aria/orchestrator/exceptions.py
+++ b/aria/orchestrator/exceptions.py
@@ -25,6 +25,13 @@ class OrchestratorError(AriaError):
     pass
 
 
+class InvalidPluginError(AriaError):
+    """
+    Raised when validation of a plugin archive fails
+    """
+    pass
+
+
 class PluginAlreadyExistsError(AriaError):
     """
     Raised when a plugin with the same package name and package version already exists
@@ -46,3 +53,24 @@ class TaskAbortException(RuntimeError):
     Used internally when ctx.task.abort is called
     """
     pass
+
+
+class UndeclaredWorkflowError(AriaError):
+    """
+    Raised when attempting to execute an undeclared workflow
+    """
+    pass
+
+
+class ActiveExecutionsError(AriaError):
+    """
+    Raised when attempting to execute a workflow on a service which already has an active execution
+    """
+    pass
+
+
+class WorkflowImplementationNotFoundError(AriaError):
+    """
+    Raised when a workflow's implementation could not be found or imported
+    """
+    pass
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
index 817d064a..52a53121 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
@@ -24,6 +24,7 @@
 import wsgiref.simple_server
 import bottle
+from aria import modeling
 from ..
import exceptions @@ -111,7 +112,7 @@ def _process(self, request): result = json.dumps({ 'type': result_type, 'payload': payload - }) + }, cls=modeling.utils.ModelJSONEncoder) except Exception as e: traceback_out = StringIO.StringIO() traceback.print_exc(file=traceback_out) diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py index 960835c2..7627a38f 100644 --- a/aria/orchestrator/execution_plugin/instantiation.py +++ b/aria/orchestrator/execution_plugin/instantiation.py @@ -27,7 +27,7 @@ def configure_operation(operation): arguments = OrderedDict() arguments['script_path'] = operation.implementation arguments['process'] = _get_process(configuration.pop('process')) \ - if 'process' in configuration else None + if 'process' in configuration else dict() host = None interface = operation.interface diff --git a/aria/orchestrator/plugin.py b/aria/orchestrator/plugin.py index d8157549..f99666ca 100644 --- a/aria/orchestrator/plugin.py +++ b/aria/orchestrator/plugin.py @@ -17,6 +17,7 @@ import tempfile import subprocess import sys +import zipfile from datetime import datetime import wagon @@ -43,11 +44,11 @@ def install(self, source): os_props = metadata['build_server_os_properties'] plugin = cls( + name=metadata['package_name'], archive_name=metadata['archive_name'], supported_platform=metadata['supported_platform'], supported_py_versions=metadata['supported_python_versions'], - # Remove suffix colon after upgrading wagon to > 0.5.0 - distribution=os_props.get('distribution:') or os_props.get('distribution'), + distribution=os_props.get('distribution'), distribution_release=os_props['distribution_version'], distribution_version=os_props['distribution_release'], package_name=metadata['package_name'], @@ -70,6 +71,28 @@ def get_plugin_prefix(self, plugin): self._plugins_dir, '{0}-{1}'.format(plugin.package_name, plugin.package_version)) + @staticmethod + def validate_plugin(source): + """ + validate a plugin archive. + A valid plugin is a wagon (http://github.com/cloudify-cosmo/wagon) + in the zip format (suffix may also be .wgn). + """ + if not zipfile.is_zipfile(source): + raise exceptions.InvalidPluginError( + 'Archive {0} is of an unsupported type. Only ' + 'zip/wgn is allowed'.format(source)) + with zipfile.ZipFile(source, 'r') as zip_file: + infos = zip_file.infolist() + try: + package_name = infos[0].filename[:infos[0].filename.index('/')] + package_json_path = "{0}/{1}".format(package_name, 'package.json') + zip_file.getinfo(package_json_path) + except (KeyError, ValueError, IndexError): + raise exceptions.InvalidPluginError( + 'Failed to validate plugin {0} ' + '(package.json was not found in archive)'.format(source)) + def _install_wagon(self, source, prefix): pip_freeze_output = self._pip_freeze() file_descriptor, constraint_path = tempfile.mkstemp(prefix='constraint-', suffix='.txt') diff --git a/aria/orchestrator/runner.py b/aria/orchestrator/runner.py deleted file mode 100644 index f1633fab..00000000 --- a/aria/orchestrator/runner.py +++ /dev/null @@ -1,101 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Workflow runner -""" - -import tempfile -import os - -from .context.workflow import WorkflowContext -from .workflows.core.engine import Engine -from .workflows.executor.thread import ThreadExecutor -from ..storage import ( - sql_mapi, - filesystem_rapi, -) -from .. import ( - application_model_storage, - application_resource_storage -) - - -class Runner(object): - """ - Runs workflows on a deployment. By default uses temporary storage (either on disk or in memory) - but can also be used with existing storage. - - Handles the initialization of the storage engine and provides convenience methods for - sub-classes to create tasks. - - :param path: path to Sqlite database file; use '' (the default) to use a temporary file, - and None to use an in-memory database - :type path: string - """ - - def __init__(self, workflow_name, workflow_fn, inputs, initialize_model_storage_fn, - service_id_fn, storage_path='', is_storage_temporary=True): - if storage_path == '': - # Temporary file storage - the_file, storage_path = tempfile.mkstemp(suffix='.db', prefix='aria-') - os.close(the_file) - - self._storage_path = storage_path - self._storage_dir = os.path.dirname(storage_path) - self._storage_name = os.path.basename(storage_path) - self._is_storage_temporary = is_storage_temporary - - workflow_context = self.create_workflow_context(workflow_name, initialize_model_storage_fn, - service_id_fn) - - tasks_graph = workflow_fn(ctx=workflow_context, **inputs) - - self._engine = Engine( - executor=ThreadExecutor(), - workflow_context=workflow_context, - tasks_graph=tasks_graph) - - def run(self): - try: - self._engine.execute() - finally: - self.cleanup() - - def create_workflow_context(self, - workflow_name, - initialize_model_storage_fn, - service_id_fn): - self.cleanup() - model_storage = application_model_storage( - sql_mapi.SQLAlchemyModelAPI, - initiator_kwargs=dict(base_dir=self._storage_dir, filename=self._storage_name)) - if initialize_model_storage_fn: - initialize_model_storage_fn(model_storage) - resource_storage = application_resource_storage( - filesystem_rapi.FileSystemResourceAPI, api_kwargs=dict(directory='.')) - return WorkflowContext( - name=workflow_name, - model_storage=model_storage, - resource_storage=resource_storage, - service_id=service_id_fn(), - workflow_name=self.__class__.__name__, - task_max_attempts=1, - task_retry_interval=1) - - def cleanup(self): - if (self._is_storage_temporary and (self._storage_path is not None) and - os.path.isfile(self._storage_path)): - os.remove(self._storage_path) diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py new file mode 100644 index 00000000..1ea60a1d --- /dev/null +++ b/aria/orchestrator/workflow_runner.py @@ -0,0 +1,161 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Workflow runner
+"""
+
+import os
+import sys
+from datetime import datetime
+
+from . import exceptions
+from .context.workflow import WorkflowContext
+from .workflows import builtin
+from .workflows.core.engine import Engine
+from .workflows.executor.process import ProcessExecutor
+from ..modeling import models
+from ..modeling import utils as modeling_utils
+from ..utils.imports import import_fullname
+
+
+DEFAULT_TASK_MAX_ATTEMPTS = 30
+DEFAULT_TASK_RETRY_INTERVAL = 30
+
+
+class WorkflowRunner(object):
+
+    def __init__(self, workflow_name, service_id, inputs,
+                 model_storage, resource_storage, plugin_manager,
+                 executor=None, task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
+                 task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
+        """
+        Manages a single workflow execution on a given service.
+
+        :param workflow_name: Workflow name
+        :param service_id: Service id
+        :param inputs: A key-value dict of inputs for the execution
+        :param model_storage: Model storage
+        :param resource_storage: Resource storage
+        :param plugin_manager: Plugin manager
+        :param executor: Executor for tasks; defaults to a ProcessExecutor instance
+        :param task_max_attempts: Maximum number of attempts for each failing task
+        :param task_retry_interval: Interval between retry attempts of a failing task
+        """
+
+        self._model_storage = model_storage
+        self._resource_storage = resource_storage
+        self._workflow_name = workflow_name
+
+        # the IDs are stored rather than the models themselves, so that this module can be used
+        # by several threads without raising errors about model objects shared between threads
+        self._service_id = service_id
+
+        self._validate_workflow_exists_for_service()
+
+        workflow_fn = self._get_workflow_fn()
+
+        execution = self._create_execution_model(inputs)
+        self._execution_id = execution.id
+
+        workflow_context = WorkflowContext(
+            name=self.__class__.__name__,
+            model_storage=self._model_storage,
+            resource_storage=resource_storage,
+            service_id=service_id,
+            execution_id=execution.id,
+            workflow_name=workflow_name,
+            task_max_attempts=task_max_attempts,
+            task_retry_interval=task_retry_interval)
+
+        # transform the execution inputs to a dict, to pass them to the workflow function
+        execution_inputs_dict = dict(inp.unwrap() for inp in self.execution.inputs.values())
+        self._tasks_graph = workflow_fn(ctx=workflow_context, **execution_inputs_dict)
+
+        executor = executor or ProcessExecutor(plugin_manager=plugin_manager)
+        self._engine = Engine(
+            executor=executor,
+            workflow_context=workflow_context,
+            tasks_graph=self._tasks_graph)
+
+    @property
+    def execution(self):
+        return self._model_storage.execution.get(self._execution_id)
+
+    @property
+    def service(self):
+        return self._model_storage.service.get(self._service_id)
+
+    def execute(self):
+        self._engine.execute()
+
+    def cancel(self):
+        self._engine.cancel_execution()
+
+    def _create_execution_model(self, inputs):
+        execution = models.Execution(
+            created_at=datetime.utcnow(),
+            service=self.service,
+            workflow_name=self._workflow_name,
+            inputs={})
+
+        if self._workflow_name in builtin.BUILTIN_WORKFLOWS:
+            workflow_inputs = 
dict() # built-in workflows don't have any inputs + else: + workflow_inputs = self.service.workflows[self._workflow_name].inputs + + execution.inputs = modeling_utils.create_inputs(inputs, workflow_inputs) + # TODO: these two following calls should execute atomically + self._validate_no_active_executions(execution) + self._model_storage.execution.put(execution) + return execution + + def _validate_workflow_exists_for_service(self): + if self._workflow_name not in self.service.workflows and \ + self._workflow_name not in builtin.BUILTIN_WORKFLOWS: + raise exceptions.UndeclaredWorkflowError( + 'No workflow policy {0} declared in service {1}' + .format(self._workflow_name, self.service.name)) + + def _validate_no_active_executions(self, execution): + active_executions = [e for e in self.service.executions if e.is_active()] + if active_executions: + raise exceptions.ActiveExecutionsError( + "Can't start execution; Service {0} has an active execution with id {1}" + .format(self.service.name, active_executions[0].id)) + + def _get_workflow_fn(self): + if self._workflow_name in builtin.BUILTIN_WORKFLOWS: + return import_fullname('{0}.{1}'.format(builtin.BUILTIN_WORKFLOWS_PATH_PREFIX, + self._workflow_name)) + + workflow = self.service.workflows[self._workflow_name] + + # TODO: Custom workflow support needs improvement, currently this code uses internal + # knowledge of the resource storage; Instead, workflows should probably be loaded + # in a similar manner to operation plugins. Also consider passing to import_fullname + # as paths instead of appending to sys path. + service_template_resources_path = os.path.join( + self._resource_storage.service_template.base_path, + str(self.service.service_template.id)) + sys.path.append(service_template_resources_path) + + try: + workflow_fn = import_fullname(workflow.implementation) + except ImportError: + raise exceptions.WorkflowImplementationNotFoundError( + 'Could not find workflow {0} implementation at {1}'.format( + self._workflow_name, workflow.implementation)) + + return workflow_fn diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py index 49c584c3..82c40c34 100644 --- a/aria/orchestrator/workflows/api/task.py +++ b/aria/orchestrator/workflows/api/task.py @@ -16,18 +16,16 @@ """ Provides the tasks to be entered into the task graph """ -import copy +from ... import context from ....modeling import models -from ....utils.collections import (OrderedDict, FrozenDict) +from ....modeling import utils as modeling_utils from ....utils.uuid import generate_uuid -from ... import context -from .. import exceptions class BaseTask(object): """ - Abstract task_graph task + Abstract task graph task """ def __init__(self, ctx=None, **kwargs): @@ -56,14 +54,13 @@ def workflow_context(self): class OperationTask(BaseTask): """ - Represents an operation task in the task graph. + Represents an operation task in the task graph """ NAME_FORMAT = '{interface}:{operation}@{type}:{name}' def __init__(self, actor, - actor_type, interface_name, operation_name, inputs=None, @@ -75,122 +72,101 @@ def __init__(self, :meth:`for_relationship`. 
""" + actor_type = type(actor).__name__.lower() + assert isinstance(actor, (models.Node, models.Relationship)) + assert actor_type in ('node', 'relationship') assert interface_name and operation_name super(OperationTask, self).__init__() - operation = None - interface = actor.interfaces.get(interface_name) - if interface is not None: - operation = interface.operations.get(operation_name) - - if operation is None: - raise exceptions.OperationNotFoundException( - 'Could not find operation "{0}" on interface "{1}" for {2} "{3}"' - .format(operation_name, interface_name, actor_type, actor.name)) - - if operation.implementation is None: - raise exceptions.OperationNotFoundException( - 'Empty operation "{0}" on interface "{1}" for {2} "{3}"' - .format(operation_name, interface_name, actor_type, actor.name)) - self.actor = actor - self.actor_type = actor_type - self.interface_name = interface_name - self.operation_name = operation_name - - self.name = OperationTask.NAME_FORMAT.format(type=actor_type, - name=actor.name, - interface=interface_name, - operation=operation_name) self.max_attempts = (self.workflow_context._task_max_attempts if max_attempts is None else max_attempts) self.retry_interval = (self.workflow_context._task_retry_interval if retry_interval is None else retry_interval) self.ignore_failure = (self.workflow_context._task_ignore_failure if ignore_failure is None else ignore_failure) - self.implementation = operation.implementation - self.plugin = operation.plugin + self.interface_name = interface_name + self.operation_name = operation_name - # Wrap inputs - inputs = copy.deepcopy(inputs) if inputs else {} - for k, v in inputs.iteritems(): - if not isinstance(v, models.Parameter): - inputs[k] = models.Parameter.wrap(k, v) + operation = self.actor.interfaces[self.interface_name].operations[self.operation_name] + self.plugin = operation.plugin + self.inputs = modeling_utils.create_inputs(inputs or {}, operation.inputs) + self.implementation = operation.implementation + self.name = OperationTask.NAME_FORMAT.format(type=actor_type, + name=actor.name, + interface=self.interface_name, + operation=self.operation_name) - self.inputs = OrderedDict(operation.inputs) - if inputs: - self.inputs.update(inputs) - self.inputs = FrozenDict(self.inputs) + def __repr__(self): + return self.name @classmethod def for_node(cls, node, interface_name, operation_name, - inputs=None, max_attempts=None, retry_interval=None, - ignore_failure=None): + ignore_failure=None, + inputs=None): """ Creates an operation on a node. 
:param node: The node on which to run the operation :param interface_name: The interface name :param operation_name: The operation name within the interface - :param inputs: Override the operation's inputs :param max_attempts: The maximum number of attempts in case the operation fails - (if not specified the defaults is taken from the workflow context) + (if not specified the default is taken from the workflow context) :param retry_interval: The interval in seconds between attempts when the operation fails - (if not specified the defaults is taken from the workflow context) + (if not specified the default is taken from the workflow context) :param ignore_failure: Whether to ignore failures - (if not specified the defaults is taken from the workflow context) + (if not specified the default is taken from the workflow context) + :param inputs: Additional operation inputs """ assert isinstance(node, models.Node) return cls( actor=node, - actor_type='node', interface_name=interface_name, operation_name=operation_name, - inputs=inputs, max_attempts=max_attempts, retry_interval=retry_interval, - ignore_failure=ignore_failure) + ignore_failure=ignore_failure, + inputs=inputs) @classmethod def for_relationship(cls, relationship, interface_name, operation_name, - inputs=None, max_attempts=None, retry_interval=None, - ignore_failure=None): + ignore_failure=None, + inputs=None): """ - Creates an operation on a relationship. + Creates an operation on a relationship edge. :param relationship: The relationship on which to run the operation :param interface_name: The interface name :param operation_name: The operation name within the interface - :param inputs: Override the operation's inputs :param max_attempts: The maximum number of attempts in case the operation fails - (if not specified the defaults is taken from the workflow context) + (if not specified the default is taken from the workflow context) :param retry_interval: The interval in seconds between attempts when the operation fails - (if not specified the defaults is taken from the workflow context) + (if not specified the default is taken from the workflow context) :param ignore_failure: Whether to ignore failures - (if not specified the defaults is taken from the workflow context) + (if not specified the default is taken from the workflow context) + :param inputs: Additional operation inputs """ assert isinstance(relationship, models.Relationship) return cls( actor=relationship, - actor_type='relationship', interface_name=interface_name, operation_name=operation_name, - inputs=inputs, max_attempts=max_attempts, retry_interval=retry_interval, - ignore_failure=ignore_failure) + ignore_failure=ignore_failure, + inputs=inputs) class WorkflowTask(BaseTask): diff --git a/aria/orchestrator/workflows/builtin/__init__.py b/aria/orchestrator/workflows/builtin/__init__.py index d43a962f..8b13c627 100644 --- a/aria/orchestrator/workflows/builtin/__init__.py +++ b/aria/orchestrator/workflows/builtin/__init__.py @@ -24,6 +24,7 @@ BUILTIN_WORKFLOWS = ('install', 'uninstall', 'start', 'stop') +BUILTIN_WORKFLOWS_PATH_PREFIX = 'aria.orchestrator.workflows.builtin' __all__ = [ diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py index 348f47af..16504ece 100644 --- a/aria/orchestrator/workflows/builtin/execute_operation.py +++ b/aria/orchestrator/workflows/builtin/execute_operation.py @@ -17,7 +17,7 @@ Builtin execute_operation workflow """ -from ..api.task import OperationTask
+from . import utils from ... import workflow @@ -28,7 +28,6 @@ def execute_operation( interface_name, operation_name, operation_kwargs, - allow_kwargs_override, run_by_dependency_order, type_names, node_template_ids, @@ -41,7 +40,6 @@ def execute_operation( :param TaskGraph graph: the graph which will describe the workflow. :param basestring operation: the operation name to execute :param dict operation_kwargs: - :param bool allow_kwargs_override: :param bool run_by_dependency_order: :param type_names: :param node_template_ids: @@ -71,8 +69,7 @@ def execute_operation( node=node, interface_name=interface_name, operation_name=operation_name, - operation_kwargs=operation_kwargs, - allow_kwargs_override=allow_kwargs_override + operation_kwargs=operation_kwargs ) ) @@ -108,21 +105,16 @@ def _create_node_task( node, interface_name, operation_name, - operation_kwargs, - allow_kwargs_override): + operation_kwargs): """ A workflow which executes a single operation :param node: the node instance to install :param basestring operation: the operation name :param dict operation_kwargs: - :param bool allow_kwargs_override: :return: """ - if allow_kwargs_override is not None: - operation_kwargs['allow_kwargs_override'] = allow_kwargs_override - - return OperationTask.for_node( + return utils.create_node_task( node=node, interface_name=interface_name, operation_name=operation_name, diff --git a/aria/orchestrator/workflows/builtin/utils.py b/aria/orchestrator/workflows/builtin/utils.py index 752fe351..2254d137 100644 --- a/aria/orchestrator/workflows/builtin/utils.py +++ b/aria/orchestrator/workflows/builtin/utils.py @@ -12,26 +12,31 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from ..api.task import OperationTask + +from ..api.task import OperationTask, StubTask from .. import exceptions -def create_node_task(node, interface_name, operation_name): +def create_node_task(node, interface_name, operation_name, **kwargs): """ Returns a new operation task if the operation exists in the node, otherwise returns None. """ try: + if _is_empty_task(node, interface_name, operation_name): + return StubTask() + return OperationTask.for_node(node=node, interface_name=interface_name, - operation_name=operation_name) + operation_name=operation_name, + **kwargs) except exceptions.OperationNotFoundException: # We will skip nodes which do not have the operation return None def create_relationships_tasks( - node, interface_name, source_operation_name=None, target_operation_name=None): + node, interface_name, source_operation_name=None, target_operation_name=None, **kwargs): """ Creates a relationship task (source and target) for all of a node_instance relationships. :param basestring source_operation_name: the relationship operation name. 
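# [Editor's sketch, not part of the patch: a self-contained rendering of the
# stub-task pattern create_node_task now follows, as shown in the hunk above -
# an operation that is declared on the interface but has no implementation
# yields a StubTask placeholder, so the task graph keeps its shape while
# nothing actually runs for it. `pick_task` is a hypothetical helper name; the
# import path mirrors the patch.]
from aria.orchestrator.workflows.api.task import OperationTask, StubTask

def pick_task(node, interface_name, operation_name):
    operation = node.interfaces[interface_name].operations[operation_name]
    if operation.implementation is None:
        return StubTask()  # placeholder edge; executors run nothing for it
    return OperationTask.for_node(node=node,
                                  interface_name=interface_name,
                                  operation_name=operation_name)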
@@ -43,21 +48,18 @@ def create_relationships_tasks( """ sub_tasks = [] for relationship in node.outbound_relationships: - try: - relationship_operations = relationship_tasks( - relationship, - interface_name, - source_operation_name=source_operation_name, - target_operation_name=target_operation_name) - sub_tasks.append(relationship_operations) - except exceptions.OperationNotFoundException: - # We will skip relationships which do not have the operation - pass + relationship_operations = relationship_tasks( + relationship, + interface_name, + source_operation_name=source_operation_name, + target_operation_name=target_operation_name, + **kwargs) + sub_tasks.append(relationship_operations) return sub_tasks -def relationship_tasks( - relationship, interface_name, source_operation_name=None, target_operation_name=None): +def relationship_tasks(relationship, interface_name, source_operation_name=None, + target_operation_name=None, **kwargs): """ Creates a relationship task source and target. :param Relationship relationship: the relationship instance itself @@ -68,17 +70,33 @@ def relationship_tasks( """ operations = [] if source_operation_name: - operations.append( - OperationTask.for_relationship(relationship=relationship, - interface_name=interface_name, - operation_name=source_operation_name) - ) + try: + if _is_empty_task(relationship, interface_name, source_operation_name): + operations.append(StubTask()) + else: + operations.append( + OperationTask.for_relationship(relationship=relationship, + interface_name=interface_name, + operation_name=source_operation_name, + **kwargs) + ) + except exceptions.OperationNotFoundException: + # We will skip relationships which do not have the operation + pass if target_operation_name: - operations.append( - OperationTask.for_relationship(relationship=relationship, - interface_name=interface_name, - operation_name=target_operation_name) - ) + try: + if _is_empty_task(relationship, interface_name, target_operation_name): + operations.append(StubTask()) + else: + operations.append( + OperationTask.for_relationship(relationship=relationship, + interface_name=interface_name, + operation_name=target_operation_name, + **kwargs) + ) + except exceptions.OperationNotFoundException: + # We will skip relationships which do not have the operation + pass return operations @@ -106,3 +124,15 @@ def get_task(node_name): graph.add_dependency(dependency, task) else: graph.add_dependency(task, dependencies) + + +def _is_empty_task(actor, interface_name, operation_name): + interface = actor.interfaces.get(interface_name) + if interface: + operation = interface.operations.get(operation_name) + if operation: + return operation.implementation is None + + raise exceptions.OperationNotFoundException( + 'Could not find operation "{0}" on interface "{1}" for {2} "{3}"' + .format(operation_name, interface_name, type(actor).__name__.lower(), actor.name)) diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py index f73cade4..155d0ee1 100644 --- a/aria/orchestrator/workflows/core/engine.py +++ b/aria/orchestrator/workflows/core/engine.py @@ -88,12 +88,12 @@ def _is_cancel(self): def _executable_tasks(self): now = datetime.utcnow() return (task for task in self._tasks_iter() - if task.is_waiting and + if task.is_waiting() and task.due_at <= now and not self._task_has_dependencies(task)) def _ended_tasks(self): - return (task for task in self._tasks_iter() if task.has_ended) + return (task for task in self._tasks_iter() if task.has_ended()) 
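# [Editor's aside, not part of the patch: is_waiting and has_ended change from
# properties to plain methods in this commit (the core/task.py hunk below drops
# their @property decorators), so call sites like the two generators above gain
# parentheses. A toy stand-in with hypothetical status strings, just to show
# the calling convention:]
class _Task(object):
    def __init__(self, status):
        self.status = status

    def is_waiting(self):
        return self.status in ('pending', 'retrying')

    def has_ended(self):
        return self.status in ('success', 'failed')

assert _Task('pending').is_waiting() and not _Task('pending').has_ended()
assert _Task('failed').has_ended() and not _Task('failed').is_waiting()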
def _task_has_dependencies(self, task): return len(self._execution_graph.pred.get(task.id, {})) > 0 @@ -105,7 +105,7 @@ def _tasks_iter(self): for _, data in self._execution_graph.nodes_iter(data=True): task = data['task'] if isinstance(task, engine_task.OperationTask): - if not task.model_task.has_ended: + if not task.model_task.has_ended(): self._workflow_context.model.task.refresh(task.model_task) yield task diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py index ba93e21e..2b26152c 100644 --- a/aria/orchestrator/workflows/core/task.py +++ b/aria/orchestrator/workflows/core/task.py @@ -69,11 +69,9 @@ def __init__(self, *args, **kwargs): self.status = models.Task.PENDING self.due_at = datetime.utcnow() - @property def has_ended(self): return self.status in (models.Task.SUCCESS, models.Task.FAILED) - @property def is_waiting(self): return self.status in (models.Task.PENDING, models.Task.RETRYING) diff --git a/aria/orchestrator/workflows/exceptions.py b/aria/orchestrator/workflows/exceptions.py index 0ca263f5..b5ae496b 100644 --- a/aria/orchestrator/workflows/exceptions.py +++ b/aria/orchestrator/workflows/exceptions.py @@ -16,6 +16,8 @@ """ Workflow related Exception classes """ +import os + from .. import exceptions @@ -52,10 +54,10 @@ def explanation(self): Describes the error in detail """ return ( - 'Command "{error.command}" executed with an error.\n' - 'code: {error.return_code}\n' - 'error: {error.stderr}\n' - 'output: {error.stdout}'.format(error=self)) + 'Command "{error.command}" executed with an error.{0}' + 'code: {error.return_code}{0}' + 'error: {error.stderr}{0}' + 'output: {error.stdout}'.format(os.linesep, error=self)) class AriaEngineError(exceptions.AriaError): diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py index baa03752..7bd9b7cf 100644 --- a/aria/orchestrator/workflows/executor/celery.py +++ b/aria/orchestrator/workflows/executor/celery.py @@ -44,7 +44,7 @@ def __init__(self, app, *args, **kwargs): def execute(self, task): self._tasks[task.id] = task - inputs = dict((k, v.value) for k, v in task.inputs.iteritems()) + inputs = dict(inp.unwrap() for inp in task.inputs.values()) inputs['ctx'] = task.context self._results[task.id] = self._app.send_task( task.operation_mapping, diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py new file mode 100644 index 00000000..d894b253 --- /dev/null +++ b/aria/orchestrator/workflows/executor/dry.py @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
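# [Editor's aside, not part of the patch: the executor changes in this commit
# repeatedly swap dict((k, v.value) for k, v in task.inputs.iteritems()) for
# dict(inp.unwrap() for inp in task.inputs.values()), which implies that a
# Parameter's unwrap() returns a (name, value) pair. A minimal illustrative
# stand-in - _Parameter and the sample input are hypothetical:]
class _Parameter(object):
    def __init__(self, name, value):
        self.name = name
        self.value = value

    def unwrap(self):
        return self.name, self.value  # pair form, consumed directly by dict()

inputs = {'port': _Parameter('port', 8080)}
assert dict(inp.unwrap() for inp in inputs.values()) == {'port': 8080}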
+ +""" +Dry executor +""" + +from datetime import datetime + +from .base import BaseExecutor + + +class DryExecutor(BaseExecutor): + """ + Executor which dry runs tasks - prints task information without causing any side effects + """ + + def execute(self, task): + # updating the task manually instead of calling self._task_started(task), + # to avoid any side effects raising that event might cause + with task._update(): + task.started_at = datetime.utcnow() + task.status = task.STARTED + + actor_type = type(task.actor).__name__.lower() + implementation = '{0} > '.format(task.plugin) if task.plugin else '' + implementation += task.implementation + inputs = dict(inp.unwrap() for inp in task.inputs.values()) + + task.context.logger.info( + 'Executing {actor_type} {task.actor.name} operation {task.interface_name} ' + '{task.operation_name}: {implementation} (Inputs: {inputs})' + .format(actor_type=actor_type, task=task, implementation=implementation, inputs=inputs)) + + # updating the task manually instead of calling self._task_succeeded(task), + # to avoid any side effects raising that event might cause + with task._update(): + task.ended_at = datetime.utcnow() + task.status = task.SUCCESS diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py index f814c4d6..851d78e8 100644 --- a/aria/orchestrator/workflows/executor/process.py +++ b/aria/orchestrator/workflows/executor/process.py @@ -148,7 +148,7 @@ def _create_arguments_dict(self, task): return { 'task_id': task.id, 'implementation': task.implementation, - 'operation_inputs': dict((k, v.value) for k, v in task.inputs.iteritems()), + 'operation_inputs': dict(inp.unwrap() for inp in task.inputs.values()), 'port': self._server_port, 'context': task.context.serialization_dict, } diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py index 1a49af54..f422592a 100644 --- a/aria/orchestrator/workflows/executor/thread.py +++ b/aria/orchestrator/workflows/executor/thread.py @@ -21,6 +21,7 @@ import threading from aria.utils import imports + from .base import BaseExecutor @@ -58,7 +59,7 @@ def _processor(self): self._task_started(task) try: task_func = imports.load_attribute(task.implementation) - inputs = dict((k, v.value) for k, v in task.inputs.iteritems()) + inputs = dict(inp.unwrap() for inp in task.inputs.values()) task_func(ctx=task.context, **inputs) self._task_succeeded(task) except BaseException as e: diff --git a/aria/parser/consumption/__init__.py b/aria/parser/consumption/__init__.py index 7da84903..8f6d2b6c 100644 --- a/aria/parser/consumption/__init__.py +++ b/aria/parser/consumption/__init__.py @@ -17,10 +17,21 @@ from .exceptions import ConsumerException from .context import ConsumptionContext from .style import Style -from .consumer import Consumer, ConsumerChain +from .consumer import ( + Consumer, + ConsumerChain +) from .presentation import Read from .validation import Validate -from .modeling import ServiceTemplate, Types, ServiceInstance +from .modeling import ( + ServiceTemplate, + Types, + ServiceInstance, + FindHosts, + ConfigureOperations, + SatisfyRequirements, + ValidateCapabilities +) from .inputs import Inputs __all__ = ( @@ -34,4 +45,7 @@ 'ServiceTemplate', 'Types', 'ServiceInstance', - 'Inputs') + 'Inputs', + 'SatisfyRequirements', + 'ValidateCapabilities' +) diff --git a/aria/parser/consumption/modeling.py b/aria/parser/consumption/modeling.py index 6c616b40..771fd7f3 100644 --- 
a/aria/parser/consumption/modeling.py +++ b/aria/parser/consumption/modeling.py @@ -106,7 +106,8 @@ def consume(self): 'template') return - self.context.modeling.template.instantiate(None) + self.context.modeling.template.instantiate(None, None, + inputs=dict(self.context.modeling.inputs)) class CoerceServiceInstanceValues(Consumer): diff --git a/aria/storage/core.py b/aria/storage/core.py index 8302fc93..8caca667 100644 --- a/aria/storage/core.py +++ b/aria/storage/core.py @@ -38,7 +38,7 @@ * StorageDriver - class, abstract model implementation. """ -from aria.logger import LoggerMixin +from aria.logger import LoggerMixin, NullHandler from . import sql_mapi __all__ = ( @@ -71,6 +71,10 @@ def __init__(self, :param kwargs: """ super(Storage, self).__init__(**kwargs) + # Add a NullHandler to every storage object's logger. + # This is because the absence of a handler shows up while using the CLI in the form of: + # `No handlers could be found for logger "aria.ResourceStorage"`. + self.logger.addHandler(NullHandler()) self.api = api_cls self.registered = {} self._initiator = initiator diff --git a/aria/storage/exceptions.py b/aria/storage/exceptions.py index f982f63b..3f0ececf 100644 --- a/aria/storage/exceptions.py +++ b/aria/storage/exceptions.py @@ -23,3 +23,7 @@ class StorageError(exceptions.AriaError): General storage exception """ pass + + +class NotFoundError(StorageError): + pass diff --git a/aria/storage/instrumentation.py b/aria/storage/instrumentation.py index 138432a5..cf2a3658 100644 --- a/aria/storage/instrumentation.py +++ b/aria/storage/instrumentation.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import copy import json @@ -189,9 +190,9 @@ def apply_tracked_changes(tracked_changes, model): if not value: del successfully_updated_changes[key] model.logger.error( - 'Registering all the changes to the storage has failed. \n' - 'The successful updates were: \n ' - '{0}'.format(json.dumps(successfully_updated_changes, indent=4))) + 'Registering all the changes to the storage has failed. 
{0}' + 'The successful updates were: {0} ' + '{1}'.format(os.linesep, json.dumps(successfully_updated_changes, indent=4))) raise diff --git a/aria/storage/sql_mapi.py b/aria/storage/sql_mapi.py index 8d34bb4c..730d0076 100644 --- a/aria/storage/sql_mapi.py +++ b/aria/storage/sql_mapi.py @@ -59,7 +59,7 @@ def get(self, entry_id, include=None, **kwargs): result = query.first() if not result: - raise exceptions.StorageError( + raise exceptions.NotFoundError( 'Requested `{0}` with ID `{1}` was not found' .format(self.model_cls.__name__, entry_id) ) @@ -69,13 +69,13 @@ def get_by_name(self, entry_name, include=None, **kwargs): assert hasattr(self.model_cls, 'name') result = self.list(include=include, filters={'name': entry_name}) if not result: - raise exceptions.StorageError( - 'Requested {0} with NAME `{1}` was not found' + raise exceptions.NotFoundError( + 'Requested {0} with name `{1}` was not found' .format(self.model_cls.__name__, entry_name) ) elif len(result) > 1: raise exceptions.StorageError( - 'Requested {0} with NAME `{1}` returned more than 1 value' + 'Requested {0} with name `{1}` returned more than 1 value' .format(self.model_cls.__name__, entry_name) ) else: @@ -92,10 +92,8 @@ def list(self, results, total, size, offset = self._paginate(query, pagination) return ListResult( - items=results, - metadata=dict(total=total, - size=size, - offset=offset) + dict(total=total, size=size, offset=offset), + results ) def iter(self, @@ -406,19 +404,11 @@ def init_storage(base_dir, filename='db.sqlite'): return dict(engine=engine, session=session) -class ListResult(object): +class ListResult(list): """ a ListResult contains results about the requested items. """ - def __init__(self, items, metadata): - self.items = items + def __init__(self, metadata, *args, **qwargs): + super(ListResult, self).__init__(*args, **qwargs) self.metadata = metadata - - def __len__(self): - return len(self.items) - - def __iter__(self): - return iter(self.items) - - def __getitem__(self, item): - return self.items[item] + self.items = self diff --git a/aria/utils/application.py b/aria/utils/application.py deleted file mode 100644 index 2f408257..00000000 --- a/aria/utils/application.py +++ /dev/null @@ -1,294 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Convenience storage related tools. 
-# TODO rename module name -""" - -import json -import os -import shutil -import tarfile -import tempfile -from datetime import datetime - -from aria.storage.exceptions import StorageError -from aria.logger import LoggerMixin - - -class StorageManager(LoggerMixin): - """ - Convenience wrapper to simplify work with the lower level storage mechanism - """ - - def __init__( - self, - model_storage, - resource_storage, - blueprint_path, - blueprint_id, - blueprint_plan, - deployment_id, - deployment_plan, - **kwargs): - super(StorageManager, self).__init__(**kwargs) - self.model_storage = model_storage - self.resource_storage = resource_storage - self.blueprint_path = blueprint_path - self.blueprint_id = blueprint_id - self.blueprint_plan = blueprint_plan - self.deployment_id = deployment_id - self.deployment_plan = deployment_plan - - @classmethod - def from_deployment( - cls, - model_storage, - resource_storage, - deployment_id, - deployment_plan): - """ - Create a StorageManager from a deployment - """ - return cls( - model_storage=model_storage, - resource_storage=resource_storage, - deployment_id=deployment_id, - deployment_plan=deployment_plan, - blueprint_path=None, - blueprint_plan=None, - blueprint_id=None - ) - - @classmethod - def from_blueprint( - cls, - model_storage, - resource_storage, - blueprint_path, - blueprint_id, - blueprint_plan): - """ - Create a StorageManager from a blueprint - """ - return cls( - model_storage=model_storage, - resource_storage=resource_storage, - blueprint_path=blueprint_path, - blueprint_plan=blueprint_plan, - blueprint_id=blueprint_id, - deployment_id=None, - deployment_plan=None) - - def create_blueprint_storage(self, source, main_file_name=None): - """ - create blueprint model & resource - """ - assert self.blueprint_path and self.blueprint_id - assert hasattr(self.resource_storage, 'blueprint') - assert hasattr(self.model_storage, 'blueprint') - - self.logger.debug('creating blueprint resource storage entry') - self.resource_storage.service_template.upload( - entry_id=self.blueprint_id, - source=os.path.dirname(source)) - self.logger.debug('created blueprint resource storage entry') - - self.logger.debug('creating blueprint model storage entry') - now = datetime.utcnow() - blueprint = self.model_storage.service_template.model_cls( - plan=self.blueprint_plan, - id=self.blueprint_id, - description=self.blueprint_plan.get('description'), - created_at=now, - updated_at=now, - main_file_name=main_file_name, - ) - self.model_storage.service_template.put(blueprint) - self.logger.debug('created blueprint model storage entry') - - def create_nodes_storage(self): - """ - create nodes model - """ - assert self.blueprint_path and self.blueprint_id - assert hasattr(self.model_storage, 'node') - assert hasattr(self.model_storage, 'relationship') - - for node in self.blueprint_plan['nodes']: - node_copy = node.copy() - for field in ('name', - 'deployment_plugins_to_install', - 'interfaces', - 'instances'): - node_copy.pop(field) - scalable = node_copy.pop('capabilities')['scalable']['properties'] - for index, relationship in enumerate(node_copy['relationships']): - relationship = self.model_storage.relationship.model_cls(**relationship) - self.model_storage.relationship.put(relationship) - node_copy['relationships'][index] = relationship - - node_copy = self.model_storage.node.model_cls( - blueprint_id=self.blueprint_id, - planned_number_of_instances=scalable['current_instances'], - deploy_number_of_instances=scalable['default_instances'], - 
min_number_of_instances=scalable['min_instances'], - max_number_of_instances=scalable['max_instances'], - number_of_instances=scalable['current_instances'], - **node_copy) - self.model_storage.node.put(node_copy) - - def create_deployment_storage(self): - """ - create deployment model & resource - """ - assert self.deployment_id and self.deployment_plan - - assert hasattr(self.resource_storage, 'blueprint') - assert hasattr(self.resource_storage, 'deployment') - assert hasattr(self.model_storage, 'deployment') - - self.logger.debug('creating deployment resource storage entry') - temp_dir = tempfile.mkdtemp() - try: - self.resource_storage.service_template.download( - entry_id=self.blueprint_id, - destination=temp_dir) - self.resource_storage.service_instance.upload( - entry_id=self.deployment_id, - source=temp_dir) - finally: - shutil.rmtree(temp_dir, ignore_errors=True) - self.logger.debug('created deployment resource storage entry') - - self.logger.debug('creating deployment model storage entry') - now = datetime.utcnow() - deployment = self.model_storage.service_instance.model_cls( - id=self.deployment_id, - blueprint_id=self.blueprint_id, - description=self.deployment_plan['description'], - workflows=self.deployment_plan['workflows'], - inputs=self.deployment_plan['inputs'], - policy_types=self.deployment_plan['policy_types'], - policy_triggers=self.deployment_plan['policy_triggers'], - groups=self.deployment_plan['groups'], - scaling_groups=self.deployment_plan['scaling_groups'], - outputs=self.deployment_plan['outputs'], - created_at=now, - updated_at=now - ) - self.model_storage.service_instance.put(deployment) - self.logger.debug('created deployment model storage entry') - - def create_node_instances_storage(self): - """ - create node_instances model - """ - assert self.deployment_id and self.deployment_plan - assert hasattr(self.model_storage, 'node_instance') - assert hasattr(self.model_storage, 'relationship_instance') - - self.logger.debug('creating node-instances model storage entries') - for node_instance in self.deployment_plan['node_instances']: - node_model = self.model_storage.node.get(node_instance['node_id']) - relationship_instances = [] - - for index, relationship_instance in enumerate(node_instance['relationships']): - relationship_instance_model = self.model_storage.relationship.model_cls( - relationship=node_model.relationships[index], - target_name=relationship_instance['target_name'], - type=relationship_instance['type'], - target_id=relationship_instance['target_id']) - relationship_instances.append(relationship_instance_model) - self.model_storage.relationship.put(relationship_instance_model) - - node_instance_model = self.model_storage.node.model_cls( - node=node_model, - id=node_instance['id'], - runtime_properties={}, - state=self.model_storage.node.model_cls.UNINITIALIZED, - deployment_id=self.deployment_id, - version='1.0', - relationship_instances=relationship_instances) - - self.model_storage.node.put(node_instance_model) - self.logger.debug('created node-instances model storage entries') - - def create_plugin_storage(self, plugin_id, source): - """ - create plugin model & resource - """ - assert hasattr(self.model_storage, 'plugin') - assert hasattr(self.resource_storage, 'plugin') - - self.logger.debug('creating plugin resource storage entry') - self.resource_storage.plugin.upload(entry_id=plugin_id, source=source) - self.logger.debug('created plugin resource storage entry') - - self.logger.debug('creating plugin model storage entry') - plugin = 
_load_plugin_from_archive(source) - build_props = plugin.get('build_server_os_properties') - now = datetime.utcnow() - - plugin = self.model_storage.plugin.model_cls( - id=plugin_id, - package_name=plugin.get('package_name'), - package_version=plugin.get('package_version'), - archive_name=plugin.get('archive_name'), - package_source=plugin.get('package_source'), - supported_platform=plugin.get('supported_platform'), - distribution=build_props.get('distribution'), - distribution_version=build_props.get('distribution_version'), - distribution_release=build_props.get('distribution_release'), - wheels=plugin.get('wheels'), - excluded_wheels=plugin.get('excluded_wheels'), - supported_py_versions=plugin.get('supported_python_versions'), - uploaded_at=now - ) - self.model_storage.plugin.put(plugin) - self.logger.debug('created plugin model storage entry') - - -def _load_plugin_from_archive(tar_source): - if not tarfile.is_tarfile(tar_source): - # TODO: go over the exceptions - raise StorageError( - 'the provided tar archive can not be read.') - - with tarfile.open(tar_source) as tar: - tar_members = tar.getmembers() - # a wheel plugin will contain exactly one sub directory - if not tar_members: - raise StorageError( - 'archive file structure malformed. expecting exactly one ' - 'sub directory; got none.') - package_json_path = os.path.join(tar_members[0].name, - 'package.json') - try: - package_member = tar.getmember(package_json_path) - except KeyError: - raise StorageError("'package.json' was not found under {0}" - .format(package_json_path)) - try: - package_json = tar.extractfile(package_member) - except tarfile.ExtractError as e: - raise StorageError(str(e)) - try: - return json.load(package_json) - except ValueError as e: - raise StorageError("'package.json' is not a valid json: " - "{json_str}. error is {error}" - .format(json_str=package_json.read(), error=str(e))) diff --git a/aria/utils/archive.py b/aria/utils/archive.py new file mode 100644 index 00000000..63d9004b --- /dev/null +++ b/aria/utils/archive.py @@ -0,0 +1,63 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
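# [Editor's sketch, not part of the patch: intended usage of the helpers this
# new module defines just below - is_archive recognizes tar and zip files, and
# extract_archive dispatches to untar or unzip, unpacking into a fresh temp
# directory when no destination is given. The source path is a sample value:]
import os
from aria.utils.archive import is_archive, extract_archive

source = '/tmp/service-template.tar.gz'  # hypothetical input
if os.path.exists(source) and is_archive(source):
    workdir = extract_archive(source)  # a tempfile.mkdtemp() dir by default
    print('extracted to {0}'.format(workdir))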
+ + +import os +import tarfile +import zipfile +import tempfile +from contextlib import closing + + +def is_archive(source): + return tarfile.is_tarfile(source) or zipfile.is_zipfile(source) + + +def extract_archive(source): + if tarfile.is_tarfile(source): + return untar(source) + elif zipfile.is_zipfile(source): + return unzip(source) + raise ValueError( + 'Unsupported archive type provided or archive is not valid: {0}.'.format(source)) + + +def tar(source, destination): + with closing(tarfile.open(destination, 'w:gz')) as tar_archive: + tar_archive.add(source, arcname=os.path.basename(source)) + + +def untar(archive, destination=None): + if not destination: + destination = tempfile.mkdtemp() + with closing(tarfile.open(name=archive)) as tar_archive: + tar_archive.extractall(path=destination, members=tar_archive.getmembers()) + return destination + + +def zip(source, destination): + with closing(zipfile.ZipFile(destination, 'w')) as zip_file: + for root, _, files in os.walk(source): + for filename in files: + file_path = os.path.join(root, filename) + source_dir = os.path.dirname(source) + zip_file.write( + file_path, os.path.relpath(file_path, source_dir)) + return destination + + +def unzip(archive, destination=None): + if not destination: + destination = tempfile.mkdtemp() + with closing(zipfile.ZipFile(archive, 'r')) as zip_file: + zip_file.extractall(destination) + return destination diff --git a/aria/utils/exceptions.py b/aria/utils/exceptions.py index 9e3e80f5..b60cee43 100644 --- a/aria/utils/exceptions.py +++ b/aria/utils/exceptions.py @@ -15,6 +15,7 @@ import sys import linecache +import StringIO import traceback as tb import jsonpickle @@ -89,6 +90,16 @@ def _print_stack(frame): puts(line) +def get_exception_as_string(exc_type, exc_val, traceback): + s_traceback = StringIO.StringIO() + tb.print_exception( + etype=exc_type, + value=exc_val, + tb=traceback, + file=s_traceback) + return s_traceback.getvalue() + + class _WrappedException(Exception): def __init__(self, exception_type, exception_str): diff --git a/aria/utils/file.py b/aria/utils/file.py index b515f705..6d1aa164 100644 --- a/aria/utils/file.py +++ b/aria/utils/file.py @@ -15,6 +15,7 @@ import errno import os +import shutil def makedirs(path): @@ -26,3 +27,15 @@ def makedirs(path): except IOError as e: if e.errno != errno.EEXIST: raise + +def remove_if_exists(path): + + try: + if os.path.isfile(path): + os.remove(path) + if os.path.isdir(path): + shutil.rmtree(path) + + except OSError as e: + if e.errno != errno.ENOENT: # errno.ENOENT = no such file or directory + raise # re-raise exception if a different error occurred diff --git a/aria/utils/formatting.py b/aria/utils/formatting.py index 8a223e9d..b5e141d1 100644 --- a/aria/utils/formatting.py +++ b/aria/utils/formatting.py @@ -83,6 +83,34 @@ def full_type_name(value): return name if module == '__builtin__' else '%s.%s' % (module, name) +def decode_list(data): + decoded_list = [] + for item in data: + if isinstance(item, unicode): + item = item.encode('utf-8') + elif isinstance(item, list): + item = decode_list(item) + elif isinstance(item, dict): + item = decode_dict(item) + decoded_list.append(item) + return decoded_list + + +def decode_dict(data): + decoded_dict = {} + for key, value in data.iteritems(): + if isinstance(key, unicode): + key = key.encode('utf-8') + if isinstance(value, unicode): + value = value.encode('utf-8') + elif isinstance(value, list): + value = decode_list(value) + elif isinstance(value, dict): + value = decode_dict(value) + 
decoded_dict[key] = value + return decoded_dict + + def safe_str(value): """ Like :code:`str` coercion, but makes sure that Unicode strings are properly diff --git a/aria/utils/http.py b/aria/utils/http.py new file mode 100644 index 00000000..7bdfd793 --- /dev/null +++ b/aria/utils/http.py @@ -0,0 +1,62 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import tempfile + +import requests + + +def download_file(url, destination=None, logger=None, progress_handler=None): + """Download file. + + May raise IOError as well as requests.exceptions.RequestException + :param url: Location of the file to download + :type url: str + :param destination: + Location where the file should be saved (autogenerated by default) + :type destination: str | None + :returns: Location where the file was saved + :rtype: str + + """ + chunk_size = 1024 + + if not destination: + file_descriptor, destination = tempfile.mkstemp() + os.close(file_descriptor) + if logger: + logger.info('Downloading {0} to {1}...'.format(url, destination)) + + response = requests.get(url, stream=True) + final_url = response.url + if final_url != url and logger: + logger.debug('Redirected to {0}'.format(final_url)) + + read_bytes = 0 + total_size = int(response.headers['Content-Length']) \ + if 'Content-Length' in response.headers else None + try: + with open(destination, 'wb') as destination_file: + for chunk in response.iter_content(chunk_size): + destination_file.write(chunk) + if total_size and progress_handler: + # Only showing progress bar if we have the total content length + read_bytes += chunk_size + progress_handler(read_bytes, total_size) + finally: + response.close() + + return destination diff --git a/aria/utils/threading.py b/aria/utils/threading.py index b99250de..bfd30f5b 100644 --- a/aria/utils/threading.py +++ b/aria/utils/threading.py @@ -15,6 +15,7 @@ from __future__ import absolute_import # so we can import standard 'threading' +import sys import itertools import multiprocessing from threading import (Thread, Lock) @@ -255,3 +256,26 @@ def __enter__(self): def __exit__(self, the_type, value, traceback): return self.lock.__exit__(the_type, value, traceback) + + +class ExceptionThread(Thread): + """ + A thread from which top level exceptions can be retrieved or reraised + """ + def __init__(self, *args, **kwargs): + Thread.__init__(self, *args, **kwargs) + self.exception = None + + def run(self): + try: + super(ExceptionThread, self).run() + except BaseException: + self.exception = sys.exc_info() + + def is_error(self): + return self.exception is not None + + def raise_error_if_exists(self): + if self.is_error(): + type_, value, trace = self.exception + raise type_, value, trace diff --git a/aria/utils/type.py b/aria/utils/type.py new file mode 100644 index 00000000..dad54272 --- 
/dev/null +++ b/aria/utils/type.py @@ -0,0 +1,61 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def validate_value_type(value, type_name): + """ + Validate that a value is of a specific type. + A ValueError will be raised on type mismatch. + Supports both Python and YAML type names. + """ + + # TODO: add timestamp type? + name_to_type = { + 'list': list, + 'dict': dict, + 'tuple': tuple, + 'str': str, + 'unicode': str, + 'string': str, + 'int': int, + 'integer': int, + 'bool': bool, + 'boolean': bool, + 'float': float + } + + type_ = name_to_type.get(type_name.lower()) + if type_ is None: + raise RuntimeError('No supported type_name was provided') + + if not isinstance(value, type_): + raise ValueError('Value {0} is not of type {1}'.format(value, type_name)) + + +def convert_value_to_type(str_value, type_name): + type_name = type_name.lower() + # raise the unsupported-type error outside the try block, so that it is not + # swallowed by the ValueError handler below + if type_name not in ('str', 'unicode', 'int', 'bool', 'float'): + raise ValueError('No supported type_name was provided') + try: + if type_name in ('str', 'unicode'): + return str_value.decode('utf-8') + elif type_name == 'int': + return int(str_value) + elif type_name == 'bool': + # note: bool() of any non-empty string is True, 'False' included + return bool(str_value) + else: # 'float' + return float(str_value) + except ValueError: + raise ValueError('Trying to convert {0} to {1} failed'.format(str_value, + type_name)) diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml new file mode 100644 index 00000000..d0b08547 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml @@ -0,0 +1,3 @@ +storage_snapshot_id: "snapshot-id" +storage_location: /mnt +cpus: 4 \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml new file mode 100644 index 00000000..d0b08547 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml @@ -0,0 +1,3 @@ +storage_snapshot_id: "snapshot-id" +storage_location: /mnt +cpus: 4 \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml new file mode 100644 index 00000000..daca041f --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml @@ -0,0 +1,2 @@ +storage_location: /mnt +cpus: 4 \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml new file mode 100644 index 00000000..18e457db --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml @@ -0,0 +1,2 @@ +storage_snapshot_id: "snapshot-id" +cpus: 4 \ No newline at end of file diff --git 
a/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml new file mode 100644 index 00000000..d0b08547 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml @@ -0,0 +1,3 @@ +storage_snapshot_id: "snapshot-id" +storage_location: /mnt +cpus: 4 \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml new file mode 100644 index 00000000..d0b08547 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml @@ -0,0 +1,3 @@ +storage_snapshot_id: "snapshot-id" +storage_location: /mnt +cpus: 4 \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml new file mode 100644 index 00000000..c1ee88a0 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml @@ -0,0 +1 @@ +cpus: 4 \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml new file mode 100644 index 00000000..5302bbf6 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml @@ -0,0 +1 @@ +my_cpus: 8 \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml new file mode 100644 index 00000000..9687bb06 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml @@ -0,0 +1 @@ +network_name: "network" \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml new file mode 100644 index 00000000..9687bb06 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml @@ -0,0 +1 @@ +network_name: "network" \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml new file mode 100644 index 00000000..9687bb06 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml @@ -0,0 +1 @@ +network_name: "network" \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml new file mode 100644 index 00000000..57f99a31 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml @@ -0,0 +1 @@ +objectstore_name: "objectstore" \ No newline at end of file diff --git a/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml new file mode 100644 index 00000000..c1ee88a0 --- /dev/null +++ b/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml @@ -0,0 +1 @@ +cpus: 4 \ No newline at end of file diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py index 0e9177fd..95762600 100644 --- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py +++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py @@ -19,6 +19,7 @@ Relies on many helper methods in the presentation classes. 
""" +import os import re from types import FunctionType from datetime import datetime @@ -41,7 +42,7 @@ def create_service_template_model(context): # pylint: disable=too-many-locals,too-many-branches model = ServiceTemplate(created_at=datetime.now(), - main_file_name=str(context.presentation.location)) + main_file_name=os.path.basename(str(context.presentation.location))) model.description = context.presentation.get('service_template', 'description', 'value') diff --git a/requirements.in b/requirements.in index bc27479b..39501406 100644 --- a/requirements.in +++ b/requirements.in @@ -10,6 +10,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +# In order to create the requirements.txt file, execute +# pip-compile --output-file requirements.txt requirements.in (pip-tools package is needed). + PyYAML<3.13 requests>=2.3.0, <2.14.0 networkx>=1.9, <1.10 # version 1.10 dropped support of python 2.6 @@ -25,6 +28,12 @@ SQLAlchemy>=1.1.0, <1.2 # version 1.2 dropped support of python 2.6 wagon==0.6.0 bottle>=0.12.0, <0.13 Fabric>=1.13.0, <1.14 +click>=4.1, < 5.0 +colorama>=0.3.3, < 0.3.5 +PrettyTable>=0.7,<0.8 +click_didyoumean==0.0.3 +backports.shutil_get_terminal_size==1.0.0 +logutils==0.3.4.1 # Since the tool we are using to generate our requirements.txt, `pip-tools`, # does not currently support conditional dependencies (;), we're adding our original diff --git a/requirements.txt b/requirements.txt index 901aa751..3accaa35 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,28 +4,30 @@ # # pip-compile --output-file requirements.txt requirements.in # - -# ---------------------------------------------------------------------------------- # Since the tool we are using to generate our requirements.txt, `pip-tools`, # does not currently support conditional dependencies (;), we're adding our original -# conditional dependencies here manually. +# conditional dependencies here as comments, and manually adding them to our +# generated requirements.txt file. 
# The relevant pip-tools issue: https://github.com/jazzband/pip-tools/issues/435 -importlib==1.0.4 ; python_version < '2.7' -ordereddict==1.1 ; python_version < '2.7' -total-ordering==0.1.0 ; python_version < '2.7' +importlib ; python_version < '2.7' +ordereddict ; python_version < '2.7' +total-ordering ; python_version < '2.7' # only one version on pypi # Fabric makes use of this library, but doesn't bring it :( pypiwin32==219 ; sys_platform == 'win32' # ---------------------------------------------------------------------------------- -appdirs==1.4.3 # via setuptools args==0.1.0 # via clint asn1crypto==0.22.0 # via cryptography +backports.shutil_get_terminal_size==1.0.0 blinker==1.4 bottle==0.12.13 cachecontrol[filecache]==0.12.1 cffi==1.10.0 # via cryptography +click==4.1 +click_didyoumean==0.0.3 clint==0.5.1 +colorama==0.3.4 cryptography==1.8.1 # via paramiko decorator==4.0.11 # via networkx enum34==1.1.6 # via cryptography @@ -35,11 +37,13 @@ ipaddress==1.0.18 # via cryptography jinja2==2.8.1 jsonpickle==0.9.4 lockfile==0.12.2 # via cachecontrol +logutils==0.3.4.1 markupsafe==1.0 # via jinja2 msgpack-python==0.4.8 # via cachecontrol networkx==1.9.1 -packaging==16.8 # via cryptography, setuptools +packaging==16.8 # via cryptography paramiko==2.1.2 # via fabric +prettytable==0.7.2 pyasn1==0.2.3 # via paramiko pycparser==2.17 # via cffi pyparsing==2.2.0 # via packaging @@ -49,7 +53,7 @@ retrying==1.3.3 ruamel.ordereddict==0.4.9 # via ruamel.yaml ruamel.yaml==0.11.15 shortuuid==0.5.0 -six==1.10.0 # via cryptography, packaging, retrying, setuptools +six==1.10.0 # via cryptography, packaging, retrying sqlalchemy==1.1.6 wagon==0.6.0 wheel==0.29.0 # via wagon diff --git a/setup.py b/setup.py index 3d72ebc2..b64453aa 100644 --- a/setup.py +++ b/setup.py @@ -61,7 +61,7 @@ extras_require = {} -console_scripts = ['aria = aria.cli.cli:main'] +console_scripts = ['aria = aria.cli.main:main'] def _generate_user_options(command): diff --git a/tests/.pylintrc b/tests/.pylintrc index 06409e95..eead6e84 100644 --- a/tests/.pylintrc +++ b/tests/.pylintrc @@ -77,7 +77,7 @@ confidence= # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,no-self-use,missing-docstring,attribute-defined-outside-init,redefined-outer-name,import-error,redefined-variable-type,broad-except,protected-access,global-statement,too-many-locals,abstract-method,no-member +disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,no-self-use,missing-docstring,attribute-defined-outside-init,redefined-outer-name,import-error,redefined-variable-type,broad-except,protected-access,global-statement,too-many-locals,abstract-method,no-member,unused-argument [REPORTS] diff --git a/tests/cli/__init__.py b/tests/cli/__init__.py new file mode 100644 index 00000000..ae1e83ee --- /dev/null +++ b/tests/cli/__init__.py @@ -0,0 +1,14 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
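(Editor's note: the CLI tests introduced below drive the click-based commands in-process through click.testing.CliRunner rather than through a subprocess. A minimal, standalone illustration of that pattern - the `hello` command here is hypothetical, while CliRunner and its invoke()/output API are click's own:

    import click
    import click.testing

    @click.command()
    @click.argument('name')
    def hello(name):
        click.echo('hello {0}'.format(name))

    result = click.testing.CliRunner().invoke(hello, ['aria'])
    assert result.exit_code == 0
    assert result.output == 'hello aria\n'

The same idea appears in tests/cli/runner.py further down, which resolves a command group and sub-command from a string such as 'node_templates show 1' before handing the remaining tokens to CliRunner.)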
diff --git a/tests/cli/base_test.py b/tests/cli/base_test.py new file mode 100644 index 00000000..da9d72c5 --- /dev/null +++ b/tests/cli/base_test.py @@ -0,0 +1,77 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from StringIO import StringIO + +import pytest + +from . import runner +from . import utils + + +@pytest.fixture +def mock_storage(): + return utils.MockStorage() + + +@pytest.mark.usefixtures("redirect_logger") +class TestCliBase(object): + + @staticmethod + @pytest.fixture(scope="class") + def redirect_logger(): + + utils.setup_logger(logger_name='aria.cli.main', + handlers=[logging.StreamHandler(TestCliBase._logger_output)], + logger_format='%(message)s') + yield + utils.setup_logger(logger_name='aria.cli.main', + handlers=_default_logger_config['handlers'], + level=_default_logger_config['level']) + + _logger_output = StringIO() + + def invoke(self, command): + self._logger_output.truncate(0) + return runner.invoke(command) + + @property + def logger_output_string(self): + return self._logger_output.getvalue() + + +def assert_exception_raised(outcome, expected_exception, expected_msg=''): + assert isinstance(outcome.exception, expected_exception) + assert expected_msg in str(outcome.exception) + + +# This exists because several tests use monkeypatch to install a callable that raises an +# exception when invoked. A lambda can't express that in-place, since raise is a statement +# rather than an expression, so this small helper builds the raising callable instead +def raise_exception(exception, msg=''): + + def inner(*args, **kwargs): + raise exception(msg) + + return inner + + +def get_default_logger_config(): + logger = logging.getLogger('aria.cli.main') + return {'handlers': logger.handlers, + 'level': logger.level} + +_default_logger_config = get_default_logger_config() diff --git a/tests/cli/runner.py b/tests/cli/runner.py new file mode 100644 index 00000000..7e4243b9 --- /dev/null +++ b/tests/cli/runner.py @@ -0,0 +1,27 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
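A note on the raise_exception helper above: raise is a statement rather than an expression, so a lambda handed to monkeypatch.setattr cannot raise directly, which is what motivates the helper. A self-contained sketch of the same closure at work (make_raiser is an illustrative rename, not part of this patch):

    # Build a callable that raises when invoked, as raise_exception() does.
    def make_raiser(exception, msg=''):
        def inner(*args, **kwargs):
            raise exception(msg)
        return inner

    failing = make_raiser(ValueError, msg='boom')
    try:
        failing('ignored', also='ignored')
    except ValueError as error:
        assert str(error) == 'boom'

In the test modules below this is wired up as, for example, monkeypatch.setattr(Core, 'create_service_template', raise_exception(storage_exceptions.NotFoundError, msg='UNIQUE constraint failed')), after which the raised exception surfaces on the Click outcome object that assert_exception_raised inspects.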
+ +import click.testing + +import aria.cli.commands as commands + + +def invoke(command_string): + command_list = command_string.split() + command, sub, args = command_list[0], command_list[1], command_list[2:] + runner = click.testing.CliRunner() + outcome = runner.invoke(getattr( + getattr(commands, command), sub), args) + return outcome diff --git a/tests/cli/test_node_templates.py b/tests/cli/test_node_templates.py new file mode 100644 index 00000000..ff7ff287 --- /dev/null +++ b/tests/cli/test_node_templates.py @@ -0,0 +1,133 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from mock import ANY, MagicMock + +from aria.cli.env import _Environment + +from .base_test import ( # pylint: disable=unused-import + TestCliBase, + mock_storage +) +from ..mock import models as mock_models + + +class TestNodeTemplatesShow(TestCliBase): + + def test_header_strings(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates show 1') + assert 'Showing node template 1' in self.logger_output_string + assert 'Node template properties:' in self.logger_output_string + assert 'Nodes:' in self.logger_output_string + + def test_no_properties_no_nodes(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates show 1') + + assert 'No properties' in self.logger_output_string + assert 'prop1' not in self.logger_output_string + assert 'value1' not in self.logger_output_string + assert 'No nodes' in self.logger_output_string + assert mock_models.NODE_NAME not in self.logger_output_string + + def test_one_property_no_nodes(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + m = MagicMock(return_value=mock_models.create_node_template_with_dependencies( + include_property=True)) + monkeypatch.setattr(mock_storage.node_template, 'get', m) + self.invoke('node_templates show 2') + assert 'No properties' not in self.logger_output_string + assert 'prop1' in self.logger_output_string and 'value1' in self.logger_output_string + assert 'No nodes' in self.logger_output_string + assert mock_models.NODE_NAME not in self.logger_output_string + + def test_no_properties_one_node(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + m = MagicMock(return_value=mock_models.create_node_template_with_dependencies( + include_node=True)) + monkeypatch.setattr(mock_storage.node_template, 'get', m) + self.invoke('node_templates show 3') + assert 'No properties' in self.logger_output_string + assert 'prop1' not in self.logger_output_string + assert 'value1' not in self.logger_output_string + assert 'No nodes' not in self.logger_output_string + 
assert mock_models.NODE_NAME in self.logger_output_string + + def test_one_property_one_node(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + m = MagicMock(return_value=mock_models.create_node_template_with_dependencies( + include_node=True, include_property=True)) + monkeypatch.setattr(mock_storage.node_template, 'get', m) + self.invoke('node_templates show 4') + assert 'No properties' not in self.logger_output_string + assert 'prop1' in self.logger_output_string and 'value1' in self.logger_output_string + assert 'No nodes' not in self.logger_output_string + assert mock_models.NODE_NAME in self.logger_output_string + + +class TestNodeTemplatesList(TestCliBase): + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'service_template_name', 'asc'), + ('', ' --descending', 'service_template_name', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_specified_service_template(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates list -t {service_template_name}{sort_by}{order}' + .format(service_template_name=mock_models.SERVICE_TEMPLATE_NAME, + sort_by=sort_by, + order=order)) + assert 'Listing node templates for service template {name}...'\ + .format(name=mock_models.SERVICE_TEMPLATE_NAME) in self.logger_output_string + assert 'Listing all node templates...' not in self.logger_output_string + + node_templates_list = mock_storage.node_template.list + node_templates_list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={'service_template': ANY}) + assert 'Node templates:' in self.logger_output_string + assert mock_models.SERVICE_TEMPLATE_NAME in self.logger_output_string + assert mock_models.NODE_TEMPLATE_NAME in self.logger_output_string + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'service_template_name', 'asc'), + ('', ' --descending', 'service_template_name', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_no_specified_service_template(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates list{sort_by}{order}'.format(sort_by=sort_by, order=order)) + assert 'Listing all node templates...' in self.logger_output_string + assert 'Listing node templates for service template {name}...'\ + .format(name=mock_models.SERVICE_TEMPLATE_NAME) not in self.logger_output_string + + node_templates_list = mock_storage.node_template.list + node_templates_list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={}) + assert 'Node templates:' in self.logger_output_string + assert mock_models.SERVICE_TEMPLATE_NAME in self.logger_output_string + assert mock_models.NODE_TEMPLATE_NAME in self.logger_output_string diff --git a/tests/cli/test_nodes.py b/tests/cli/test_nodes.py new file mode 100644 index 00000000..0233989f --- /dev/null +++ b/tests/cli/test_nodes.py @@ -0,0 +1,101 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import mock + +from aria.cli.env import _Environment + +from .base_test import ( # pylint: disable=unused-import + TestCliBase, + mock_storage +) +from ..mock import models as mock_models + + +class TestNodesShow(TestCliBase): + + def test_header_strings(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('nodes show 1') + assert 'Showing node 1' in self.logger_output_string + assert 'Node:' in self.logger_output_string + assert 'Node attributes:' in self.logger_output_string + + def test_no_attributes(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('nodes show 2') + assert 'No attributes' in self.logger_output_string + assert 'attribute1' not in self.logger_output_string + assert 'value1' not in self.logger_output_string + + def test_one_attribute(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + m = mock.MagicMock( + return_value=mock_models.create_node_with_dependencies(include_attribute=True)) + monkeypatch.setattr(mock_storage.node, 'get', m) + self.invoke('nodes show 3') + assert 'No attributes' not in self.logger_output_string + assert 'attribute1' in self.logger_output_string + assert 'value1' in self.logger_output_string + + +class TestNodesList(TestCliBase): + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'service_name', 'asc'), + ('', ' --descending', 'service_name', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_specified_service(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('nodes list -s test_s{sort_by}{order}'.format(sort_by=sort_by, + order=order)) + assert 'Listing nodes for service test_s...' in self.logger_output_string + assert 'Listing all nodes...' not in self.logger_output_string + + nodes_list = mock_storage.node.list + nodes_list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={'service': mock.ANY}) + assert 'Nodes:' in self.logger_output_string + assert 'test_s' in self.logger_output_string + assert 'test_n' in self.logger_output_string + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'service_name', 'asc'), + ('', ' --descending', 'service_name', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_no_specified_service(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('nodes list{sort_by}{order}'.format(sort_by=sort_by, + order=order)) + assert 'Listing nodes for service test_s...' 
not in self.logger_output_string + assert 'Listing all nodes...' in self.logger_output_string + + nodes_list = mock_storage.node.list + nodes_list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={}) + assert 'Nodes:' in self.logger_output_string + assert 'test_s' in self.logger_output_string + assert 'test_n' in self.logger_output_string diff --git a/tests/cli/test_service_templates.py b/tests/cli/test_service_templates.py new file mode 100644 index 00000000..01b3f678 --- /dev/null +++ b/tests/cli/test_service_templates.py @@ -0,0 +1,246 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import mock + +from aria.cli import service_template_utils, csar +from aria.cli.env import _Environment +from aria.core import Core +from aria.exceptions import AriaException +from aria.storage import exceptions as storage_exceptions + +from .base_test import ( # pylint: disable=unused-import + TestCliBase, + assert_exception_raised, + raise_exception, + mock_storage +) +from ..mock import models as mock_models + + +class TestServiceTemplatesShow(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates show test_st') + assert 'Showing service template test_st...' 
in self.logger_output_string + + def test_no_services_no_description(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates show test_st') + + assert 'Description:' not in self.logger_output_string + assert 'Existing services:' not in self.logger_output_string + + def test_no_services_yes_description(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + st = mock_models.create_service_template(description='test_description') + monkeypatch.setattr(mock_storage.service_template, 'get_by_name', + mock.MagicMock(return_value=st)) + + self.invoke('service_templates show test_st') + assert 'Description:' in self.logger_output_string + assert 'test_description' in self.logger_output_string + assert 'Existing services:' not in self.logger_output_string + + def test_one_service_no_description(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + st = mock_models.create_service_template() + st.services = [mock_models.create_service(st)] + monkeypatch.setattr(mock_storage.service_template, 'get_by_name', + mock.MagicMock(return_value=st)) + + self.invoke('service_templates show test_st') + + assert 'Description:' not in self.logger_output_string + assert 'Existing services:' in self.logger_output_string + assert mock_models.SERVICE_NAME in self.logger_output_string + + def test_one_service_yes_description(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + st = mock_models.create_service_template(description='test_description') + st.services = [mock_models.create_service(st)] + monkeypatch.setattr(mock_storage.service_template, 'get_by_name', + mock.MagicMock(return_value=st)) + + self.invoke('service_templates show test_st') + + assert 'Description:' in self.logger_output_string + assert 'test_description' in self.logger_output_string + assert 'Existing services:' in self.logger_output_string + assert 'test_s' in self.logger_output_string + + +class TestServiceTemplatesList(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates list') + assert 'Listing all service templates...' in self.logger_output_string + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'created_at', 'asc'), + ('', ' --descending', 'created_at', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_all_sorting_combinations(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates list{sort_by}{order}'.format(sort_by=sort_by, order=order)) + + mock_storage.service_template.list.assert_called_with( + sort={sort_by_in_output: order_in_output}) + assert mock_models.SERVICE_TEMPLATE_NAME in self.logger_output_string + + +class TestServiceTemplatesStore(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates store stubpath test_st') + assert 'Storing service template test_st...' 
in self.logger_output_string + + def test_store_no_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(Core, 'create_service_template', mock_object) + monkeypatch.setattr(service_template_utils, 'get', mock_object) + self.invoke('service_templates store stubpath {name}'.format( + name=mock_models.SERVICE_TEMPLATE_NAME)) + assert 'Service template {name} stored'.format( + name=mock_models.SERVICE_TEMPLATE_NAME) in self.logger_output_string + + def test_store_raises_exception_resulting_from_name_uniqueness(self, monkeypatch, mock_object): + + monkeypatch.setattr(service_template_utils, 'get', mock_object) + monkeypatch.setattr(Core, + 'create_service_template', + raise_exception(storage_exceptions.NotFoundError, + msg='UNIQUE constraint failed')) + + assert_exception_raised( + self.invoke('service_templates store stubpath test_st'), + expected_exception=storage_exceptions.NotFoundError, + expected_msg='There already a exists a service template with the same name') + + def test_store_raises_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(service_template_utils, 'get', mock_object) + monkeypatch.setattr(Core, + 'create_service_template', + raise_exception(storage_exceptions.NotFoundError)) + + assert_exception_raised( + self.invoke('service_templates store stubpath test_st'), + expected_exception=storage_exceptions.StorageError) + + +class TestServiceTemplatesDelete(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates delete test_st') + assert 'Deleting service template test_st...' in self.logger_output_string + + def test_delete_no_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + monkeypatch.setattr(Core, 'delete_service_template', mock_object) + self.invoke('service_templates delete {name}'.format( + name=mock_models.SERVICE_TEMPLATE_NAME)) + assert 'Service template {name} deleted'.format( + name=mock_models.SERVICE_TEMPLATE_NAME) in self.logger_output_string + + def test_delete_raises_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + monkeypatch.setattr(Core, + 'delete_service_template', + raise_exception(storage_exceptions.StorageError)) + + assert_exception_raised( + self.invoke('service_templates delete test_st'), + expected_exception=storage_exceptions.StorageError, + expected_msg='') + + +class TestServiceTemplatesInputs(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates inputs test_st') + assert 'Showing inputs for service template test_st...' 
in self.logger_output_string + + def test_inputs_existing_inputs(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + input = mock_models.create_parameter(name='input1', value='value1') + st = mock_models.create_service_template(inputs={'input1': input}) + monkeypatch.setattr(mock_storage.service_template, 'get_by_name', + mock.MagicMock(return_value=st)) + + self.invoke('service_templates inputs with_inputs') + assert 'input1' in self.logger_output_string and 'value1' in self.logger_output_string + + def test_inputs_no_inputs(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates inputs without_inputs') + assert 'No inputs' in self.logger_output_string + + +class TestServiceTemplatesValidate(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates validate stubpath') + assert 'Validating service template: stubpath' in self.logger_output_string + + def test_validate_no_exception(self, monkeypatch, mock_object): + monkeypatch.setattr(Core, 'validate_service_template', mock_object) + monkeypatch.setattr(service_template_utils, 'get', mock_object) + self.invoke('service_templates validate stubpath') + assert 'Service template validated successfully' in self.logger_output_string + + def test_validate_raises_exception(self, monkeypatch, mock_object): + monkeypatch.setattr(Core, 'validate_service_template', raise_exception(AriaException)) + monkeypatch.setattr(service_template_utils, 'get', mock_object) + assert_exception_raised( + self.invoke('service_templates validate stubpath'), + expected_exception=AriaException) + + +class TestServiceTemplatesCreateArchive(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates create_archive stubpath stubdest') + assert 'Creating a csar archive' in self.logger_output_string + + def test_create_archive_successful(self, monkeypatch, mock_object): + monkeypatch.setattr(csar, 'write', mock_object) + self.invoke('service_templates create_archive stubpath stubdest') + assert 'Csar archive created at stubdest' in self.logger_output_string diff --git a/tests/cli/test_services.py b/tests/cli/test_services.py new file mode 100644 index 00000000..b1a6ee4e --- /dev/null +++ b/tests/cli/test_services.py @@ -0,0 +1,205 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
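The list-command tests in this package all share one mechanism: the model storage attached to the CLI environment is a MagicMock (built in tests/cli/utils.py further below), so each test can assert exactly which sort and filter keyword arguments the command derived from its flags. A condensed sketch of that mechanism in isolation, with illustrative field names only:

    from mock import MagicMock

    storage_list = MagicMock(return_value=[])

    # What the CLI layer would do after parsing '--sort-by name --descending':
    storage_list(sort={'name': 'desc'}, filters={})

    # What the test then verifies, down to the exact keyword arguments:
    storage_list.assert_called_once_with(sort={'name': 'desc'}, filters={})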
+ +import pytest +import mock + +from aria.cli.env import _Environment +from aria.core import Core +from aria.exceptions import DependentActiveExecutionsError, DependentAvailableNodesError +from aria.modeling.exceptions import InputsException +from aria.storage import exceptions as storage_exceptions + +from .base_test import ( # pylint: disable=unused-import + TestCliBase, + raise_exception, + assert_exception_raised, + mock_storage +) +from ..mock import models as mock_models + + +class TestServicesList(TestCliBase): + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'created_at', 'asc'), + ('', ' --descending', 'created_at', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_no_specified_service_template(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services list{sort_by}{order}'.format(sort_by=sort_by, order=order)) + assert 'Listing all services...' in self.logger_output_string + assert 'Listing services for service template' not in self.logger_output_string + + mock_storage.service.list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={}) + assert 'Services:' in self.logger_output_string + assert mock_models.SERVICE_TEMPLATE_NAME in self.logger_output_string + assert mock_models.SERVICE_NAME in self.logger_output_string + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'created_at', 'asc'), + ('', ' --descending', 'created_at', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_specified_service_template(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services list -t test_st{sort_by}{order}'.format(sort_by=sort_by, order=order)) + assert 'Listing services for service template test_st...' in self.logger_output_string + assert 'Listing all services...' not in self.logger_output_string + + mock_storage.service.list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={'service_template': mock.ANY}) + assert 'Services:' in self.logger_output_string + assert mock_models.SERVICE_TEMPLATE_NAME in self.logger_output_string + assert mock_models.SERVICE_NAME in self.logger_output_string + + +class TestServicesCreate(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services create -t test_st test_s') + assert 'Creating new service from service template test_st...' in self.logger_output_string + + def test_no_exception(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + + m = mock.MagicMock(return_value=mock_models.create_service_with_dependencies()) + monkeypatch.setattr(Core, 'create_service', m) + self.invoke('services create -t test_st test_s') + assert "Service created. 
The service's name is test_s" in self.logger_output_string + + def test_raises_storage_error_resulting_from_name_uniqueness(self, monkeypatch, + mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + monkeypatch.setattr(Core, + 'create_service', + raise_exception(storage_exceptions.NotFoundError, + msg='UNIQUE constraint failed')) + assert_exception_raised( + self.invoke('services create -t test_st test_s'), + expected_exception=storage_exceptions.NotFoundError, + expected_msg='There already a exists a service with the same name') + + assert "Service created. The service's name is test_s" not in self.logger_output_string + + def test_raises_other_storage_error(self, monkeypatch, mock_object): + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + monkeypatch.setattr(Core, + 'create_service', + raise_exception(storage_exceptions.NotFoundError)) + + assert_exception_raised( + self.invoke('services create -t test_st test_s'), + expected_exception=storage_exceptions.NotFoundError) + + assert "Service created. The service's name is test_s" not in self.logger_output_string + + def test_raises_inputs_exception(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + monkeypatch.setattr(Core, + 'create_service', + raise_exception(InputsException)) + + assert_exception_raised( + self.invoke('services create -t with_inputs test_s'), + expected_exception=InputsException) + + assert "Service created. The service's name is test_s" not in self.logger_output_string + + +class TestServicesDelete(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services delete test_s') + assert 'Deleting service test_s...' in self.logger_output_string + + def test_delete_no_exception(self, monkeypatch, mock_storage, mock_object): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + monkeypatch.setattr(Core, 'delete_service', mock_object) + self.invoke('services delete test_s') + assert 'Service test_s deleted' in self.logger_output_string + + def test_delete_active_execution_error(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + mock_service_with_execution = \ + mock.MagicMock(return_value=mock_models.create_service_with_dependencies( + include_execution=True)) + monkeypatch.setattr(mock_storage.service, 'get', mock_service_with_execution) + assert_exception_raised( + self.invoke('services delete test_s'), + expected_exception=DependentActiveExecutionsError, + expected_msg="Can't delete service {name} - there is an active execution " + "for this service. Active execution id: 1".format( + name=mock_models.SERVICE_NAME)) + + def test_delete_available_nodes_error(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + assert_exception_raised( + self.invoke('services delete test_s'), + expected_exception=DependentAvailableNodesError, + expected_msg="Can't delete service {name} - there are available nodes " + "for this service. 
Available node ids: 1".format( + name=mock_models.SERVICE_NAME)) + + def test_delete_available_nodes_error_with_force(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services delete service_with_available_nodes --force') + + assert mock_storage.service.delete.call_count == 1 + assert 'Service service_with_available_nodes deleted' in self.logger_output_string + + +class TestServicesOutputs(TestCliBase): + pass + + +class TestServicesInputs(TestCliBase): + + def test_header_string(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services inputs test_s') + assert 'Showing inputs for service test_s...' in self.logger_output_string + + def test_inputs_no_inputs(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services inputs service_with_no_inputs') + + assert 'No inputs' in self.logger_output_string + assert 'input1' not in self.logger_output_string + assert 'value1' not in self.logger_output_string + + def test_inputs_one_input(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + s = mock_models.create_service_with_dependencies(include_input=True) + monkeypatch.setattr(mock_storage.service, 'get_by_name', mock.MagicMock(return_value=s)) + + self.invoke('services inputs test_s') + + assert 'input1' in self.logger_output_string + assert 'value1' in self.logger_output_string + assert 'No inputs' not in self.logger_output_string diff --git a/tests/cli/utils.py b/tests/cli/utils.py new file mode 100644 index 00000000..a1e0c9a9 --- /dev/null +++ b/tests/cli/utils.py @@ -0,0 +1,101 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from mock import MagicMock + +from ..mock import models as mock_models + + +def setup_logger(logger_name, + level=logging.INFO, + handlers=None, + remove_existing_handlers=True, + logger_format=None, + propagate=True): + """ + :param logger_name: Name of the logger. + :param level: Level for the logger (not for specific handler). + :param handlers: An optional list of handlers (formatter will be + overridden); If None, only a StreamHandler for + sys.stdout will be used. + :param remove_existing_handlers: Determines whether to remove existing + handlers before adding new ones + :param logger_format: the format this logger will have. + :param propagate: propagate the message the parent logger. + :return: A logger instance. 
+ :rtype: logging.Logger + """ + + logger = logging.getLogger(logger_name) + + if remove_existing_handlers: + for handler in logger.handlers: + logger.removeHandler(handler) + + for handler in handlers: + if logger_format: + formatter = logging.Formatter(fmt=logger_format) + handler.setFormatter(formatter) + logger.addHandler(handler) + + logger.setLevel(level) + if not propagate: + logger.propagate = False + + return logger + + +class MockStorage(object): + + def __init__(self): + self.service_template = MockServiceTemplateStorage() + self.service = MockServiceStorage() + self.node_template = MockNodeTemplateStorage() + self.node = MockNodeStorage() + + +class MockServiceTemplateStorage(object): + + def __init__(self): + self.list = MagicMock(return_value=[mock_models.create_service_template()]) + self.get_by_name = MagicMock(return_value=mock_models.create_service_template()) + + +class MockServiceStorage(object): + + def __init__(self): + + self.s = mock_models.create_service_with_dependencies() + + self.list = MagicMock(return_value=[self.s]) + self.create = MagicMock(return_value=self.s) + self.get = MagicMock( + return_value=mock_models.create_service_with_dependencies(include_node=True)) + self.get_by_name = MagicMock(return_value=self.s) + self.delete = MagicMock() + + +class MockNodeTemplateStorage(object): + def __init__(self): + self.get = MagicMock(return_value=mock_models.create_node_template_with_dependencies()) + self.list = MagicMock(return_value=[mock_models.create_node_template_with_dependencies()]) + + +class MockNodeStorage(object): + def __init__(self): + self.get = MagicMock(return_value=mock_models.create_node_with_dependencies()) + self.list = MagicMock(return_value=[mock_models.create_node_with_dependencies()]) diff --git a/tests/conftest.py b/tests/conftest.py index c501eeb4..8f2c273f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,6 +18,7 @@ import pytest import aria +from aria import logger @pytest.fixture(scope='session', autouse=True) @@ -37,11 +38,10 @@ def logging_handler_cleanup(request): :return: """ def clear_logging_handlers(): - logged_ctx_names = [ - aria.orchestrator.context.workflow.WorkflowContext.__name__, - aria.orchestrator.context.operation.NodeOperationContext.__name__, - aria.orchestrator.context.operation.RelationshipOperationContext.__name__ - ] - for logger_name in logged_ctx_names: - logging.getLogger(logger_name).handlers = [] + logging.getLogger(logger.TASK_LOGGER_NAME).handlers = [] request.addfinalizer(clear_logging_handlers) + + +@pytest.fixture +def mock_object(mocker): + return mocker.MagicMock() diff --git a/tests/end2end/test_orchestrator.py b/tests/end2end/test_orchestrator.py deleted file mode 100644 index 4dfca446..00000000 --- a/tests/end2end/test_orchestrator.py +++ /dev/null @@ -1,63 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -import os - -from aria.orchestrator.runner import Runner -from aria.orchestrator.workflows.builtin import BUILTIN_WORKFLOWS -from aria.utils.imports import import_fullname -from aria.utils.collections import OrderedDict -from aria.cli.dry import convert_to_dry - -from tests.parser.service_templates import consume_node_cellar - - -WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies') - - -def test_install(): - _workflow('install') - - -def test_custom(): - _workflow('maintenance_on') - - -def _workflow(workflow_name): - context, _ = consume_node_cellar() - - convert_to_dry(context.modeling.instance) - - # TODO: this logic will eventually stabilize and be part of the ARIA API, - # likely somewhere in aria.orchestrator.workflows - if workflow_name in BUILTIN_WORKFLOWS: - workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.' + workflow_name) - inputs = {} - else: - workflow = context.modeling.instance.policies[workflow_name] - sys.path.append(os.path.dirname(str(context.presentation.location))) - workflow_fn = import_fullname(workflow.properties['implementation'].value) - inputs = OrderedDict([ - (k, v.value) for k, v in workflow.properties.iteritems() - if k not in WORKFLOW_POLICY_INTERNAL_PROPERTIES - ]) - - def _initialize_storage(model_storage): - context.modeling.store(model_storage) - - runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage, - lambda: context.modeling.instance.id) - runner.run() diff --git a/tests/fixtures.py b/tests/fixtures.py new file mode 100644 index 00000000..3b1b9b59 --- /dev/null +++ b/tests/fixtures.py @@ -0,0 +1,70 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import shutil + +import pytest + +from aria import ( + application_model_storage, + application_resource_storage +) +from aria.orchestrator import plugin +from aria.storage import ( + sql_mapi, + filesystem_rapi +) + +from . 
import storage + + +@pytest.fixture +def inmemory_model(): + model = application_model_storage(sql_mapi.SQLAlchemyModelAPI, + initiator=storage.init_inmemory_model_storage) + yield model + storage.release_sqlite_storage(model) + + +@pytest.fixture +def fs_model(tmpdir): + result = application_model_storage(sql_mapi.SQLAlchemyModelAPI, + initiator_kwargs=dict(base_dir=str(tmpdir)), + initiator=sql_mapi.init_storage) + yield result + storage.release_sqlite_storage(result) + + +@pytest.fixture +def resource_storage(tmpdir): + result = tmpdir.join('resources') + result.mkdir() + resource_storage = application_resource_storage( + filesystem_rapi.FileSystemResourceAPI, + api_kwargs=dict(directory=str(result))) + yield resource_storage + shutil.rmtree(str(result)) + + +@pytest.fixture +def plugins_dir(tmpdir): + result = tmpdir.join('plugins') + result.mkdir() + return str(result) + + +@pytest.fixture +def plugin_manager(model, plugins_dir): + return plugin.PluginManager(model=model, plugins_dir=plugins_dir) diff --git a/tests/mock/context.py b/tests/mock/context.py index f943d7e8..ac0a8a73 100644 --- a/tests/mock/context.py +++ b/tests/mock/context.py @@ -39,12 +39,17 @@ def simple(tmpdir, inmemory=False, context_kwargs=None, topology=None): api_kwargs=dict(directory=os.path.join(tmpdir, 'resources')) ) + service_id = topology(model_storage) + execution = models.create_execution(model_storage.service.get(service_id)) + model_storage.execution.put(execution) + final_kwargs = dict( name='simple_context', model_storage=model_storage, resource_storage=resource_storage, - service_id=topology(model_storage), + service_id=service_id, workflow_name=models.WORKFLOW_NAME, + execution_id=execution.id, task_max_attempts=models.TASK_MAX_ATTEMPTS, task_retry_interval=models.TASK_RETRY_INTERVAL ) diff --git a/tests/mock/models.py b/tests/mock/models.py index 1d29e2d5..cdedea93 100644 --- a/tests/mock/models.py +++ b/tests/mock/models.py @@ -37,10 +37,11 @@ NORMATIVE_REMOVE_SOURCE ) -SERVICE_NAME = 'test_service_name' -SERVICE_TEMPLATE_NAME = 'test_service_template_name' -WORKFLOW_NAME = 'test_workflow_name' -EXECUTION_NAME = 'test_execution_name' +SERVICE_TEMPLATE_NAME = 'test_service_template' +SERVICE_NAME = 'test_service1' +NODE_TEMPLATE_NAME = 'test_node_template' +NODE_NAME = 'test_node1' +WORKFLOW_NAME = 'test_workflow' TASK_RETRY_INTERVAL = 1 TASK_MAX_ATTEMPTS = 1 @@ -50,11 +51,13 @@ DEPENDENT_NODE_NAME = 'dependent_node' -def create_service_template(name=SERVICE_TEMPLATE_NAME): +def create_service_template(name=SERVICE_TEMPLATE_NAME, description=None, inputs=None): now = datetime.now() + inputs = inputs or {} return models.ServiceTemplate( name=name, - description=None, + description=description, + inputs=inputs, created_at=now, updated_at=now, main_file_name='main_file_name', @@ -68,10 +71,12 @@ def create_service_template(name=SERVICE_TEMPLATE_NAME): ) -def create_service(service_template, name=SERVICE_NAME): +def create_service(service_template, name=SERVICE_NAME, inputs=None): now = datetime.utcnow() + inputs = inputs or {} return models.Service( name=name, + inputs=inputs, service_template=service_template, description='', created_at=now, @@ -81,6 +86,73 @@ def create_service(service_template, name=SERVICE_NAME): ) +def create_service_with_dependencies(include_execution=False, + include_input=False, + include_node=False): + service_template = create_service_template() + service = create_service(service_template=service_template) + if include_execution: + execution = 
create_execution(service=service, status=models.Execution.STARTED) + service.executions = [execution] + execution.id = '1' + if include_input: + input = create_parameter(name='input1', value='value1') + service.inputs = {'input1': input} + if include_node: + node_template = create_node_template(service_template=service_template) + node = create_node(node_template, service, state=models.Node.STARTED) + node.id = '1' + return service + + +def create_node_template_with_dependencies(include_node=False, include_property=False): + service_template = create_service_template() + node_template = create_node_template(service_template=service_template) + if include_node: + service = create_service(service_template=service_template) + create_node(dependency_node_template=node_template, service=service) + if include_property: + node_template.properties = {'prop1': create_parameter(name='prop1', value='value1')} + return node_template + + +def create_node_with_dependencies(include_attribute=False): + + node_template = create_node_template_with_dependencies() + node_template.service_template.services[0] = create_service(node_template.service_template) + node = create_node(node_template, node_template.service_template.services[0]) + if include_attribute: + node.runtime_properties = {'attribute1': 'value1'} + return node + + +def create_node_template(service_template, + name=NODE_TEMPLATE_NAME, + type=models.Type(variant='node', name='test_node_type'), + capability_templates=None, + requirement_templates=None, + interface_templates=None, + default_instances=1, + min_instances=1, + max_instances=1): + capability_templates = capability_templates or {} + requirement_templates = requirement_templates or [] + interface_templates = interface_templates or {} + node_template = models.NodeTemplate( + name=name, + type=type, + capability_templates=capability_templates, + requirement_templates=requirement_templates, + interface_templates=interface_templates, + default_instances=default_instances, + min_instances=min_instances, + max_instances=max_instances, + service_template=service_template) + + service_template.node_templates[node_template.name] = node_template + return node_template + + def create_dependency_node_template(service_template, name=DEPENDENCY_NODE_TEMPLATE_NAME): node_type = service_template.node_types.get_descendant('test_node_type') capability_type = service_template.capability_types.get_descendant('test_capability_type') @@ -89,18 +161,12 @@ def create_dependency_node_template(service_template, name=DEPENDENCY_NODE_TEMPL name='capability', type=capability_type ) - - node_template = models.NodeTemplate( + return create_node_template( + service_template=service_template, name=name, type=node_type, - capability_templates=_dictify(capability_template), - default_instances=1, - min_instances=1, - max_instances=1, - service_template=service_template + capability_templates=_dictify(capability_template) ) - service_template.node_templates[node_template.name] = node_template - return node_template def create_dependent_node_template( @@ -111,29 +177,25 @@ def create_dependent_node_template( name='requirement', target_node_template=dependency_node_template ) - - node_template = models.NodeTemplate( + return create_node_template( + service_template=service_template, name=name, type=the_type, - default_instances=1, - min_instances=1, - max_instances=1, interface_templates=_dictify(get_standard_interface_template(service_template)), requirement_templates=[requirement_template], - 
service_template=service_template ) - service_template.node_templates[node_template.name] = node_template - return node_template -def create_node(name, dependency_node_template, service): +def create_node(dependency_node_template, service, name=NODE_NAME, state=models.Node.INITIAL, + runtime_properties=None): + runtime_properties = runtime_properties or {} node = models.Node( name=name, type=dependency_node_template.type, - runtime_properties={'ip': '1.1.1.1'}, + runtime_properties=runtime_properties, version=None, node_template=dependency_node_template, - state=models.Node.INITIAL, + state=state, scaling_groups=[], service=service, interfaces=get_standard_interface(service), @@ -168,6 +230,13 @@ def create_interface_template(service_template, interface_name, operation_name, def create_interface(service, interface_name, operation_name, operation_kwargs=None, interface_kwargs=None): the_type = service.service_template.interface_types.get_descendant('test_interface_type') + + if operation_kwargs and operation_kwargs.get('inputs'): + operation_kwargs['inputs'] = dict( + (input_name, models.Parameter.wrap(input_name, input_value)) + for input_name, input_value in operation_kwargs['inputs'].iteritems() + if input_value is not None) + operation = models.Operation( name=operation_name, **(operation_kwargs or {}) @@ -180,13 +249,14 @@ def create_interface(service, interface_name, operation_name, operation_kwargs=N ) -def create_execution(service): +def create_execution(service, status=models.Execution.PENDING): return models.Execution( service=service, - status=models.Execution.STARTED, + status=status, workflow_name=WORKFLOW_NAME, + created_at=datetime.utcnow(), started_at=datetime.utcnow(), - parameters=None + inputs={} ) @@ -214,6 +284,11 @@ def create_plugin_specification(name='test_plugin', version='0.1'): ) +def create_parameter(name, value): + p = models.Parameter() + return p.wrap(name, value) + + def _dictify(item): return dict(((item.name, item),)) diff --git a/tests/mock/topology.py b/tests/mock/topology.py index e5b4e014..bfb7b4e8 100644 --- a/tests/mock/topology.py +++ b/tests/mock/topology.py @@ -33,7 +33,7 @@ def create_simple_topology_single_node(model_storage, create_operation): ) node_template.interface_templates[interface_template.name] = interface_template # pylint: disable=unsubscriptable-object - node = models.create_node(models.DEPENDENCY_NODE_NAME, node_template, service) + node = models.create_node(node_template, service, name=models.DEPENDENCY_NODE_NAME) interface = models.create_interface( service, 'Standard', 'create', @@ -59,9 +59,9 @@ def create_simple_topology_two_nodes(model_storage): dependency_node_template) dependency_node = models.create_node( - models.DEPENDENCY_NODE_NAME, dependency_node_template, service) + dependency_node_template, service, models.DEPENDENCY_NODE_NAME) dependent_node = models.create_node( - models.DEPENDENT_NODE_NAME, dependent_node_template, service) + dependent_node_template, service, models.DEPENDENT_NODE_NAME) dependent_node.outbound_relationships.append(models.create_relationship( # pylint: disable=no-member source=dependent_node, @@ -86,7 +86,7 @@ def create_simple_topology_three_nodes(model_storage): service = model_storage.service.get(service_id) third_node_template = models.create_dependency_node_template( service.service_template, name='another_dependency_node_template') - third_node = models.create_node('another_dependency_node', third_node_template, service) + third_node = models.create_node(third_node_template, service, 
                                          'another_dependency_node')
     new_relationship = models.create_relationship(
         source=model_storage.node.get_by_name(models.DEPENDENT_NODE_NAME),
         target=third_node,
diff --git a/tests/mock/workflow.py b/tests/mock/workflow.py
new file mode 100644
index 00000000..b12b9fa7
--- /dev/null
+++ b/tests/mock/workflow.py
@@ -0,0 +1,26 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from aria.orchestrator.decorators import workflow
+
+
+@workflow
+def mock_workflow(graph, ctx, output_path=None, **kwargs):  # pylint: disable=unused-argument
+    if output_path:
+        # writes call arguments to the specified output file
+        with open(output_path, 'w') as f:
+            json.dump(kwargs, f)
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index bd4eba49..d64cdba4 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -100,12 +100,13 @@ def _nodes_storage():
     service = storage.service.get_by_name(mock.models.SERVICE_NAME)
     dependency_node_template = storage.node_template.get_by_name(
         mock.models.DEPENDENCY_NODE_TEMPLATE_NAME)
-    mock.models.create_node(mock.models.DEPENDENCY_NODE_NAME, dependency_node_template, service)
+    mock.models.create_node(dependency_node_template, service,
+                            name=mock.models.DEPENDENCY_NODE_NAME)

     dependent_node_template = mock.models.create_dependent_node_template(service.service_template,
                                                                          dependency_node_template)
-    mock.models.create_node(mock.models.DEPENDENT_NODE_NAME, dependent_node_template, service)
+    mock.models.create_node(dependent_node_template, service, name=mock.models.DEPENDENT_NODE_NAME)

     storage.service.update(service)
     return storage
@@ -180,7 +181,7 @@ class TestServiceTemplate(object):
     @pytest.mark.parametrize(
         'is_valid, description, created_at, updated_at, main_file_name',
         [
-            (False, {}, now, now, '/path'),
+            (False, [], now, now, '/path'),
             (False, 'description', 'error', now, '/path'),
             (False, 'description', now, 'error', '/path'),
             (False, 'description', now, now, {}),
@@ -253,7 +254,7 @@ def test_service_model_creation(self, service_storage, is_valid, name, created_a
 class TestExecution(object):

     @pytest.mark.parametrize(
-        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, parameters, '
+        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, inputs, '
         'status, workflow_name',
         [
             (False, m_cls, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
@@ -268,11 +269,11 @@ class TestExecution(object):
             (True, now, None, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
             (True, now, now, None, 'error', False, {}, Execution.STARTED, 'wf_name'),
             (True, now, now, now, None, False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'),
+            (True, now, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
         ]
     )
     def test_execution_model_creation(self, service_storage, is_valid, created_at, started_at,
-                                      ended_at, error, is_system_workflow, parameters, status,
+                                      ended_at, error, is_system_workflow, inputs, status,
                                       workflow_name):
         execution = _test_model(
             is_valid=is_valid,
@@ -285,7 +286,7 @@ def test_execution_model_creation(self, service_storage, is_valid, created_at, s
                 ended_at=ended_at,
                 error=error,
                 is_system_workflow=is_system_workflow,
-                parameters=parameters,
+                inputs=inputs,
                 status=status,
                 workflow_name=workflow_name,
             ))
@@ -299,7 +300,7 @@ def create_execution(status):
                 id='e_id',
                 workflow_name='w_name',
                 status=status,
-                parameters={},
+                inputs={},
                 created_at=now,
             )
             return execution
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index af8b4541..c3994743 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -69,16 +69,17 @@ def test_node_operation_task_execution(ctx, thread_executor):
     interface_name = 'Standard'
     operation_name = 'create'

+    inputs = {'putput': True}
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+                              inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
-    inputs = {'putput': True}

     @workflow
     def basic_workflow(graph, **_):
@@ -124,17 +125,18 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
     interface_name = 'Configure'
     operation_name = 'post_configure'

+    inputs = {'putput': True}
     relationship = ctx.model.relationship.list()[0]
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__)),
+        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+                              inputs=inputs),
     )
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
-    inputs = {'putput': True}

     @workflow
     def basic_workflow(graph, **_):
@@ -231,21 +233,21 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
     plugin = mock.models.create_plugin()
     ctx.model.plugin.put(plugin)
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    filename = 'test_file'
+    content = 'file content'
+    inputs = {'filename': filename, 'content': content}
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
             implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
-            plugin=plugin)
+            plugin=plugin,
+            inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
-    filename = 'test_file'
-    content = 'file content'
-    inputs = {'filename': filename, 'content': content}
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(api.task.OperationTask.for_node(node=node,
@@ -277,21 +279,22 @@ def test_node_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]

     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+    inputs = {
+        'op_start': 'op_start',
+        'op_end': 'op_end',
+    }
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
-            implementation=op_path(logged_operation, module_path=__name__))
+            implementation=op_path(logged_operation, module_path=__name__),
+            inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)

-    inputs = {
-        'op_start': 'op_start',
-        'op_end': 'op_end',
-    }
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
@@ -311,20 +314,20 @@ def test_relationship_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0]

     relationship = ctx.model.relationship.list()[0]
+    inputs = {
+        'op_start': 'op_start',
+        'op_end': 'op_end',
+    }
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__),
+                              inputs=inputs)
     )
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)

-    inputs = {
-        'op_start': 'op_start',
-        'op_end': 'op_end',
-    }
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py
index 696e9b30..8249086c 100644
--- a/tests/orchestrator/context/test_resource_render.py
+++ b/tests/orchestrator/context/test_resource_render.py
@@ -64,9 +64,9 @@ def resources(tmpdir, ctx):
     implicit_ctx_template_path.write(_IMPLICIT_CTX_TEMPLATE)
     variables_template_path = tmpdir.join(_VARIABLES_TEMPLATE_PATH)
     variables_template_path.write(_VARIABLES_TEMPLATE)
-    ctx.resource.deployment.upload(entry_id='1',
-                                   source=str(implicit_ctx_template_path),
-                                   path=_IMPLICIT_CTX_TEMPLATE_PATH)
-    ctx.resource.deployment.upload(entry_id='1',
-                                   source=str(variables_template_path),
-                                   path=_VARIABLES_TEMPLATE_PATH)
+    ctx.resource.service.upload(entry_id='1',
+                                source=str(implicit_ctx_template_path),
+                                path=_IMPLICIT_CTX_TEMPLATE_PATH)
+    ctx.resource.service.upload(entry_id='1',
+                                source=str(variables_template_path),
+                                path=_VARIABLES_TEMPLATE_PATH)
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 8b809b36..f4acc365 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -15,8 +15,6 @@

 import pytest

-import aria
-from aria.storage import sql_mapi
 from aria.orchestrator.workflows import api
 from aria.orchestrator.workflows.core import engine
 from aria.orchestrator.workflows.executor import process
@@ -34,7 +32,7 @@ def test_serialize_operation_context(context, executor, tmpdir):
     test_file = tmpdir.join(TEST_FILE_NAME)
     test_file.write(TEST_FILE_CONTENT)
     resource = context.resource
-    resource.blueprint.upload(TEST_FILE_ENTRY_ID, str(test_file))
+    resource.service_template.upload(TEST_FILE_ENTRY_ID, str(test_file))
     graph = _mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
@@ -72,7 +70,7 @@ def _mock_operation(ctx):
     # a correct ctx.deployment.name tells us we kept the correct deployment_id
     assert ctx.service.name == mock.models.SERVICE_NAME
     # Here we test that the resource storage was properly re-created
-    test_file_content = ctx.resource.blueprint.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
+    test_file_content = ctx.resource.service_template.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
     assert test_file_content == TEST_FILE_CONTENT
     # a non empty plugin workdir tells us that we kept the correct base_workdir
     assert ctx.plugin_workdir is not None
@@ -98,10 +96,3 @@ def context(tmpdir):
     yield result
     storage.release_sqlite_storage(result.model)
-
-
-@pytest.fixture
-def memory_model_storage():
-    result = aria.application_model_storage(sql_mapi.SQLAlchemyModelAPI)
-    yield result
-    storage.release_sqlite_storage(result)
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index cf82127a..213d9641 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -76,15 +76,16 @@ def test_host_ip(workflow_context, executor):
     interface_name = 'Standard'
     operation_name = 'create'
     _, dependency_node, _, _, _ = _get_elements(workflow_context)
+    inputs = {'putput': True}
     interface = mock.models.create_interface(
         dependency_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__),
+                              inputs=inputs)
     )
     dependency_node.interfaces[interface.name] = interface
     workflow_context.model.node.update(dependency_node)
-    inputs = {'putput': True}

     @workflow
     def basic_workflow(graph, **_):
@@ -106,17 +107,17 @@ def test_relationship_tool_belt(workflow_context, executor):
     interface_name = 'Configure'
     operation_name = 'post_configure'
     _, _, _, _, relationship = _get_elements(workflow_context)
+    inputs = {'putput': True}
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__),
+                              inputs=inputs)
     )
     relationship.interfaces[interface.name] = interface
     workflow_context.model.relationship.update(relationship)
-    inputs = {'putput': True}
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index fa1f3879..3c354356 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -35,7 +35,7 @@ def test_execution_creation_on_workflow_context_creation(self, storage):
         assert execution.service_template == storage.service_template.get_by_name(
             models.SERVICE_TEMPLATE_NAME)
         assert execution.status == storage.execution.model_cls.PENDING
-        assert execution.parameters == {}
+        assert execution.inputs == {}
         assert execution.created_at <= datetime.utcnow()

     def test_subsequent_workflow_context_creation_do_not_fail(self, storage):
@@ -49,11 +49,13 @@ def _create_ctx(storage):
        :param storage:
        :return WorkflowContext:
        """
+        service = storage.service.get_by_name(models.SERVICE_NAME)
         return context.workflow.WorkflowContext(
             name='simple_context',
             model_storage=storage,
             resource_storage=None,
-            service_id=storage.service.get_by_name(models.SERVICE_NAME).id,
+            service_id=service.id,
+            execution_id=storage.execution.list(filters=dict(service=service))[0].id,
             workflow_name=models.WORKFLOW_NAME,
             task_max_attempts=models.TASK_MAX_ATTEMPTS,
             task_retry_interval=models.TASK_RETRY_INTERVAL
@@ -66,6 +68,8 @@ def storage():
         sql_mapi.SQLAlchemyModelAPI, initiator=test_storage.init_inmemory_model_storage)
     workflow_storage.service_template.put(models.create_service_template())
     service_template = workflow_storage.service_template.get_by_name(models.SERVICE_TEMPLATE_NAME)
-    workflow_storage.service.put(models.create_service(service_template))
+    service = models.create_service(service_template)
+    workflow_storage.service.put(service)
+    workflow_storage.execution.put(models.create_execution(service))
     yield workflow_storage
     test_storage.release_sqlite_storage(workflow_storage)
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index e3612cf1..58506bad 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -460,14 +460,15 @@ def _run(self,
              env_var='value',
              inputs=None):
         local_script_path = script_path
-        script_path = os.path.basename(local_script_path) if local_script_path else None
+        script_path = os.path.basename(local_script_path) if local_script_path else ''
+        inputs = inputs or {}
+        process = process or {}
         if script_path:
-            workflow_context.resource.deployment.upload(
+            workflow_context.resource.service.upload(
                 entry_id=str(workflow_context.service.id),
                 source=local_script_path,
                 path=script_path)
-        inputs = inputs or {}
         inputs.update({
             'script_path': script_path,
             'process': process,
@@ -481,9 +482,11 @@ def mock_workflow(ctx, graph):
                 node.service,
                 'test',
                 'op',
-                operation_kwargs=dict(implementation='{0}.{1}'.format(
-                    operations.__name__,
-                    operations.run_script_locally.__name__))
+                operation_kwargs=dict(
+                    implementation='{0}.{1}'.format(
+                        operations.__name__,
+                        operations.run_script_locally.__name__),
+                    inputs=inputs)
             )
             node.interfaces[interface.name] = interface
             graph.add_tasks(api.task.OperationTask.for_node(
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index d86b6d2b..a75d59a9 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -217,29 +217,41 @@ def _execute(self,
         @workflow
         def mock_workflow(ctx, graph):
             node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+            inputs = {
+                'script_path': script_path,
+                'fabric_env': _FABRIC_ENV,
+                'process': process,
+                'use_sudo': use_sudo,
+                'custom_env_var': custom_input,
+                'test_operation': '',
+            }
+            if hide_output:
+                inputs['hide_output'] = hide_output
+            if commands:
+                inputs['commands'] = commands
             interface = mock.models.create_interface(
                 node.service,
                 'test',
                 'op',
-                operation_kwargs=dict(implementation='{0}.{1}'.format(
-                    operations.__name__,
-                    operation.__name__))
+                operation_kwargs=dict(
+                    implementation='{0}.{1}'.format(
+                        operations.__name__,
+                        operation.__name__),
+                    inputs=inputs)
             )
             node.interfaces[interface.name] = interface
-            graph.sequence(*[api.task.OperationTask.for_node(
-                node=node,
-                interface_name='test',
-                operation_name='op',
-                inputs={
-                    'script_path': script_path,
-                    'fabric_env': _FABRIC_ENV,
-                    'process': process,
-                    'use_sudo': use_sudo,
-                    'hide_output': hide_output,
-                    'custom_env_var': custom_input,
-                    'test_operation': test_operation,
-                    'commands': commands
-                }) for test_operation in test_operations])
+
+            ops = []
+            for test_operation in test_operations:
+                op_inputs = inputs.copy()
+                op_inputs['test_operation'] = test_operation
+                ops.append(api.task.OperationTask.for_node(
+                    node=node,
+                    interface_name='test',
+                    operation_name='op',
+                    inputs=op_inputs))
+
+            graph.sequence(*ops)
             return graph
         tasks_graph = mock_workflow(ctx=self._workflow_context)  # pylint: disable=no-value-for-parameter
         eng = engine.Engine(
@@ -258,7 +270,7 @@ def _execute_and_get_task_exception(self, *args, **kwargs):
         return collected[signal][0]['kwargs']['exception']

     def _upload(self, source, path):
-        self._workflow_context.resource.deployment.upload(
+        self._workflow_context.resource.service.upload(
             entry_id=str(self._workflow_context.service.id),
             source=source,
             path=path)
diff --git a/tests/orchestrator/test_runner.py b/tests/orchestrator/test_runner.py
deleted file mode 100644
index 74e98ad5..00000000
--- a/tests/orchestrator/test_runner.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria import workflow
-from aria.orchestrator import operation
-from aria.orchestrator.workflows.api.task import OperationTask
-from aria.orchestrator.runner import Runner
-
-from tests import mock
-
-import pytest
-
-
-OPERATION_RESULTS = {}
-
-
-@operation
-def mock_create_operation(ctx, key, value, **kwargs):  # pylint: disable=unused-argument
-    OPERATION_RESULTS[key] = value
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
-    OPERATION_RESULTS.clear()
-
-
-def test_runner_no_tasks():
-    @workflow
-    def workflow_fn(ctx, graph):  # pylint: disable=unused-argument
-        pass
-
-    _test_runner(workflow_fn)
-
-
-def test_runner_tasks():
-    @workflow
-    def workflow_fn(ctx, graph):
-        for node in ctx.model.node:
-            graph.add_tasks(
-                OperationTask.for_node(node=node,
-                                       interface_name='Standard',
-                                       operation_name='create'))
-
-    _test_runner(workflow_fn)
-
-    assert OPERATION_RESULTS.get('create') is True
-
-
-def _initialize_model_storage_fn(model_storage):
-    mock.topology.create_simple_topology_single_node(
-        model_storage,
-        '{0}.{1}'.format(__name__, mock_create_operation.__name__)
-    )
-
-
-def _test_runner(workflow_fn):
-    runner = Runner(workflow_name='runner workflow',
-                    workflow_fn=workflow_fn,
-                    inputs={},
-                    initialize_model_storage_fn=_initialize_model_storage_fn,
-                    service_id_fn=lambda: 1)
-    runner.run()
diff --git a/tests/orchestrator/test_workflow_runner.py b/tests/orchestrator/test_workflow_runner.py
new file mode 100644
index 00000000..54e940f6
--- /dev/null
+++ b/tests/orchestrator/test_workflow_runner.py
@@ -0,0 +1,292 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from datetime import datetime
+
+import pytest
+import mock
+
+from aria.modeling import exceptions as modeling_exceptions
+from aria.modeling import models
+from aria.orchestrator import exceptions
+from aria.orchestrator.workflow_runner import WorkflowRunner
+from aria.orchestrator.workflows.executor.process import ProcessExecutor
+
+from ..mock import (
+    topology,
+    workflow as workflow_mocks
+)
+from ..fixtures import (  # pylint: disable=unused-import
+    plugins_dir,
+    plugin_manager,
+    fs_model as model,
+    resource_storage as resource
+)
+
+
+def test_undeclared_workflow(request):
+    # validating a proper error is raised when the workflow is not declared in the service
+    with pytest.raises(exceptions.UndeclaredWorkflowError):
+        _create_workflow_runner(request, 'undeclared_workflow')
+
+
+def test_missing_workflow_implementation(service, request):
+    # validating a proper error is raised when the workflow code path does not exist
+    workflow = models.Operation(
+        name='test_workflow',
+        service=service,
+        implementation='nonexistent.workflow.implementation',
+        inputs={})
+    service.workflows['test_workflow'] = workflow
+
+    with pytest.raises(exceptions.WorkflowImplementationNotFoundError):
+        _create_workflow_runner(request, 'test_workflow')
+
+
+def test_builtin_workflow_instantiation(request):
+    # validates the workflow runner instantiates properly when provided with a builtin workflow
+    # (expecting no errors to be raised on undeclared workflow or missing workflow implementation)
+    workflow_runner = _create_workflow_runner(request, 'install')
+    tasks = list(workflow_runner._tasks_graph.tasks)
+    assert len(tasks) == 2  # expecting two WorkflowTasks
+
+
+def test_custom_workflow_instantiation(request):
+    # validates the workflow runner instantiates properly when provided with a custom workflow
+    # (expecting no errors to be raised on undeclared workflow or missing workflow implementation)
+    mock_workflow = _setup_mock_workflow_in_service(request)
+    workflow_runner = _create_workflow_runner(request, mock_workflow)
+    tasks = list(workflow_runner._tasks_graph.tasks)
+    assert len(tasks) == 0  # mock workflow creates no tasks
+
+
+def test_existing_active_executions(request, service, model):
+    existing_active_execution = models.Execution(
+        service=service,
+        status=models.Execution.STARTED,
+        workflow_name='uninstall')
+    model.execution.put(existing_active_execution)
+    with pytest.raises(exceptions.ActiveExecutionsError):
+        _create_workflow_runner(request, 'install')
+
+
+def test_existing_executions_but_no_active_ones(request, service, model):
+    existing_terminated_execution = models.Execution(
+        service=service,
+        status=models.Execution.TERMINATED,
+        workflow_name='uninstall')
+    model.execution.put(existing_terminated_execution)
+    # no active executions exist, so no error should be raised
+    _create_workflow_runner(request, 'install')
+
+
+def test_default_executor(request):
+    # validates the ProcessExecutor is used by the workflow runner by default
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+        _create_workflow_runner(request, mock_workflow)
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert isinstance(engine_kwargs.get('executor'), ProcessExecutor)
+
+
+def test_custom_executor(request):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    custom_executor = mock.MagicMock()
+    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+        _create_workflow_runner(request, mock_workflow, executor=custom_executor)
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert engine_kwargs.get('executor') == custom_executor
+
+
+def test_task_configuration_parameters(request):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    task_max_attempts = 5
+    task_retry_interval = 7
+    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+        _create_workflow_runner(request, mock_workflow, task_max_attempts=task_max_attempts,
+                                task_retry_interval=task_retry_interval)
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert engine_kwargs['workflow_context']._task_max_attempts == task_max_attempts
+        assert engine_kwargs['workflow_context']._task_retry_interval == task_retry_interval
+
+
+def test_execute(request, service):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    mock_engine = mock.MagicMock()
+    with mock.patch('aria.orchestrator.workflow_runner.Engine', return_value=mock_engine) \
+            as mock_engine_cls:
+        workflow_runner = _create_workflow_runner(request, mock_workflow)
+
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert engine_kwargs['workflow_context'].service.id == service.id
+        assert engine_kwargs['workflow_context'].execution.workflow_name == 'test_workflow'
+
+        workflow_runner.execute()
+        mock_engine.execute.assert_called_once_with()
+
+
+def test_cancel_execution(request):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    mock_engine = mock.MagicMock()
+    with mock.patch('aria.orchestrator.workflow_runner.Engine', return_value=mock_engine):
+        workflow_runner = _create_workflow_runner(request, mock_workflow)
+        workflow_runner.cancel()
+        mock_engine.cancel_execution.assert_called_once_with()
+
+
+def test_execution_model_creation(request, service, model):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+        workflow_runner = _create_workflow_runner(request, mock_workflow)
+
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert engine_kwargs['workflow_context'].execution == workflow_runner.execution
+        assert model.execution.get(workflow_runner.execution.id) == workflow_runner.execution
+        assert workflow_runner.execution.service.id == service.id
+        assert workflow_runner.execution.workflow_name == mock_workflow
+        assert workflow_runner.execution.created_at <= datetime.utcnow()
+        assert workflow_runner.execution.inputs == dict()
+
+
+def test_execution_inputs_override_workflow_inputs(request):
+    wf_inputs = {'input1': 'value1', 'input2': 'value2', 'input3': 5}
+    mock_workflow = _setup_mock_workflow_in_service(
+        request,
+        inputs=dict((name, models.Parameter.wrap(name, val)) for name, val
+                    in wf_inputs.iteritems()))
+
+    with mock.patch('aria.orchestrator.workflow_runner.Engine'):
+        workflow_runner = _create_workflow_runner(
+            request, mock_workflow, inputs={'input2': 'overriding-value2', 'input3': 7})
+
+        assert len(workflow_runner.execution.inputs) == 3
+        # did not override input1 - expecting the default value from the workflow inputs
+        assert workflow_runner.execution.inputs['input1'].value == 'value1'
+        # overrode input2
+        assert workflow_runner.execution.inputs['input2'].value == 'overriding-value2'
+        # overrode input of integer type
+        assert workflow_runner.execution.inputs['input3'].value == 7
+
+
+def test_execution_inputs_undeclared_inputs(request):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    with pytest.raises(modeling_exceptions.UndeclaredInputsException):
+        _create_workflow_runner(request, mock_workflow, inputs={'undeclared_input': 'value'})
+
+
+def test_execution_inputs_missing_required_inputs(request):
+    mock_workflow = _setup_mock_workflow_in_service(
+        request, inputs={'required_input': models.Parameter.wrap('required_input', value=None)})
+
+    with pytest.raises(modeling_exceptions.MissingRequiredInputsException):
+        _create_workflow_runner(request, mock_workflow, inputs={})
+
+
+def test_execution_inputs_wrong_type_inputs(request):
+    mock_workflow = _setup_mock_workflow_in_service(
+        request, inputs={'input': models.Parameter.wrap('input', 'value')})
+
+    with pytest.raises(modeling_exceptions.InputsOfWrongTypeException):
+        _create_workflow_runner(request, mock_workflow, inputs={'input': 5})
+
+
+def test_execution_inputs_builtin_workflow_with_inputs(request):
+    # built-in workflows don't have inputs
+    with pytest.raises(modeling_exceptions.UndeclaredInputsException):
+        _create_workflow_runner(request, 'install', inputs={'undeclared_input': 'value'})
+
+
+def test_workflow_function_parameters(request, tmpdir):
+    # validating the workflow function is passed with the
+    # merged execution inputs, in dict form
+
+    # the workflow function parameters will be written to this file
+    output_path = str(tmpdir.join('output'))
+    wf_inputs = {'output_path': output_path, 'input1': 'value1', 'input2': 'value2', 'input3': 5}
+
+    mock_workflow = _setup_mock_workflow_in_service(
+        request, inputs=dict((name, models.Parameter.wrap(name, val)) for name, val
+                             in wf_inputs.iteritems()))
+
+    _create_workflow_runner(request, mock_workflow,
+                            inputs={'input2': 'overriding-value2', 'input3': 7})
+
+    with open(output_path) as f:
+        wf_call_kwargs = json.load(f)
+    assert len(wf_call_kwargs) == 3
+    assert wf_call_kwargs.get('input1') == 'value1'
+    assert wf_call_kwargs.get('input2') == 'overriding-value2'
+    assert wf_call_kwargs.get('input3') == 7
+
+
+@pytest.fixture
+def service(model):
+    # sets up a service in the storage
+    service_id = topology.create_simple_topology_two_nodes(model)
+    service = model.service.get(service_id)
+    return service
+
+
+def _setup_mock_workflow_in_service(request, inputs=None):
+    # sets up a mock workflow as part of the service, including uploading
+    # the workflow code to the service's dir on the resource storage
+    service = request.getfuncargvalue('service')
+    resource = request.getfuncargvalue('resource')
+
+    source = workflow_mocks.__file__
+    resource.service_template.upload(str(service.service_template.id), source)
+    mock_workflow_name = 'test_workflow'
+    workflow = models.Operation(
+        name=mock_workflow_name,
+        service=service,
+        implementation='workflow.mock_workflow',
+        inputs=inputs or {})
+    service.workflows[mock_workflow_name] = workflow
+    return mock_workflow_name
+
+
+def _create_workflow_runner(request, workflow_name, inputs=None, executor=None,
+                            task_max_attempts=None, task_retry_interval=None):
+    # helper method for instantiating a workflow runner
+    service_id = request.getfuncargvalue('service').id
+    model = request.getfuncargvalue('model')
+    resource = request.getfuncargvalue('resource')
+    plugin_manager = request.getfuncargvalue('plugin_manager')
+
+    # task configuration parameters can't be set to None, therefore only
+    # passing those if they've been set by the test
+    task_configuration_kwargs = dict()
+    if task_max_attempts is not None:
+        task_configuration_kwargs['task_max_attempts'] = task_max_attempts
+    if task_retry_interval is not None:
+        task_configuration_kwargs['task_retry_interval'] = task_retry_interval
+
+    return WorkflowRunner(
+        workflow_name=workflow_name,
+        service_id=service_id,
+        inputs=inputs or {},
+        executor=executor,
+        model_storage=model,
+        resource_storage=resource,
+        plugin_manager=plugin_manager,
+        **task_configuration_kwargs)
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index a7051998..ab623619 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -44,17 +44,19 @@ def test_node_operation_task_creation(self, ctx):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.node.update(plugin)

+        inputs = {'test_input': True}
+
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path'))
+                                  implementation='op_path',
+                                  inputs=inputs),)

         node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
         node.interfaces[interface_name] = interface
         ctx.model.node.update(node)
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
         ignore_failure = True
@@ -90,17 +92,19 @@ def test_source_relationship_operation_task_creation(self, ctx):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.plugin.update(plugin)

+        inputs = {'test_input': True}
+
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path')
+                                  implementation='op_path',
+                                  inputs=inputs)
         )

         relationship = ctx.model.relationship.list()[0]
         relationship.interfaces[interface.name] = interface
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
@@ -133,17 +137,19 @@ def test_target_relationship_operation_task_creation(self, ctx):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.node.update(plugin)

+        inputs = {'test_input': True}
+
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path')
+                                  implementation='op_path',
+                                  inputs=inputs)
         )

         relationship = ctx.model.relationship.list()[0]
         relationship.interfaces[interface.name] = interface
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 0b48870c..1a88f13d 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -61,12 +61,18 @@ def _op(func, ctx,
         retry_interval=None,
         ignore_failure=None):
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+    operation_kwargs = dict(implementation='{name}.{func.__name__}'.format(
+        name=__name__, func=func))
+    if inputs:
+        # the operation has to declare the inputs before those may be passed
+        operation_kwargs['inputs'] = inputs
+
     interface = mock.models.create_interface(
         node.service,
         'aria.interfaces.lifecycle',
         'create',
-        operation_kwargs=dict(implementation='{name}.{func.__name__}'.format(name=__name__,
-                                                                             func=func))
+        operation_kwargs=operation_kwargs
     )
     node.interfaces[interface.name] = interface
     return api.task.OperationTask.for_node(
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
similarity index 100%
rename from tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
rename to tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 502c9fda..839b9f1c 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -21,19 +21,18 @@

 import pytest

-from aria import application_model_storage
 from aria.modeling import models as aria_models
-from aria.storage import sql_mapi
-from aria.orchestrator import (
-    events,
-    plugin
-)
+from aria.orchestrator import events
 from aria.utils.plugin import create as create_plugin
 from aria.orchestrator.workflows.executor import process
-
 import tests.storage
 import tests.resources
+from tests.fixtures import (  # pylint: disable=unused-import
+    plugins_dir,
+    plugin_manager,
+    fs_model as model
+)


 class TestProcessExecutor(object):
@@ -74,27 +73,6 @@ def test_closed(self, executor):
         assert 'closed' in exc_info.value.message


-@pytest.fixture
-def model(tmpdir):
-    result = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                       initiator_kwargs=dict(base_dir=str(tmpdir)),
-                                       initiator=sql_mapi.init_storage)
-    yield result
-    tests.storage.release_sqlite_storage(result)
-
-
-@pytest.fixture
-def plugins_dir(tmpdir):
-    result = tmpdir.join('plugins')
-    result.mkdir()
-    return str(result)
-
-
-@pytest.fixture
-def plugin_manager(model, plugins_dir):
-    return plugin.PluginManager(model=model, plugins_dir=plugins_dir)
-
-
 @pytest.fixture
 def executor(plugin_manager):
     result = process.ProcessExecutor(plugin_manager=plugin_manager)
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index 6d0eb5bc..88e7ae05 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -90,7 +90,8 @@ def _node(ctx):
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__))
+        operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__),
+                              inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     context.model.node.update(node)
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 0988faec..7ae337de 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -42,7 +42,8 @@ def mock_workflow(ctx, graph):
             interface_name,
             operation_name,
             operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
-                                                                  _mock_operation.__name__))
+                                                                  _mock_operation.__name__),
+                                  inputs=inputs)
         )
         node.interfaces[interface.name] = interface
         task = api.task.OperationTask.for_node(node=node,
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index 5512189f..3a8c54b7 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -90,17 +90,19 @@ def mock_workflow(ctx, graph):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
         interface_name = 'test_interface'
         operation_name = 'operation'
+        wf_inputs = inputs or {}
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(implementation=_operation_mapping(op_func))
+            operation_kwargs=dict(implementation=_operation_mapping(op_func),
+                                  inputs=wf_inputs)
         )
         node.interfaces[interface.name] = interface
         task = api.task.OperationTask.for_node(node=node,
                                                interface_name=interface_name,
                                                operation_name=operation_name,
-                                               inputs=inputs or {})
+                                               inputs=wf_inputs)
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
diff --git a/tests/parser/service_templates.py b/tests/parser/service_templates.py
index a07fba80..a8fde14e 100644
--- a/tests/parser/service_templates.py
+++ b/tests/parser/service_templates.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
+
 from aria.utils.caching import cachedmethod

 from .utils import (get_example_uri, get_test_uri, create_context, create_consumer)
@@ -23,7 +25,9 @@ def consume_use_case(use_case_name, consumer_class_name='instance', cache=True):
     uri = get_example_uri('tosca-simple-1.0', 'use-cases', use_case_name,
                           '{0}.yaml'.format(use_case_name))
     context = create_context(uri)
-    #context.args.append('--inputs=' + get_example_uri('node-cellar', 'inputs.yaml'))
+    inputs_file = get_example_uri('tosca-simple-1.0', 'use-cases', use_case_name, 'inputs.yaml')
+    if os.path.isfile(inputs_file):
+        context.args.append('--inputs={0}'.format(inputs_file))
     consumer, dumper = create_consumer(context, consumer_class_name)
     consumer.consume()
     context.validation.dump_issues()
diff --git a/tests/end2end/test_tosca_simple_v1_0.py b/tests/parser/test_tosca_simple_v1_0.py
similarity index 97%
rename from tests/end2end/test_tosca_simple_v1_0.py
rename to tests/parser/test_tosca_simple_v1_0.py
index 4658fc38..a583db55 100644
--- a/tests/end2end/test_tosca_simple_v1_0.py
+++ b/tests/parser/test_tosca_simple_v1_0.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from tests.parser.service_templates import (consume_use_case, consume_node_cellar)
+from .service_templates import (consume_use_case, consume_node_cellar)


 # Use Cases
diff --git a/tests/utils/test_plugin.py b/tests/utils/test_plugin.py
index 09885ef7..3350247e 100644
--- a/tests/utils/test_plugin.py
+++ b/tests/utils/test_plugin.py
@@ -17,13 +17,14 @@

 import pytest

-from aria import application_model_storage
 from aria.orchestrator import exceptions
-from aria.orchestrator import plugin
 from aria.utils.plugin import create as create_plugin
-from aria.storage import sql_mapi

-from .. import storage
+from ..fixtures import (  # pylint: disable=unused-import
+    plugins_dir,
+    plugin_manager,
+    inmemory_model as model
+)


 PACKAGE_NAME = 'mock-plugin'
@@ -47,26 +48,6 @@ def test_install_already_exits(self, plugin_manager, mock_plugin):
         plugin_manager.install(mock_plugin)


-@pytest.fixture
-def model():
-    model = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                      initiator=storage.init_inmemory_model_storage)
-    yield model
-    storage.release_sqlite_storage(model)
-
-
-@pytest.fixture
-def plugins_dir(tmpdir):
-    result = tmpdir.join('plugins')
-    result.mkdir()
-    return str(result)
-
-
-@pytest.fixture
-def plugin_manager(model, plugins_dir):
-    return plugin.PluginManager(model=model, plugins_dir=plugins_dir)
-
-
 @pytest.fixture
 def mock_plugin(tmpdir):
     source_dir = tmpdir.join('mock_plugin')
diff --git a/aria/cli/config.py b/tests/utils/test_threading.py
similarity index 51%
rename from aria/cli/config.py
rename to tests/utils/test_threading.py
index d82886d9..d24661fb 100644
--- a/aria/cli/config.py
+++ b/tests/utils/test_threading.py
@@ -13,34 +13,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-"""
-CLI configuration
-"""
-import os
-import logging
-from getpass import getuser
-from tempfile import gettempdir
+import pytest

-from yaml import safe_load
+from aria.utils import threading

-from .storage import config_file_path

-# path to a file where cli logs will be saved.
-logging_filename = os.path.join(gettempdir(), 'aria_cli_{0}.log'.format(getuser()))
-# loggers log level to show
-logger_level = logging.INFO
-# loggers log level to show
-colors = True
+def test_exception_raised_from_thread():

-import_resolver = None
+    def error_raising_func():
+        raise ValueError('This is an error')

+    thread = threading.ExceptionThread(target=error_raising_func)
+    thread.start()
+    thread.join()

-def load_configurations():
-    """
-    Dynamically load attributes into the config module from the ``config.yaml`` defined in the user
-    configuration directory
-    """
-    config_path = config_file_path()
-    with open(config_path) as config_file:
-        globals().update(safe_load(config_file) or {})
+    assert thread.is_error()
+    with pytest.raises(ValueError):
+        thread.raise_error_if_exists()
diff --git a/tox.ini b/tox.ini
index fa4bd5c1..6ad048fa 100644
--- a/tox.ini
+++ b/tox.ini
@@ -41,7 +41,7 @@ commands=pytest tests --cov-report term-missing --cov aria
 commands=pytest tests --cov-report term-missing --cov aria

 [testenv:pylint_code]
-commands=pylint --rcfile=aria/.pylintrc --disable=fixme,missing-docstring --ignore=commands.py aria
+commands=pylint --rcfile=aria/.pylintrc --disable=fixme,missing-docstring aria

 [testenv:pylint_tests]
 commands=pylint --rcfile=tests/.pylintrc --disable=fixme,missing-docstring tests
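
A note on the new API these tests exercise: WorkflowRunner is constructed with the keyword
arguments seen in the _create_workflow_runner helper above. A minimal usage sketch, assuming
model storage, resource storage and a plugin manager have already been bootstrapped (the
model_storage, resource_storage, plugin_manager and service_id names below are illustrative
placeholders, not part of the API):

    from aria.orchestrator.workflow_runner import WorkflowRunner

    runner = WorkflowRunner(
        workflow_name='install',        # a builtin workflow, or one declared on the service
        service_id=service_id,          # id of an existing service in the model storage
        inputs={},                      # merged over the workflow's declared inputs
        executor=None,                  # a ProcessExecutor is used when none is given
        model_storage=model_storage,
        resource_storage=resource_storage,
        plugin_manager=plugin_manager)
    runner.execute()                    # runs the engine; runner.cancel() stops an execution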