diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index b4a3e7f12c..3365aad1fe 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,7 +1,9 @@ ## Status -Ready/In Progress/In Hold (Reason for hold) +- [x] In Progress +- [ ] Ready +- [ ] In Hold - (Reason for hold) ## Related Issues fixes: link to the issue @@ -11,3 +13,7 @@ A few sentences describing the overall goals of the pull request's commits. ## Screenshots Paste here any images that will help the reviewer + +## Must have +- [ ] Tests +- [ ] Documentation diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ecf0edad19..39cc445e09 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,9 @@ repos: rev: v2.4.0 hooks: - id: trailing-whitespace + exclude: demisto_sdk/tests/test_files/* - id: end-of-file-fixer + exclude: demisto_sdk/tests/test_files/* - id: check-docstring-first exclude: demisto_sdk/tests/test_files/* - id: check-json @@ -20,3 +22,7 @@ repos: rev: v1.5 hooks: - id: autopep8 +- repo: https://github.com/pre-commit/mirrors-isort + rev: v4.3.21 + hooks: + - id: isort diff --git a/CHANGELOG.md b/CHANGELOG.md index ea12f019cb..4219f7cae4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog - +* Added the *-c, --command* option in *generate-docs* to generate a specific command from an integration. +* Fixed an issue in getting README/CHANGELOG files from git and loading them. +* Removed release notes validation for new content. +* Fixed secrets validation for files with the same name in a different directory. +* demisto-sdk lint - parallel execution with a configurable number of workers. +* demisto-sdk lint - logging levels output, 3 levels. +* demisto-sdk lint - json report, structured error reports in json format. +* demisto-sdk lint - xml junit report for unit-tests. +* demisto-sdk lint - New packages used in order to accelerate execution time. 
+ +#### 0.5.0 [PyPI History][1] [1]: https://pypi.org/project/demisto-sdk/#history @@ -12,6 +22,8 @@ * Added the *-fv --from-version*, *-nv --no-validation* arguments. * Removed the *-t yml_type* argument, the file type will be inferred. * Removed the *-g use_git* argument, running format without arguments will run automatically on git diff. +* Fixed an issue in loading playbooks with '=' character. +* Fixed an issue in *validate* failed on deleted README files. ### 0.4.8 * Added the *max* field to the Playbook schema, allowing to define it in tasks loop. diff --git a/CONTRIBUTION.md b/CONTRIBUTION.md index f79ce3953b..595bcf0c81 100644 --- a/CONTRIBUTION.md +++ b/CONTRIBUTION.md @@ -33,7 +33,7 @@ git clone https://github.com/demisto/demisto-sdk.git pip3 uninstall demisto-sdk ``` -2. Inside root directory of `demisto-sdk` repository - Install PyPi package as [editable package](https://pip.pypa.io/en/stable/reference/pip_install/): +2. Inside root directory of `demisto-sdk` repository - Install PyPi package as [editable package](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs): ```shell pip3 install -e . @@ -51,7 +51,7 @@ git clone https://github.com/demisto/demisto-sdk.git 4. Install dev-requirements - ```shell - pip3 install /resources/utils/requirements-dev.txt + pip3 install -r /requirements-dev.txt ``` --- diff --git a/README.md b/README.md index d3a6643eca..b76303b807 100644 --- a/README.md +++ b/README.md @@ -284,6 +284,14 @@ Generate documentation file for integration, playbook or script from yaml file. Each Command should be in a separate line. **For script** - the script example surrounded by double quotes. When the argument is empty, the documentation will be generate without examples. +* **-c, --command** + **For integration only** - A comma-separated value of commands to replace (e.g `panorama-get-address,panorama-create-address`). + It can be used only with an existing README.md file. 
+ If a new command is presented, it will be added to the end of the docs file. 
+ For an existing command, it will try to replace the existing command in the README.md file. If it fails, it will + add it to the end of the file as well. + + * **-id, --id_set** Path of updated id_set.json file, used for generates script documentation. When the argument is empty, the documentation will be generate without `Used In` section. @@ -301,6 +309,9 @@ This will generate documentation file to Tanium V2 integration in /Users/Documen `demisto-sdk generate-docs -o /Users/Documentations -i /demisto/content/Scripts/script-PrintErrorEntry.yml -id /demisto/content/Tests/id_set.json -e "!PrintErrorEntry message=Hi"` This will generate documentation file to PrintErrorEntry script in /Users/Documentations/README.md. id_set.json should be updated to gets all the integration that uses this script. +`demisto-sdk generate-docs -i /demisto/content/Integrations/Tanium_v2/Tanium_v2.yml -c tn-get-package` +This will generate the command section for `tn-get-package` and will replace it in the README.md file located in /demisto/content/Integrations/Tanium_v2/. + ## create-id-set Create the content dependency tree by ids. 
diff --git a/demisto_sdk/__main__.py b/demisto_sdk/__main__.py index 6d7e3ab8af..0efde5718c 100644 --- a/demisto_sdk/__main__.py +++ b/demisto_sdk/__main__.py @@ -1,34 +1,42 @@ # Site packages import os -from pkg_resources import get_distribution import sys +from pkg_resources import get_distribution + # Third party packages import click - -# Import demisto-sdk commands -from demisto_sdk.commands.run_cmd.runner import Runner +from demisto_sdk.commands.common.configuration import Configuration +# Common tools +from demisto_sdk.commands.common.tools import (find_type, + get_last_remote_release_version, + print_error, print_warning) +from demisto_sdk.commands.create_artifacts.content_creator import \ + ContentCreator +from demisto_sdk.commands.create_id_set.create_id_set import IDSetCreator +from demisto_sdk.commands.find_dependencies.find_dependencies import \ + PackDependencies from demisto_sdk.commands.format.format_module import format_manager -from demisto_sdk.commands.unify.unifier import Unifier -from demisto_sdk.commands.upload.uploader import Uploader +from demisto_sdk.commands.generate_docs.generate_integration_doc import \ + generate_integration_doc +from demisto_sdk.commands.generate_docs.generate_playbook_doc import \ + generate_playbook_doc +from demisto_sdk.commands.generate_docs.generate_script_doc import \ + generate_script_doc +from demisto_sdk.commands.generate_test_playbook.test_playbook_generator import \ + PlaybookTestsGenerator from demisto_sdk.commands.init.initiator import Initiator -from demisto_sdk.commands.split_yml.extractor import Extractor -from demisto_sdk.commands.common.configuration import Configuration +from demisto_sdk.commands.json_to_outputs.json_to_outputs import \ + json_to_outputs from demisto_sdk.commands.lint.lint_manager import LintManager -from demisto_sdk.commands.secrets.secrets import SecretsValidator +# Import demisto-sdk commands +from demisto_sdk.commands.run_cmd.runner import Runner from 
demisto_sdk.commands.run_playbook.playbook_runner import PlaybookRunner +from demisto_sdk.commands.secrets.secrets import SecretsValidator +from demisto_sdk.commands.split_yml.extractor import Extractor +from demisto_sdk.commands.unify.unifier import Unifier +from demisto_sdk.commands.upload.uploader import Uploader from demisto_sdk.commands.validate.file_validator import FilesValidator -from demisto_sdk.commands.create_artifacts.content_creator import ContentCreator -from demisto_sdk.commands.json_to_outputs.json_to_outputs import json_to_outputs -from demisto_sdk.commands.generate_test_playbook.test_playbook_generator import PlaybookTestsGenerator -from demisto_sdk.commands.generate_docs.generate_integration_doc import generate_integration_doc -from demisto_sdk.commands.generate_docs.generate_script_doc import generate_script_doc -from demisto_sdk.commands.generate_docs.generate_playbook_doc import generate_playbook_doc -from demisto_sdk.commands.create_id_set.create_id_set import IDSetCreator -from demisto_sdk.commands.find_dependencies.find_dependencies import PackDependencies - -# Common tools -from demisto_sdk.commands.common.tools import print_error, print_warning, get_last_remote_release_version, find_type class DemistoSDK: @@ -61,7 +69,7 @@ def main(config, version): f'You should consider upgrading via "pip3 install --upgrade demisto-sdk" command.') if version: version = get_distribution('demisto-sdk').version - print(version) + print(f'demisto-sdk {version}') # ====================== split-yml ====================== # @@ -219,8 +227,8 @@ def validate(config, **kwargs): name="create-content-artifacts", hidden=True, short_help='Create content artifacts. 
This will generate content_new.zip file which can be used to ' - 'upload to your server in order to upload a whole new content version to your Demisto ' - 'instance.', + 'upload to your server in order to upload a whole new content version to your Demisto ' + 'instance.', ) @click.help_option( '-h', '--help' @@ -268,52 +276,58 @@ def secrets(config, **kwargs): # ====================== lint ====================== # @main.command(name="lint", - short_help="Run lintings (flake8, mypy, pylint, bandit, vulture) and pytest. pylint and pytest will run " - "within the docker image of an integration/script. Meant to be used with integrations/scripts " - "that use the folder (package) structure. Will lookup up what docker image to use and will " - "setup the dev dependencies and file in the target folder. ") -@click.help_option( - '-h', '--help' -) -@click.option( - "-d", "--dir", help="Specify directory of integration/script") -@click.option( - "--no-pylint", is_flag=True, help="Do NOT run pylint linter") -@click.option( - "--no-mypy", is_flag=True, help="Do NOT run mypy static type checking") -@click.option( - "--no-flake8", is_flag=True, help="Do NOT run flake8 linter") -@click.option( - "--no-bandit", is_flag=True, help="Do NOT run bandit linter") -@click.option( - "--no-vulture", is_flag=True, help="Do NOT run vulture linter") -@click.option( - "--no-test", is_flag=True, help="Do NOT test (skip pytest)") -@click.option( - "-r", "--root", is_flag=True, help="Run pytest container with root user") -@click.option( - "-k", "--keep-container", is_flag=True, help="Keep the test container") -@click.option( - "-v", "--verbose", is_flag=True, help="Verbose output - mainly for debugging purposes") -@click.option( - "--cpu-num", - help="Number of CPUs to run pytest on (can set to `auto` for automatic detection of the number of CPUs)", - default=0) -@click.option( - "-p", "--parallel", is_flag=True, help="Run tests in parallel") -@click.option( - "-m", "--max-workers", type=int, 
help="How many threads to run in parallel") -@click.option( - "-g", "--git", is_flag=True, help="Will run only on changed packages") -@click.option( - "-a", "--run-all-tests", is_flag=True, help="Run lint on all directories in content repo") -@click.option( - "--outfile", help="Save failing packages to a file" -) -@pass_config -def lint(config, dir, **kwargs): - linter = LintManager(configuration=config.configuration, project_dir_list=dir, **kwargs) - return linter.run_dev_packages() + short_help="Lint command will perform:\n 1. Package in host checks - flake8, bandit, mypy, vulture.\n 2. " + "Package in docker image checks - pylint, pytest, powershell - test, powershell - analyze.\n " + "Meant to be used with integrations/scripts that use the folder (package) structure. Will lookup up what" + "docker image to use and will setup the dev dependencies and file in the target folder. ") +@click.help_option('-h', '--help') +@click.option("-i", "--input", help="Specify directory of integration/script", type=click.Path(exists=True, + resolve_path=True)) +@click.option("-g", "--git", is_flag=True, help="Will run only on changed packages") +@click.option("-a", "--all-packs", is_flag=True, help="Run lint on all directories in content repo") +@click.option('-v', "--verbose", count=True, help="Verbosity level -v / -vv / .. 
/ -vvv", + type=click.IntRange(0, 3, clamp=True), default=2, show_default=True) +@click.option('-q', "--quiet", is_flag=True, help="Quiet output, only output results in the end") +@click.option("-p", "--parallel", default=1, help="Run tests in parallel", type=click.IntRange(0, 15, clamp=True), + show_default=True) +@click.option("--no-flake8", is_flag=True, help="Do NOT run flake8 linter") +@click.option("--no-bandit", is_flag=True, help="Do NOT run bandit linter") +@click.option("--no-mypy", is_flag=True, help="Do NOT run mypy static type checking") +@click.option("--no-vulture", is_flag=True, help="Do NOT run vulture linter") +@click.option("--no-pylint", is_flag=True, help="Do NOT run pylint linter") +@click.option("--no-test", is_flag=True, help="Do NOT test (skip pytest)") +@click.option("--no-pwsh-analyze", is_flag=True, help="Do NOT run powershell analyze") +@click.option("--no-pwsh-test", is_flag=True, help="Do NOT run powershell test") +@click.option("-kc", "--keep-container", is_flag=True, help="Keep the test container") +@click.option("--test-xml", help="Path to store pytest xml results", type=click.Path(exists=True, resolve_path=True)) +@click.option("--json-report", help="Path to store json results", type=click.Path(exists=True, resolve_path=True)) +@click.option("-lp", "--log-path", help="Path to store all levels of logs", type=click.Path(exists=True, resolve_path=True)) +def lint(input: str, git: bool, all_packs: bool, verbose: int, quiet: bool, parallel: int, no_flake8: bool, + no_bandit: bool, no_mypy: bool, no_vulture: bool, no_pylint: bool, no_test: bool, no_pwsh_analyze: bool, + no_pwsh_test: bool, keep_container: bool, test_xml: str, json_report: str, log_path: str): + """Lint command will perform:\n + 1. Package in host checks - flake8, bandit, mypy, vulture.\n + 2. Package in docker image checks - pylint, pytest, powershell - test, powershell - analyze.\n + Meant to be used with integrations/scripts that use the folder (package) structure. 
Will lookup up what + docker image to use and will setup the dev dependencies and file in the target folder.""" + lint_manager = LintManager(input=input, + git=git, + all_packs=all_packs, + verbose=verbose, + quiet=quiet, + log_path=log_path) + return lint_manager.run_dev_packages(parallel=parallel, + no_flake8=no_flake8, + no_bandit=no_bandit, + no_mypy=no_mypy, + no_vulture=no_vulture, + no_pylint=no_pylint, + no_test=no_test, + no_pwsh_analyze=no_pwsh_analyze, + no_pwsh_test=no_pwsh_test, + keep_container=keep_container, + test_xml=test_xml, + json_report=json_report) # ====================== format ====================== # @@ -524,6 +538,11 @@ def init(**kwargs): @click.option( "-uc", "--use_cases", help="For integration - Top use-cases. Number the steps by '*' (i.e. '* foo. * bar.')", required=False) +@click.option( + "-c", "--command", help="A comma-separated command names to generate doc for, will ignore the rest of the commands." + "e.g (xdr-get-incidents,xdr-update-incident", + required=False +) @click.option( "-e", "--examples", help="Path for file containing command or script examples." " Each Command should be in a separate line." 
@@ -545,6 +564,7 @@ def init(**kwargs): def generate_doc(**kwargs): input_path = kwargs.get('input') output_path = kwargs.get('output') + command = kwargs.get('command') examples = kwargs.get('examples') permissions = kwargs.get('permissions') limitations = kwargs.get('limitations') @@ -564,9 +584,16 @@ def generate_doc(**kwargs): print_error(F'Output directory {output_path} was not found.') return 1 + if command: + if output_path and (not os.path.isfile(os.path.join(output_path, "README.md")))\ + or (not output_path)\ + and (not os.path.isfile(os.path.join(os.path.dirname(os.path.realpath(input_path)), "README.md"))): + print_error("The `command` argument must be presented with existing `README.md` docs.") + return 1 + file_type = find_type(kwargs.get('input', '')) if file_type not in ["integration", "script", "playbook"]: - print_error(F'File is not an Integration, Script or a Playbook.') + print_error('File is not an Integration, Script or a Playbook.') return 1 print(f'Start generating {file_type} documentation...') @@ -576,7 +603,7 @@ def generate_doc(**kwargs): return generate_integration_doc(input=input_path, output=output_path, use_cases=use_cases, examples=examples, permissions=permissions, command_permissions=command_permissions, limitations=limitations, - insecure=insecure, verbose=verbose) + insecure=insecure, verbose=verbose, command=command) elif file_type == 'script': return generate_script_doc(input=input_path, output=output_path, examples=examples, permissions=permissions, limitations=limitations, insecure=insecure, verbose=verbose) diff --git a/demisto_sdk/commands/common/configuration.py b/demisto_sdk/commands/common/configuration.py index 5339ee0119..28b9b68d4f 100644 --- a/demisto_sdk/commands/common/configuration.py +++ b/demisto_sdk/commands/common/configuration.py @@ -1,5 +1,5 @@ -import os import logging +from pathlib import Path class Configuration: @@ -15,6 +15,7 @@ class Configuration: def __init__(self, log_verbose=False, 
logging_level=logging.INFO): logging.basicConfig(level=logging_level) self.log_verbose = log_verbose - self.sdk_env_dir = os.path.dirname(os.path.dirname(os.path.join(__file__))) - self.env_dir = os.getcwd() - self.envs_dirs_base = os.path.join(self.sdk_env_dir, 'lint', 'dev_envs', 'default_python') + # refers to "demisto_sdk/commands" dir + self.sdk_env_dir = str(Path(__file__).parent.parent) + self.env_dir = str(Path().cwd()) + self.envs_dirs_base = str(Path(self.sdk_env_dir) / 'lint' / 'resources' / 'pipfile_python') diff --git a/demisto_sdk/commands/common/constants.py b/demisto_sdk/commands/common/constants.py index 4573fa9870..1790ea9ed6 100644 --- a/demisto_sdk/commands/common/constants.py +++ b/demisto_sdk/commands/common/constants.py @@ -10,6 +10,11 @@ def feed_wrong_from_version(file_path, given_fromversion, needed_from_version="5 return "{} is a feed and has wrong fromversion. got `{}` expected `{}`" \ .format(file_path, given_fromversion, needed_from_version) + @staticmethod + def pwsh_wrong_version(file_path, given_fromversion, needed_from_version='5.5.0'): + return (f'{file_path}: detected type: powershell and fromversion less than {needed_from_version}.' 
+ f' Found version: {given_fromversion}') + @staticmethod def not_used_display_name(file_path, field_name): return "The display details for {} will not be used in the file {} due to the type of the parameter".format( @@ -255,7 +260,7 @@ def found_hidden_param(parameter_name): INDICATOR_FIELDS_DIR = 'IndicatorFields' LAYOUTS_DIR = 'Layouts' CLASSIFIERS_DIR = 'Classifiers' -MISC_DIR = 'Packs/Base/Misc' +MISC_DIR = 'Misc' CONNECTIONS_DIR = 'Connections' BETA_INTEGRATIONS_DIR = 'Beta_Integrations' PACKS_DIR = 'Packs' @@ -1054,3 +1059,5 @@ class PB_Status: 'type': 8 } ] + +DOCS_COMMAND_SECTION_REGEX = r'(?:###\s{}).+?(?:(?=(?:\n###\s))|(?=(?:\n##\s))|\Z)' diff --git a/demisto_sdk/commands/common/dev_sh_scripts/pkg_dev_container_setup.sh b/demisto_sdk/commands/common/dev_sh_scripts/pkg_dev_container_setup.sh deleted file mode 100644 index 59b89925ed..0000000000 --- a/demisto_sdk/commands/common/dev_sh_scripts/pkg_dev_container_setup.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/sh - -# Setup a container for dev testing. 
-# -# In alpine will install necessary python dev tools and then use pip to install - -# exit on errors -set -e - -# /dev/stdin is coming in in the format of a requirements file as pip install expects to receive -# see code at pgk_dev_tasks_in_docker.py (method: docker_image_create) -cp /dev/stdin /pkg-dev-test-requirements.txt - -# for alpine we install dev dependencies to support dependencies that need native code (seen with typed_ast) -if [ -f "/etc/alpine-release" ]; then - apk --update add --no-cache --virtual .build-dependencies python-dev build-base wget -fi - -pip install -r /pkg-dev-test-requirements.txt - -# remove the dev dependencies -if [ -f "/etc/alpine-release" ]; then - apk del .build-dependencies -fi - -mkdir /devwork -chown :4000 /devwork -chmod 775 /devwork diff --git a/demisto_sdk/commands/common/dev_sh_scripts/pkg_dev_container_setup_pwsh.sh b/demisto_sdk/commands/common/dev_sh_scripts/pkg_dev_container_setup_pwsh.sh deleted file mode 100644 index a44a0ebda6..0000000000 --- a/demisto_sdk/commands/common/dev_sh_scripts/pkg_dev_container_setup_pwsh.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh - -# Setup a container for dev testing for pwsh -# - -# exit on errors -set -e - -echo "setting up powershell testing image..." - -if [ "${DEMISTO_LINT_UPDATE_CERTS}" = "yes" ]; then - echo "updating ca certificates ..." - update-ca-certificates -fi - -pwsh -Command Set-PSRepository -name PSGallery -installationpolicy trusted -pwsh -Command 'Install-Module -Name Pester -Scope AllUsers; Invoke-Pester -? | Out-Null' -pwsh -Command 'Install-Module -Name PSScriptAnalyzer -Scope AllUsers; Invoke-ScriptAnalyzer -? 
| Out-Null' diff --git a/demisto_sdk/commands/common/dev_sh_scripts/run_dev_tasks.sh b/demisto_sdk/commands/common/dev_sh_scripts/run_dev_tasks.sh deleted file mode 100755 index 63b4a0d756..0000000000 --- a/demisto_sdk/commands/common/dev_sh_scripts/run_dev_tasks.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/sh - -# Run pylint and pytest in the current directory. -# Used by pkg_dev_test_tasks.py to run pylint and pytest -# inside a docker. Since this is meant to run inside a minimal docker -# image it uses sh and not bash. Additionally, script tries to keep it -# simply and not use any shell utilities that may be missing in a minimal docker. - -# Env variables: -# PYLINT_FILES: file names to pass to pylint -# PYLINT_SKIP: if set will skip pylint -# PYTEST_SKIP: if set will skip pytest -# PYTEST_FAIL_NO_TESTS: if set will fail if no tests are defined -# CPU_NUM: number of CPUs to run tests on - -pylint_return=0 -if [ -z "${PYLINT_SKIP}" ]; then - echo "=============== Running pylint on files: ${PYLINT_FILES} ===============" - python -m pylint -E -e string -d duplicate-string-formatting-argument -f parseable --generated-members=requests.packages.urllib3,requests.codes.ok \ - ${PYLINT_FILES} - pylint_return=$? - echo "Pylint completed with status code: $pylint_return" -fi - -if [ -z "${PYTEST_SKIP}" ]; then - echo "" - echo "========= Running pytest ===============" -fi - -if [ -z "${PYTEST_SKIP}" -a -z "${PYTEST_FAIL_NO_TESTS}" ]; then - echo "collecting tests..." - collect_res=$(python -m pytest --collect-only 2>&1) - case "$collect_res" in - *"errors"*) - echo "========== Errors while collecting tests. Will execute tests anyway... =========" - echo "$collect_res" - ;; - *"collected 0 items"*) - echo "========= No tests found. Skipping. 
========" - echo "========= Output of: pytest --collect-only ========" - echo "$collect_res" - echo -e "\e[32m=============== End of Output for: ${PYLINT_FILES} ===============\e[39m" - PYTEST_SKIP=1 - ;; - esac -fi - -pytest_return=0 -if [ -z "${PYTEST_SKIP}" ]; then - python -m pytest -v -n="${CPU_NUM}" - pytest_return=$? - echo "Pytest completed with status code: $pytest_return" -fi - -if [ $pylint_return -ne 0 -o $pytest_return -ne 0 ]; then - echo "=========== ERRORS FOUND ===========" 1>&2 - echo "pylint/pytest returned errors. pylint: [$pylint_return], pytest: [$pytest_return]" 1>&2 - echo "====================================" 1>&2 - exit 3 -fi diff --git a/demisto_sdk/commands/common/dev_sh_scripts/run_dev_tasks_pwsh.sh b/demisto_sdk/commands/common/dev_sh_scripts/run_dev_tasks_pwsh.sh deleted file mode 100755 index 89d8c5fe3e..0000000000 --- a/demisto_sdk/commands/common/dev_sh_scripts/run_dev_tasks_pwsh.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/sh - -# Run ScriptAnalayzer (lint) and Pester (test) in the current directory. -# inside a docker. Since this is meant to run inside a minimal docker -# image it uses sh and not bash. Additionally, script tries to keep it -# simply and not use any shell utilities that may be missing in a minimal docker. - -# Env variables: -# PS_LINT_FILES: file names to pass to ScriptAnalayzer -# PS_LINT_SKIP: if set will skip lint -# PS_TEST_SKIP: if set will skip testing - -pslint_return=0 -if [ -z "${PS_LINT_SKIP}" ]; then - echo "=============== Running PowerShell ScriptAnalayzer on files: ${PS_LINT_FILES} ===============" - pwsh -Command "Invoke-ScriptAnalyzer -EnableExit -Path ${PS_LINT_FILES}" - pslint_return=$? - echo "PowerShell ScriptAnalayzer completed with status code: $pslint_return" -fi - -if [ -z "${PS_TEST_SKIP}" ]; then - echo "" - echo "========= Running PowerShell Tests ===============" - pwsh -Command Invoke-Pester -EnableExit - pstest_return=$? 
- echo "PowerShell Pester completed with status code: $pstest_return" -fi - -if [ $pslint_return -ne 0 -o $pstest_return -ne 0 ]; then - echo "=========== ERRORS FOUND ===========" 1>&2 - echo "lint/test returned errors. lint: [$pslint_return], test: [$pstest_return]" 1>&2 - echo "====================================" 1>&2 - exit 3 -fi diff --git a/demisto_sdk/commands/common/dev_sh_scripts/run_mypy.sh b/demisto_sdk/commands/common/dev_sh_scripts/run_mypy.sh deleted file mode 100755 index dcbc89c065..0000000000 --- a/demisto_sdk/commands/common/dev_sh_scripts/run_mypy.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env bash - -# Run mypy with a set of default parameters. -# Will not return an error code even if mypy returns so. -# This allows treating mypy as non-fatal. -# -# Arguments: python_version python_file -# -# Env: MYPY_NO_FAIL: if set will always return a 0 return code. Can be used in nightly to not fail the build. - -if [[ $# -lt 2 ]]; then - echo "Usage: $BASH_SOURCE " - echo "For example: $BASH_SOURCE 2.7 Active_Directory_Query.py" - exit 1 -fi - -PY_VERSION=$1 -PY_FILE=$2 -PY_BACKUP="" - -if [[ "$PY_VERSION" = "2.7" ]]; then - # typing import if not present - grep -E '^(from typing import|import typing)' "$PY_FILE" > /dev/null 2>&1 - if [[ $? -ne 0 ]]; then - PY_BACKUP="$PY_FILE.mypy_bak" - mv "$PY_FILE" "$PY_BACKUP" || exit 2 - sed -e '1s/^/from typing import *;/' "$PY_BACKUP" > "$PY_FILE" || exit 3 - fi -fi - -mypy --python-version $PY_VERSION --check-untyped-defs --ignore-missing-imports \ - --follow-imports=silent --show-column-numbers --show-error-codes --pretty \ - --allow-redefinition $PY_FILE 2>&1 - -res=$? 
- -if [[ -n "$PY_BACKUP" ]]; then - mv "$PY_BACKUP" "$PY_FILE" || exit 4 -fi - -if [[ -n "${MYPY_NO_FAIL}" ]]; then - exit 0 -fi - -exit $res diff --git a/demisto_sdk/commands/common/hook_validations/base_validator.py b/demisto_sdk/commands/common/hook_validations/base_validator.py index 3208f4cd4e..d23f7da8f5 100644 --- a/demisto_sdk/commands/common/hook_validations/base_validator.py +++ b/demisto_sdk/commands/common/hook_validations/base_validator.py @@ -2,11 +2,13 @@ import re from abc import abstractmethod -from demisto_sdk.commands.common.constants import Errors -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator -from demisto_sdk.commands.common.tools import print_error, get_release_notes_file_path, \ - get_latest_release_notes_text, run_command -from demisto_sdk.commands.common.constants import ID_IN_COMMONFIELDS, ID_IN_ROOT +from demisto_sdk.commands.common.constants import (ID_IN_COMMONFIELDS, + ID_IN_ROOT, Errors) +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator +from demisto_sdk.commands.common.tools import (get_latest_release_notes_text, + get_release_notes_file_path, + print_error, run_command) class BaseValidator: diff --git a/demisto_sdk/commands/common/hook_validations/classifier.py b/demisto_sdk/commands/common/hook_validations/classifier.py index d2f89ec993..4b5456a2b3 100644 --- a/demisto_sdk/commands/common/hook_validations/classifier.py +++ b/demisto_sdk/commands/common/hook_validations/classifier.py @@ -1,4 +1,5 @@ -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator class ClassifierValidator(BaseValidator): diff --git a/demisto_sdk/commands/common/hook_validations/dashboard.py b/demisto_sdk/commands/common/hook_validations/dashboard.py index 6551ca6b10..59624775e0 100644 --- a/demisto_sdk/commands/common/hook_validations/dashboard.py +++ 
b/demisto_sdk/commands/common/hook_validations/dashboard.py @@ -1,4 +1,5 @@ -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator from demisto_sdk.commands.common.tools import print_error diff --git a/demisto_sdk/commands/common/hook_validations/description.py b/demisto_sdk/commands/common/hook_validations/description.py index b095caf909..017b635099 100644 --- a/demisto_sdk/commands/common/hook_validations/description.py +++ b/demisto_sdk/commands/common/hook_validations/description.py @@ -1,7 +1,10 @@ import glob -from demisto_sdk.commands.common.tools import re, print_error, print_warning, os, get_yaml -from demisto_sdk.commands.common.constants import INTEGRATION_REGEX, BETA_INTEGRATION_REGEX, BETA_INTEGRATION_DISCLAIMER +from demisto_sdk.commands.common.constants import (BETA_INTEGRATION_DISCLAIMER, + BETA_INTEGRATION_REGEX, + INTEGRATION_REGEX) +from demisto_sdk.commands.common.tools import (get_yaml, os, print_error, + print_warning, re) class DescriptionValidator: diff --git a/demisto_sdk/commands/common/hook_validations/docker.py b/demisto_sdk/commands/common/hook_validations/docker.py index 1ddcc8855a..abd93c1d81 100644 --- a/demisto_sdk/commands/common/hook_validations/docker.py +++ b/demisto_sdk/commands/common/hook_validations/docker.py @@ -1,11 +1,13 @@ -from demisto_sdk.commands.common.constants import Errors -from demisto_sdk.commands.common.tools import get_yaml, print_error, print_warning +import re +from datetime import datetime, timedelta from distutils.version import LooseVersion + from pkg_resources import parse_version -from datetime import datetime, timedelta -import re -import requests +import requests +from demisto_sdk.commands.common.constants import Errors +from demisto_sdk.commands.common.tools import (get_yaml, print_error, + print_warning) # disable insecure warnings requests.packages.urllib3.disable_warnings() 
diff --git a/demisto_sdk/commands/common/hook_validations/id.py b/demisto_sdk/commands/common/hook_validations/id.py index 57907b814d..1c59a7059b 100644 --- a/demisto_sdk/commands/common/hook_validations/id.py +++ b/demisto_sdk/commands/common/hook_validations/id.py @@ -1,14 +1,25 @@ +import json import os import re -import json -from distutils.version import LooseVersion from collections import OrderedDict +from distutils.version import LooseVersion from demisto_sdk.commands.common.configuration import Configuration -from demisto_sdk.commands.common.tools import get_script_or_integration_id, collect_ids, print_error -from demisto_sdk.commands.common.constants import INTEGRATION_REGEX, TEST_PLAYBOOK_REGEX, SCRIPT_JS_REGEX, \ - SCRIPT_REGEX, TEST_SCRIPT_REGEX, INTEGRATION_YML_REGEX, PLAYBOOK_REGEX, SCRIPT_YML_REGEX, SCRIPT_PY_REGEX -from demisto_sdk.commands.common.update_id_set import get_script_data, get_playbook_data, get_integration_data +from demisto_sdk.commands.common.constants import (INTEGRATION_REGEX, + INTEGRATION_YML_REGEX, + PLAYBOOK_REGEX, + SCRIPT_JS_REGEX, + SCRIPT_PY_REGEX, + SCRIPT_REGEX, + SCRIPT_YML_REGEX, + TEST_PLAYBOOK_REGEX, + TEST_SCRIPT_REGEX) +from demisto_sdk.commands.common.tools import (collect_ids, + get_script_or_integration_id, + print_error) +from demisto_sdk.commands.common.update_id_set import (get_integration_data, + get_playbook_data, + get_script_data) from demisto_sdk.commands.unify.unifier import Unifier diff --git a/demisto_sdk/commands/common/hook_validations/image.py b/demisto_sdk/commands/common/hook_validations/image.py index 3f57ef2c6c..d959eb38d5 100644 --- a/demisto_sdk/commands/common/hook_validations/image.py +++ b/demisto_sdk/commands/common/hook_validations/image.py @@ -1,9 +1,14 @@ -import glob import base64 +import glob -from demisto_sdk.commands.common.tools import re, print_error, os, get_yaml, checked_type -from demisto_sdk.commands.common.constants import IMAGE_REGEX, INTEGRATION_REGEX, DEFAULT_IMAGE_BASE64, 
\ - DEFAULT_DBOT_IMAGE_BASE64, INTEGRATION_REGXES, YML_INTEGRATION_REGEXES +from demisto_sdk.commands.common.constants import (DEFAULT_DBOT_IMAGE_BASE64, + DEFAULT_IMAGE_BASE64, + IMAGE_REGEX, + INTEGRATION_REGEX, + INTEGRATION_REGXES, + YML_INTEGRATION_REGEXES) +from demisto_sdk.commands.common.tools import (checked_type, get_yaml, os, + print_error, re) class ImageValidator: diff --git a/demisto_sdk/commands/common/hook_validations/incident_field.py b/demisto_sdk/commands/common/hook_validations/incident_field.py index 36d239d2f4..8f9840eca7 100644 --- a/demisto_sdk/commands/common/hook_validations/incident_field.py +++ b/demisto_sdk/commands/common/hook_validations/incident_field.py @@ -1,12 +1,14 @@ """ This module is designed to validate the correctness of incident field entities in content. """ +import re from distutils.version import LooseVersion +from enum import Enum, IntEnum + from demisto_sdk.commands.common.constants import Errors -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator from demisto_sdk.commands.common.tools import print_error -from enum import Enum, IntEnum -import re class TypeFields(Enum): diff --git a/demisto_sdk/commands/common/hook_validations/incident_type.py b/demisto_sdk/commands/common/hook_validations/incident_type.py index c5ed25d703..67d97c1e60 100644 --- a/demisto_sdk/commands/common/hook_validations/incident_type.py +++ b/demisto_sdk/commands/common/hook_validations/incident_type.py @@ -1,7 +1,8 @@ from distutils.version import LooseVersion from demisto_sdk.commands.common.constants import Errors -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator from demisto_sdk.commands.common.tools import print_error diff --git a/demisto_sdk/commands/common/hook_validations/integration.py 
b/demisto_sdk/commands/common/hook_validations/integration.py index ed23000dab..7e9989db43 100644 --- a/demisto_sdk/commands/common/hook_validations/integration.py +++ b/demisto_sdk/commands/common/hook_validations/integration.py @@ -1,10 +1,22 @@ -from demisto_sdk.commands.common.constants import Errors, INTEGRATION_CATEGORIES, PYTHON_SUBTYPES, BANG_COMMAND_NAMES, \ - DBOT_SCORES_DICT, IOC_OUTPUTS_DICT, FEED_REQUIRED_PARAMS, FETCH_REQUIRED_PARAMS -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator -from demisto_sdk.commands.common.tools import print_error, print_warning, get_dockerimage45, server_version_compare +from demisto_sdk.commands.common.constants import (BANG_COMMAND_NAMES, + DBOT_SCORES_DICT, + FEED_REQUIRED_PARAMS, + FETCH_REQUIRED_PARAMS, + INTEGRATION_CATEGORIES, + IOC_OUTPUTS_DICT, + PYTHON_SUBTYPES, TYPE_PWSH, + Errors) +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator +from demisto_sdk.commands.common.hook_validations.description import \ + DescriptionValidator +from demisto_sdk.commands.common.hook_validations.docker import \ + DockerImageValidator +from demisto_sdk.commands.common.hook_validations.image import ImageValidator from demisto_sdk.commands.common.hook_validations.utils import is_v2_file - -from demisto_sdk.commands.common.hook_validations.docker import DockerImageValidator +from demisto_sdk.commands.common.tools import (get_dockerimage45, print_error, + print_warning, + server_version_compare) class IntegrationValidator(BaseValidator): @@ -42,9 +54,15 @@ def is_backward_compatible(self): ] return not any(answers) - def is_valid_file(self, validate_rn=True): - # type: (bool) -> bool - """Check whether the Integration is valid or not""" + def is_valid_file(self, validate_rn: bool = True) -> bool: + """Check whether the Integration is valid or not + + Args: + validate_rn (bool): Whether to validate release notes (changelog) or not. 
+ + Returns: + bool: True if integration is valid, False otherwise. + """ answers = [ super().is_valid_file(validate_rn), self.is_valid_subtype(), @@ -58,15 +76,26 @@ def is_valid_file(self, validate_rn=True): self.is_valid_fetch(), self.is_valid_display_name(), self.is_all_params_not_hidden(), + self.is_valid_pwsh(), + self.is_valid_image(), + self.is_valid_description(beta_integration=False), ] return all(answers) - def is_valid_beta_integration(self): - # type: () -> bool - """Check whether the beta Integration is valid or not, update the _is_valid field to determine that""" + def is_valid_beta_integration(self, validate_rn: bool = True) -> bool: + """Check whether the beta Integration is valid or not, update the _is_valid field to determine that + Args: + validate_rn (bool): Whether to validate release notes (changelog) or not. + + Returns: + bool: True if integration is valid, False otherwise. + """ answers = [ + super().is_valid_file(validate_rn), self.is_valid_default_arguments(), self.is_valid_beta(), + self.is_valid_image(), + self.is_valid_description(beta_integration=True), ] return all(answers) @@ -513,6 +542,14 @@ def is_valid_feed(self): valid_feed_params = self.all_feed_params_exist() return valid_from_version and valid_feed_params + def is_valid_pwsh(self) -> bool: + if self.current_file.get("script", {}).get("type") == TYPE_PWSH: + from_version = self.current_file.get("fromversion", "0.0.0") + if not from_version or server_version_compare("5.5.0", from_version) > 0: + print_error(Errors.pwsh_wrong_version(self.file_path, from_version)) + return False + return True + def is_valid_fetch(self) -> bool: """ validate that all required fields in integration that have fetch incidents are in the yml file. @@ -577,3 +614,29 @@ def is_all_params_not_hidden(self) -> bool: ans = False print_error(Errors.found_hidden_param(int_parameter.get('name'))) return ans + + def is_valid_image(self) -> bool: + """Verifies integration image/logo is valid. 
+ + Returns: + bool. True if integration image/logo is valid, False otherwise. + """ + image_validator = ImageValidator(self.file_path) + if not image_validator.is_valid(): + return False + return True + + def is_valid_description(self, beta_integration: bool = False) -> bool: + """Verifies integration description is valid. + + Returns: + bool: True if description is valid, False otherwise. + """ + description_validator = DescriptionValidator(self.file_path) + if beta_integration: + if not description_validator.is_valid_beta_description(): + return False + else: + if not description_validator.is_valid(): + return False + return True diff --git a/demisto_sdk/commands/common/hook_validations/layout.py b/demisto_sdk/commands/common/hook_validations/layout.py index 2d32fad411..f520cf036f 100644 --- a/demisto_sdk/commands/common/hook_validations/layout.py +++ b/demisto_sdk/commands/common/hook_validations/layout.py @@ -1,5 +1,6 @@ from demisto_sdk.commands.common.constants import Errors -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator from demisto_sdk.commands.common.tools import print_error diff --git a/demisto_sdk/commands/common/hook_validations/pack_unique_files.py b/demisto_sdk/commands/common/hook_validations/pack_unique_files.py index a9cd8ceea5..fbd9b8a3a1 100644 --- a/demisto_sdk/commands/common/hook_validations/pack_unique_files.py +++ b/demisto_sdk/commands/common/hook_validations/pack_unique_files.py @@ -1,17 +1,24 @@ """ This module is designed to validate the existence and structure of content pack essential files in content. 
""" -import os import io -import re import json +import os +import re +from demisto_sdk.commands.common.constants import (API_MODULES_PACK, + PACK_METADATA_CATEGORIES, + PACK_METADATA_DEPENDENCIES, + PACK_METADATA_FIELDS, + PACK_METADATA_KEYWORDS, + PACK_METADATA_PRICE, + PACK_METADATA_TAGS, + PACK_METADATA_USE_CASES, + PACKS_PACK_IGNORE_FILE_NAME, + PACKS_PACK_META_FILE_NAME, + PACKS_README_FILE_NAME, + PACKS_WHITELIST_FILE_NAME) from demisto_sdk.commands.common.tools import pack_name_to_path -from demisto_sdk.commands.common.constants import (PACKS_WHITELIST_FILE_NAME, PACKS_PACK_IGNORE_FILE_NAME, - PACKS_PACK_META_FILE_NAME, PACKS_README_FILE_NAME, - PACK_METADATA_FIELDS, PACK_METADATA_DEPENDENCIES, - PACK_METADATA_PRICE, PACK_METADATA_KEYWORDS, PACK_METADATA_TAGS, - PACK_METADATA_CATEGORIES, PACK_METADATA_USE_CASES, API_MODULES_PACK) class PackUniqueFilesValidator: diff --git a/demisto_sdk/commands/common/hook_validations/playbook.py b/demisto_sdk/commands/common/hook_validations/playbook.py index df6e46605a..946b6c483c 100644 --- a/demisto_sdk/commands/common/hook_validations/playbook.py +++ b/demisto_sdk/commands/common/hook_validations/playbook.py @@ -1,18 +1,19 @@ from typing import Dict +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator from demisto_sdk.commands.common.tools import print_error -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator - class PlaybookValidator(BaseValidator): """PlaybookValidator is designed to validate the correctness of the file structure we enter to content repo.""" - def is_valid_playbook(self, is_new_playbook=True): # type: (bool) -> bool + def is_valid_playbook(self, is_new_playbook: bool = True, validate_rn: bool = True) -> bool: """Check whether the playbook is valid or not. Args: is_new_playbook (bool): whether the playbook is new or modified + validate_rn (bool): whether we need to validate release notes or not Returns: bool. 
Whether the playbook is valid or not @@ -20,6 +21,7 @@ def is_valid_playbook(self, is_new_playbook=True): # type: (bool) -> bool if is_new_playbook: new_playbook_checks = [ + super().is_valid_file(validate_rn), self.is_valid_version(), self.is_id_equals_name(), self.is_no_rolename(), diff --git a/demisto_sdk/commands/common/hook_validations/readme.py b/demisto_sdk/commands/common/hook_validations/readme.py index 1f4fe6ea57..fe431dda2a 100644 --- a/demisto_sdk/commands/common/hook_validations/readme.py +++ b/demisto_sdk/commands/common/hook_validations/readme.py @@ -1,6 +1,8 @@ -from pathlib import Path import os -from demisto_sdk.commands.common.tools import print_error, print_warning, run_command_os, get_content_path +from pathlib import Path + +from demisto_sdk.commands.common.tools import (get_content_path, print_error, + print_warning, run_command_os) NO_HTML = '' YES_HTML = '' diff --git a/demisto_sdk/commands/common/hook_validations/release_notes.py b/demisto_sdk/commands/common/hook_validations/release_notes.py index a7b6af7cd3..9a49e6c3f2 100644 --- a/demisto_sdk/commands/common/hook_validations/release_notes.py +++ b/demisto_sdk/commands/common/hook_validations/release_notes.py @@ -3,9 +3,9 @@ import os import re -from demisto_sdk.commands.common.tools import run_command -from demisto_sdk.commands.common.tools import print_error, get_latest_release_notes_text, \ - get_release_notes_file_path +from demisto_sdk.commands.common.tools import (get_latest_release_notes_text, + get_release_notes_file_path, + print_error, run_command) class ReleaseNotesValidator: diff --git a/demisto_sdk/commands/common/hook_validations/reputation.py b/demisto_sdk/commands/common/hook_validations/reputation.py index 9ac4df4c78..522a90b27f 100644 --- a/demisto_sdk/commands/common/hook_validations/reputation.py +++ b/demisto_sdk/commands/common/hook_validations/reputation.py @@ -1,6 +1,8 @@ from distutils.version import LooseVersion + from demisto_sdk.commands.common.constants 
import Errors -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator from demisto_sdk.commands.common.tools import print_error diff --git a/demisto_sdk/commands/common/hook_validations/script.py b/demisto_sdk/commands/common/hook_validations/script.py index e634329e99..d2d98f6197 100644 --- a/demisto_sdk/commands/common/hook_validations/script.py +++ b/demisto_sdk/commands/common/hook_validations/script.py @@ -1,8 +1,12 @@ -from demisto_sdk.commands.common.constants import PYTHON_SUBTYPES, Errors -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator -from demisto_sdk.commands.common.tools import print_error, server_version_compare, get_dockerimage45 -from demisto_sdk.commands.common.hook_validations.docker import DockerImageValidator +from demisto_sdk.commands.common.constants import (PYTHON_SUBTYPES, TYPE_PWSH, + Errors) +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator +from demisto_sdk.commands.common.hook_validations.docker import \ + DockerImageValidator from demisto_sdk.commands.common.hook_validations.utils import is_v2_file +from demisto_sdk.commands.common.tools import (get_dockerimage45, print_error, + server_version_compare) class ScriptValidator(BaseValidator): @@ -57,6 +61,7 @@ def is_valid_file(self, validate_rn=True): self.is_valid_subtype(), self.is_id_equals_name(), self.is_docker_image_valid(), + self.is_valid_pwsh(), ]) # check only on added files if not self.old_file: @@ -186,3 +191,11 @@ def is_valid_name(self): print_error(Errors.invalid_v2_script_name(self.file_path)) return False return True + + def is_valid_pwsh(self) -> bool: + if self.current_file.get("type") == TYPE_PWSH: + from_version = self.current_file.get("fromversion", "0.0.0") + if not from_version or server_version_compare("5.5.0", from_version) > 0: + 
print_error(Errors.pwsh_wrong_version(self.file_path, from_version)) + return False + return True diff --git a/demisto_sdk/commands/common/hook_validations/structure.py b/demisto_sdk/commands/common/hook_validations/structure.py index d5e38e43b1..7dbe905b6a 100644 --- a/demisto_sdk/commands/common/hook_validations/structure.py +++ b/demisto_sdk/commands/common/hook_validations/structure.py @@ -2,17 +2,21 @@ Module contains validation of schemas, ids and paths. """ -from typing import Optional +import json import logging -import re import os -import json +import re +from typing import Optional + import yaml from demisto_sdk.commands.common.configuration import Configuration -from demisto_sdk.commands.common.tools import get_remote_file, get_matching_regex, print_error -from demisto_sdk.commands.common.constants import Errors, ACCEPTED_FILE_EXTENSIONS, FILE_TYPES_PATHS_TO_VALIDATE, \ - SCHEMA_TO_REGEX -from demisto_sdk.commands.format.format_constants import OLD_FILE_DEFAULT_1_FROMVERSION +from demisto_sdk.commands.common.constants import ( + ACCEPTED_FILE_EXTENSIONS, FILE_TYPES_PATHS_TO_VALIDATE, SCHEMA_TO_REGEX, + Errors) +from demisto_sdk.commands.common.tools import (get_matching_regex, + get_remote_file, print_error) +from demisto_sdk.commands.format.format_constants import \ + OLD_FILE_DEFAULT_1_FROMVERSION from pykwalify.core import Core diff --git a/demisto_sdk/commands/common/hook_validations/widget.py b/demisto_sdk/commands/common/hook_validations/widget.py index 58a7f2822e..5c19706d28 100644 --- a/demisto_sdk/commands/common/hook_validations/widget.py +++ b/demisto_sdk/commands/common/hook_validations/widget.py @@ -1,4 +1,5 @@ -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator class WidgetValidator(BaseValidator): diff --git a/demisto_sdk/commands/common/logger.py b/demisto_sdk/commands/common/logger.py new file mode 100644 index 
0000000000..ae867ef73a --- /dev/null +++ b/demisto_sdk/commands/common/logger.py @@ -0,0 +1,85 @@ +import logging +import os + + +def logging_setup(verbose: int, quiet: bool, log_path: str) -> logging.Logger: + """ Init logger object for logging in demisto-sdk + For more info - https://docs.python.org/3/library/logging.html + + Args: + verbose(int): verbosity level - 1-3 + quiet(bool): Whether to output a quiet response. + log_path(str): Path to save log of all levels + + Returns: + logging.Logger: logger object + """ + if quiet: + verbose = 0 + logger: logging.Logger = logging.getLogger('demisto-sdk') + logger.setLevel(logging.DEBUG) + log_level = logging.getLevelName((6 - 2 * verbose) * 10) + fmt = logging.Formatter('%(message)s') + + if verbose: + console_handler = logging.StreamHandler() + console_handler.setLevel(log_level) + console_handler.setFormatter(fmt) + logger.addHandler(console_handler) + + # Setting debug log file if in circleci + if log_path: + file_handler = logging.FileHandler(filename=os.path.join(log_path, 'lint_debug_log.log')) + file_handler.setFormatter(fmt) + file_handler.setLevel(level=logging.DEBUG) + logger.addHandler(file_handler) + + logger.propagate = False + + return logger + + +# Python program to print +# colored text and background +class Colors: + """Colors class: reset all colors with colors.reset; two + sub classes fg for foreground + and bg for background; use as colors.subclass.colorname. + i.e. colors.fg.red or colors.bg.green; also, the generic bold, disable, + underline, reverse, strike through, + and invisible work with the main class i.e. 
colors.bold""" + reset = '\033[0m' + bold = '\033[01m' + disable = '\033[02m' + underline = '\033[04m' + reverse = '\033[07m' + strikethrough = '\033[09m' + invisible = '\033[08m' + + class Fg: + """Foreground""" + black = '\033[30m' + red = '\033[31m' + green = '\033[32m' + orange = '\033[33m' + blue = '\033[34m' + purple = '\033[35m' + cyan = '\033[36m' + lightgrey = '\033[37m' + darkgrey = '\033[90m' + lightred = '\033[91m' + lightgreen = '\033[92m' + yellow = '\033[93m' + lightblue = '\033[94m' + pink = '\033[95m' + lightcyan = '\033[96m' + + class Bg: + """Background""" + black = '\033[40m' + red = '\033[41m' + green = '\033[42m' + orange = '\033[43m' + blue = '\033[44m' + purple = '\033[45m' + cyan = '\033[46m' diff --git a/demisto_sdk/commands/common/tests/conf_test.py b/demisto_sdk/commands/common/tests/conf_test.py index f413c26fa3..a3aca34ded 100644 --- a/demisto_sdk/commands/common/tests/conf_test.py +++ b/demisto_sdk/commands/common/tests/conf_test.py @@ -1,6 +1,6 @@ import pytest -from demisto_sdk.commands.common.hook_validations.conf_json import ConfJsonValidator - +from demisto_sdk.commands.common.hook_validations.conf_json import \ + ConfJsonValidator WITH_DESCRIPTION = { "test": "description" diff --git a/demisto_sdk/commands/common/tests/constants_regex_test.py b/demisto_sdk/commands/common/tests/constants_regex_test.py index fe6788a6f4..8550518cd1 100644 --- a/demisto_sdk/commands/common/tests/constants_regex_test.py +++ b/demisto_sdk/commands/common/tests/constants_regex_test.py @@ -1,18 +1,21 @@ import os import pytest - -from demisto_sdk.commands.common.constants import PACKS_INTEGRATION_PY_REGEX, PACKS_INTEGRATION_YML_REGEX, \ - PACKS_CHANGELOG_REGEX, PACKS_SCRIPT_YML_REGEX, PACKS_SCRIPT_PY_REGEX, PACKS_PLAYBOOK_YML_REGEX, \ - PACKS_TEST_PLAYBOOKS_REGEX, PACKS_CLASSIFIERS_REGEX, PACKS_DASHBOARDS_REGEX, PACKS_INCIDENT_TYPES_REGEX, \ - PACKS_PACKAGE_META_REGEX, PACKS_WIDGETS_REGEX, PACKS_INCIDENT_FIELDS_REGEX, PACKS_INTEGRATION_TEST_PY_REGEX, \
- PACKS_SCRIPT_TEST_PY_REGEX, PACKS_LAYOUTS_REGEX, SCRIPT_TEST_PY_REGEX, \ - INTEGRATION_TEST_PY_REGEX, PACKS_INTEGRATION_JS_REGEX, PACKS_SCRIPT_JS_REGEX, INDICATOR_FIELDS_REGEX, \ - INCIDENT_TYPE_REGEX, BETA_INTEGRATION_YML_REGEX, BETA_INTEGRATIONS_DIR, BETA_INTEGRATION_REGEX, \ - INTEGRATION_PY_REGEX, INTEGRATION_JS_REGEX, INTEGRATIONS_DIR, SCRIPT_PY_REGEX, SCRIPT_JS_REGEX, \ - INTEGRATION_YML_REGEX, INTEGRATION_REGEX, SCRIPT_YML_REGEX, SCRIPT_REGEX, SCRIPTS_DIR, BETA_SCRIPT_REGEX, \ - PACKAGE_YML_FILE_REGEX - +from demisto_sdk.commands.common.constants import ( + BETA_INTEGRATION_REGEX, BETA_INTEGRATION_YML_REGEX, BETA_INTEGRATIONS_DIR, + BETA_SCRIPT_REGEX, INCIDENT_TYPE_REGEX, INDICATOR_FIELDS_REGEX, + INTEGRATION_JS_REGEX, INTEGRATION_PY_REGEX, INTEGRATION_REGEX, + INTEGRATION_TEST_PY_REGEX, INTEGRATION_YML_REGEX, INTEGRATIONS_DIR, + PACKAGE_YML_FILE_REGEX, PACKS_CHANGELOG_REGEX, PACKS_CLASSIFIERS_REGEX, + PACKS_DASHBOARDS_REGEX, PACKS_INCIDENT_FIELDS_REGEX, + PACKS_INCIDENT_TYPES_REGEX, PACKS_INTEGRATION_JS_REGEX, + PACKS_INTEGRATION_PY_REGEX, PACKS_INTEGRATION_TEST_PY_REGEX, + PACKS_INTEGRATION_YML_REGEX, PACKS_LAYOUTS_REGEX, PACKS_PACKAGE_META_REGEX, + PACKS_PLAYBOOK_YML_REGEX, PACKS_SCRIPT_JS_REGEX, PACKS_SCRIPT_PY_REGEX, + PACKS_SCRIPT_TEST_PY_REGEX, PACKS_SCRIPT_YML_REGEX, + PACKS_TEST_PLAYBOOKS_REGEX, PACKS_WIDGETS_REGEX, SCRIPT_JS_REGEX, + SCRIPT_PY_REGEX, SCRIPT_REGEX, SCRIPT_TEST_PY_REGEX, SCRIPT_YML_REGEX, + SCRIPTS_DIR) from demisto_sdk.commands.common.tools import checked_type diff --git a/demisto_sdk/commands/common/tests/dashboard_test.py b/demisto_sdk/commands/common/tests/dashboard_test.py index a72cd0a947..d49f3e29a7 100644 --- a/demisto_sdk/commands/common/tests/dashboard_test.py +++ b/demisto_sdk/commands/common/tests/dashboard_test.py @@ -1,8 +1,11 @@ +from typing import Optional + import pytest +from demisto_sdk.commands.common.hook_validations.dashboard import \ + DashboardValidator +from 
demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator from mock import patch -from typing import Optional -from demisto_sdk.commands.common.hook_validations.dashboard import DashboardValidator -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator def mock_structure(file_path=None, current_file=None, old_file=None): diff --git a/demisto_sdk/commands/common/tests/docker_test.py b/demisto_sdk/commands/common/tests/docker_test.py index 5e563c5d1e..b6c1b01847 100644 --- a/demisto_sdk/commands/common/tests/docker_test.py +++ b/demisto_sdk/commands/common/tests/docker_test.py @@ -1,9 +1,11 @@ import os -from mock import patch + import pytest -from demisto_sdk.commands.common.tools import get_yaml -from demisto_sdk.commands.common.hook_validations.docker import DockerImageValidator from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.common.hook_validations.docker import \ + DockerImageValidator +from demisto_sdk.commands.common.tools import get_yaml +from mock import patch RETURN_ERROR_TARGET = 'GetDockerImageLatestTag.return_error' diff --git a/demisto_sdk/commands/common/tests/id_test.py b/demisto_sdk/commands/common/tests/id_test.py index 98c69ceec2..44325f898f 100644 --- a/demisto_sdk/commands/common/tests/id_test.py +++ b/demisto_sdk/commands/common/tests/id_test.py @@ -1,5 +1,6 @@ from demisto_sdk.commands.common.configuration import Configuration from demisto_sdk.commands.common.hook_validations.id import IDSetValidator + CONFIG = Configuration() diff --git a/demisto_sdk/commands/common/tests/image_test.py b/demisto_sdk/commands/common/tests/image_test.py index 2a3439e279..2096e02121 100644 --- a/demisto_sdk/commands/common/tests/image_test.py +++ b/demisto_sdk/commands/common/tests/image_test.py @@ -1,6 +1,7 @@ import os -from demisto_sdk.commands.common.hook_validations import image + from demisto_sdk.commands.common.git_tools import git_path +from 
demisto_sdk.commands.common.hook_validations import image def test_is_not_default_image(): diff --git a/demisto_sdk/commands/common/tests/incident_field_test.py b/demisto_sdk/commands/common/tests/incident_field_test.py index 4db8d48ccf..7cf9a0f636 100644 --- a/demisto_sdk/commands/common/tests/incident_field_test.py +++ b/demisto_sdk/commands/common/tests/incident_field_test.py @@ -1,7 +1,8 @@ import pytest - -from demisto_sdk.commands.common.hook_validations.incident_field import IncidentFieldValidator, GroupFieldTypes -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator +from demisto_sdk.commands.common.hook_validations.incident_field import ( + GroupFieldTypes, IncidentFieldValidator) +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator from mock import patch diff --git a/demisto_sdk/commands/common/tests/incident_type_test.py b/demisto_sdk/commands/common/tests/incident_type_test.py index eb72439425..befbacf65b 100644 --- a/demisto_sdk/commands/common/tests/incident_type_test.py +++ b/demisto_sdk/commands/common/tests/incident_type_test.py @@ -1,8 +1,11 @@ +from typing import Optional + import pytest +from demisto_sdk.commands.common.hook_validations.incident_type import \ + IncidentTypeValidator +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator from mock import patch -from typing import Optional -from demisto_sdk.commands.common.hook_validations.incident_type import IncidentTypeValidator -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator def mock_structure(file_path=None, current_file=None, old_file=None): diff --git a/demisto_sdk/commands/common/tests/integration_test.py b/demisto_sdk/commands/common/tests/integration_test.py index 3e1fd37ca0..cfa80d7c4b 100644 --- a/demisto_sdk/commands/common/tests/integration_test.py +++ b/demisto_sdk/commands/common/tests/integration_test.py @@ -1,11 +1,16 @@ -import pytest 
-from mock import patch -from typing import Optional +import os from copy import deepcopy +from typing import Optional -from demisto_sdk.commands.common.constants import FETCH_REQUIRED_PARAMS, FEED_REQUIRED_PARAMS -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator -from demisto_sdk.commands.common.hook_validations.integration import IntegrationValidator +import pytest +from demisto_sdk.commands.common.constants import (FEED_REQUIRED_PARAMS, + FETCH_REQUIRED_PARAMS) +from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.common.hook_validations.integration import \ + IntegrationValidator +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator +from mock import patch def mock_structure(file_path=None, current_file=None, old_file=None): @@ -299,7 +304,7 @@ def test_is_valid_beta_integration(self, current, old, answer): validator = IntegrationValidator(structure) validator.current_file = current validator.old_file = old - assert validator.is_valid_beta_integration() is answer + assert validator.is_valid_beta() is answer PROXY_VALID = [{"name": "proxy", "type": 8, "display": "Use system proxy settings", "required": False}] PROXY_WRONG_TYPE = [{"name": "proxy", "type": 9, "display": "Use system proxy settings", "required": False}] @@ -439,6 +444,26 @@ def test_is_valid_display_name(self, current, answer): validator.current_file = current assert validator.is_valid_display_name() is answer + def test_is_valid_image_positive(self, monkeypatch): + integration_path = os.path.normpath( + os.path.join(f'{git_path()}/demisto_sdk/tests', 'test_files', 'integration-Zoom.yml') + ) + structure = mock_structure(file_path=integration_path) + monkeypatch.setattr( + 'demisto_sdk.commands.common.hook_validations.image.INTEGRATION_REGXES', + [integration_path] + ) + validator = IntegrationValidator(structure) + assert validator.is_valid_image() is True + + def 
test_is_valid_description_positive(self): + integration_path = os.path.normpath( + os.path.join(f'{git_path()}/demisto_sdk/tests', 'test_files', 'integration-Zoom.yml') + ) + structure = mock_structure(file_path=integration_path) + validator = IntegrationValidator(structure) + assert validator.is_valid_description() is True + class TestIsFetchParamsExist: def setup(self): @@ -536,3 +561,21 @@ def test_is_all_params_not_hidden(self, current, answer): structure = mock_structure(current_file=current) validator = IntegrationValidator(structure) assert validator.is_all_params_not_hidden() is answer + + @pytest.mark.parametrize("script_type, fromversion, res", [ + ('powershell', None, False), + ('powershell', '4.5.0', False), + ('powershell', '5.5.0', True), + ('powershell', '5.5.1', True), + ('powershell', '6.0.0', True), + ('python', '', True), + ('python', '4.5.0', True), + ]) + def test_valid_pwsh(self, script_type, fromversion, res): + current = { + "script": {"type": script_type}, + "fromversion": fromversion, + } + structure = mock_structure("", current) + validator = IntegrationValidator(structure) + assert validator.is_valid_pwsh() == res diff --git a/demisto_sdk/commands/common/tests/pack_metadata_validator_test.py b/demisto_sdk/commands/common/tests/pack_metadata_validator_test.py index 80b946c3b9..bdf5103598 100644 --- a/demisto_sdk/commands/common/tests/pack_metadata_validator_test.py +++ b/demisto_sdk/commands/common/tests/pack_metadata_validator_test.py @@ -1,8 +1,10 @@ -import pytest import io import os -from demisto_sdk.commands.common.hook_validations.pack_unique_files import PackUniqueFilesValidator + +import pytest from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.common.hook_validations.pack_unique_files import \ + PackUniqueFilesValidator class TestPackMetadataValidator: diff --git a/demisto_sdk/commands/common/tests/pack_unique_files_test.py b/demisto_sdk/commands/common/tests/pack_unique_files_test.py index 
7239b9429a..7ffa21b817 100644 --- a/demisto_sdk/commands/common/tests/pack_unique_files_test.py +++ b/demisto_sdk/commands/common/tests/pack_unique_files_test.py @@ -1,7 +1,9 @@ import os + from demisto_sdk.commands.common.constants import PACKS_README_FILE_NAME -from demisto_sdk.commands.common.hook_validations.pack_unique_files import PackUniqueFilesValidator from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.common.hook_validations.pack_unique_files import \ + PackUniqueFilesValidator class TestPackUniqueFilesValidator: diff --git a/demisto_sdk/commands/common/tests/playbook_test.py b/demisto_sdk/commands/common/tests/playbook_test.py index d84cf31554..73e41a5aad 100644 --- a/demisto_sdk/commands/common/tests/playbook_test.py +++ b/demisto_sdk/commands/common/tests/playbook_test.py @@ -1,8 +1,11 @@ +from typing import Optional + import pytest +from demisto_sdk.commands.common.hook_validations.playbook import \ + PlaybookValidator +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator from mock import patch -from typing import Optional -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator -from demisto_sdk.commands.common.hook_validations.playbook import PlaybookValidator def mock_structure(file_path=None, current_file=None, old_file=None): diff --git a/demisto_sdk/commands/common/tests/readme_test.py b/demisto_sdk/commands/common/tests/readme_test.py index e1bb3a9ce4..1e909f3a48 100644 --- a/demisto_sdk/commands/common/tests/readme_test.py +++ b/demisto_sdk/commands/common/tests/readme_test.py @@ -1,7 +1,8 @@ +import os + import pytest -from demisto_sdk.commands.common.hook_validations.readme import ReadMeValidator from demisto_sdk.commands.common.git_tools import git_path -import os +from demisto_sdk.commands.common.hook_validations.readme import ReadMeValidator VALID_MD = f'{git_path()}/demisto_sdk/tests/test_files/README-valid.md' INVALID_MD = 
f'{git_path()}/demisto_sdk/tests/test_files/README-invalid.md' diff --git a/demisto_sdk/commands/common/tests/release_notes_test.py b/demisto_sdk/commands/common/tests/release_notes_test.py index b4a951e7e8..a4b587c671 100644 --- a/demisto_sdk/commands/common/tests/release_notes_test.py +++ b/demisto_sdk/commands/common/tests/release_notes_test.py @@ -1,6 +1,6 @@ import pytest - -from demisto_sdk.commands.common.hook_validations.release_notes import ReleaseNotesValidator +from demisto_sdk.commands.common.hook_validations.release_notes import \ + ReleaseNotesValidator def get_validator(file_path='', diff=''): diff --git a/demisto_sdk/commands/common/tests/reputation_test.py b/demisto_sdk/commands/common/tests/reputation_test.py index 8967665ffc..6435603d7f 100644 --- a/demisto_sdk/commands/common/tests/reputation_test.py +++ b/demisto_sdk/commands/common/tests/reputation_test.py @@ -1,8 +1,8 @@ import pytest - -from demisto_sdk.commands.common.hook_validations.reputation import ReputationValidator -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator - +from demisto_sdk.commands.common.hook_validations.reputation import \ + ReputationValidator +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator data_is_valid_version = [ (-1, True), diff --git a/demisto_sdk/commands/common/tests/script_test.py b/demisto_sdk/commands/common/tests/script_test.py index 0302c15c29..cb96350eb9 100644 --- a/demisto_sdk/commands/common/tests/script_test.py +++ b/demisto_sdk/commands/common/tests/script_test.py @@ -1,8 +1,8 @@ -from mock import patch import pytest - from demisto_sdk.commands.common.hook_validations.script import ScriptValidator -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator +from mock import patch def get_validator(current_file=None, old_file=None, file_path=""): @@ -346,3 
+346,21 @@ def test_is_valid_name(self, current, answer): validator = get_validator() validator.current_file = current assert validator.is_valid_name() is answer + + @pytest.mark.parametrize("script_type, fromversion, res", [ + ('powershell', None, False), + ('powershell', '4.5.0', False), + ('powershell', '5.5.0', True), + ('powershell', '5.5.1', True), + ('powershell', '6.0.0', True), + ('python', '', True), + ('python', '4.5.0', True), + ]) + def test_valid_pwsh(self, script_type, fromversion, res): + current = { + "type": script_type, + "fromversion": fromversion, + } + validator = get_validator() + validator.current_file = current + assert validator.is_valid_pwsh() == res diff --git a/demisto_sdk/commands/common/tests/structure_test.py b/demisto_sdk/commands/common/tests/structure_test.py index b27a0f29b1..342f2e159d 100644 --- a/demisto_sdk/commands/common/tests/structure_test.py +++ b/demisto_sdk/commands/common/tests/structure_test.py @@ -2,21 +2,25 @@ from os.path import isfile from shutil import copyfile from typing import List, Tuple + import pytest import yaml - from demisto_sdk.commands.common.constants import DIR_LIST -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator - -from demisto_sdk.tests.constants_test import VALID_TEST_PLAYBOOK_PATH, INVALID_PLAYBOOK_PATH, \ - VALID_INTEGRATION_TEST_PATH, VALID_INTEGRATION_ID_PATH, INVALID_INTEGRATION_ID_PATH, VALID_PLAYBOOK_ID_PATH, \ - INVALID_PLAYBOOK_ID_PATH, VALID_LAYOUT_PATH, INVALID_LAYOUT_PATH, INVALID_WIDGET_PATH, \ - VALID_WIDGET_PATH, VALID_DASHBOARD_PATH, INVALID_DASHBOARD_PATH, LAYOUT_TARGET, \ - DASHBOARD_TARGET, WIDGET_TARGET, PLAYBOOK_TARGET, VALID_PLAYBOOK_ARCSIGHT_ADD_DOMAIN_PATH, INTEGRATION_TARGET, \ - INCIDENT_FIELD_TARGET, PLAYBOOK_PACK_TARGET, INDICATORFIELD_EXACT_SCHEME, INDICATORFIELD_EXTRA_FIELDS, \ - INDICATORFIELD_MISSING_AND_EXTRA_FIELDS, INDICATORFIELD_MISSING_FIELD, \ - INVALID_INTEGRATION_YML_1, INVALID_INTEGRATION_YML_2, 
INVALID_INTEGRATION_YML_3, INVALID_INTEGRATION_YML_4, \ - VALID_REPUTATION_FILE, INVALID_REPUTATION_FILE +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator +from demisto_sdk.tests.constants_test import ( + DASHBOARD_TARGET, INCIDENT_FIELD_TARGET, INDICATORFIELD_EXACT_SCHEME, + INDICATORFIELD_EXTRA_FIELDS, INDICATORFIELD_MISSING_AND_EXTRA_FIELDS, + INDICATORFIELD_MISSING_FIELD, INTEGRATION_TARGET, INVALID_DASHBOARD_PATH, + INVALID_INTEGRATION_ID_PATH, INVALID_INTEGRATION_YML_1, + INVALID_INTEGRATION_YML_2, INVALID_INTEGRATION_YML_3, + INVALID_INTEGRATION_YML_4, INVALID_LAYOUT_PATH, INVALID_PLAYBOOK_ID_PATH, + INVALID_PLAYBOOK_PATH, INVALID_REPUTATION_FILE, INVALID_WIDGET_PATH, + LAYOUT_TARGET, PLAYBOOK_PACK_TARGET, PLAYBOOK_TARGET, VALID_DASHBOARD_PATH, + VALID_INTEGRATION_ID_PATH, VALID_INTEGRATION_TEST_PATH, VALID_LAYOUT_PATH, + VALID_PLAYBOOK_ARCSIGHT_ADD_DOMAIN_PATH, VALID_PLAYBOOK_ID_PATH, + VALID_REPUTATION_FILE, VALID_TEST_PLAYBOOK_PATH, VALID_WIDGET_PATH, + WIDGET_TARGET) class TestStructureValidator: diff --git a/demisto_sdk/commands/common/tests/tools_test.py b/demisto_sdk/commands/common/tests/tools_test.py index 57ecfff426..046dd27ef5 100644 --- a/demisto_sdk/commands/common/tests/tools_test.py +++ b/demisto_sdk/commands/common/tests/tools_test.py @@ -1,15 +1,27 @@ -import os import glob -import pytest +import os -from demisto_sdk.commands.common.git_tools import git_path +import pytest from demisto_sdk.commands.common import tools -from demisto_sdk.commands.common.constants import PACKS_PLAYBOOK_YML_REGEX, PACKS_TEST_PLAYBOOKS_REGEX -from demisto_sdk.commands.common.tools import get_matching_regex, server_version_compare, find_type,\ - get_dict_from_file, LOG_COLORS, get_last_release_version -from demisto_sdk.tests.constants_test import VALID_REPUTATION_FILE, VALID_SCRIPT_PATH, VALID_INTEGRATION_TEST_PATH, \ - VALID_PLAYBOOK_ID_PATH, VALID_LAYOUT_PATH, VALID_WIDGET_PATH, VALID_INCIDENT_FIELD_PATH, 
VALID_DASHBOARD_PATH, \ - INDICATORFIELD_EXTRA_FIELDS, VALID_INCIDENT_TYPE_PATH +from demisto_sdk.commands.common.constants import (PACKS_PLAYBOOK_YML_REGEX, + PACKS_TEST_PLAYBOOKS_REGEX) +from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.common.tools import (LOG_COLORS, + filter_packagify_changes, + find_type, get_dict_from_file, + get_last_release_version, + get_matching_regex, + server_version_compare) +from demisto_sdk.tests.constants_test import (INDICATORFIELD_EXTRA_FIELDS, + VALID_DASHBOARD_PATH, + VALID_INCIDENT_FIELD_PATH, + VALID_INCIDENT_TYPE_PATH, + VALID_INTEGRATION_TEST_PATH, + VALID_LAYOUT_PATH, VALID_MD, + VALID_PLAYBOOK_ID_PATH, + VALID_REPUTATION_FILE, + VALID_SCRIPT_PATH, + VALID_WIDGET_PATH) class TestGenericFunctions: @@ -64,6 +76,17 @@ def test_find_type(self, path, _type): output = find_type(str(path)) assert output == _type, f'find_type({path}) returns: {output} instead {_type}' + test_path_md = [ + VALID_MD + ] + + @pytest.mark.parametrize('path', test_path_md) + def test_filter_packagify_changes(self, path): + modified, added, removed = filter_packagify_changes(modified_files=[], added_files=[], removed_files=[path]) + assert modified == [] + assert added == set() + assert removed == [VALID_MD] + class TestGetRemoteFile: def test_get_remote_file_sanity(self): @@ -76,10 +99,6 @@ def test_get_remote_file_origin(self): assert hello_world_yml assert hello_world_yml['commonfields']['id'] == 'HelloWorld' - def test_get_remote_md_file_origin(self): - hello_world_readme = tools.get_remote_file('Packs/HelloWorld/README.md', 'master') - assert hello_world_readme == {} - def test_get_remote_file_tag(self): gmail_yml = tools.get_remote_file('Integrations/Gmail/Gmail.yml', '19.10.0') assert gmail_yml @@ -102,6 +121,10 @@ def test_get_remote_file_invalid_origin_branch(self): invalid_yml = tools.get_remote_file('Integrations/Gmail/Gmail.yml', 'origin/NoSuchBranch') assert not invalid_yml + def 
test_get_remote_md_file_origin(self): + hello_world_readme = tools.get_remote_file('Packs/HelloWorld/README.md', 'master') + assert hello_world_readme == {} + class TestGetMatchingRegex: INPUTS = [ diff --git a/demisto_sdk/commands/common/tests/update_id_set_test.py b/demisto_sdk/commands/common/tests/update_id_set_test.py index 8a07fc0df2..4c3f859fa7 100644 --- a/demisto_sdk/commands/common/tests/update_id_set_test.py +++ b/demisto_sdk/commands/common/tests/update_id_set_test.py @@ -1,12 +1,17 @@ -import unittest -import pytest -import os import json +import os import sys import tempfile -from demisto_sdk.commands.common.update_id_set import has_duplicate, get_integration_data, get_script_data, get_playbook_data, \ - re_create_id_set, find_duplicates +import unittest + +import pytest from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.common.update_id_set import (find_duplicates, + get_integration_data, + get_playbook_data, + get_script_data, + has_duplicate, + re_create_id_set) WIDGET_DATA = { "id": "temp-widget-dup-check", diff --git a/demisto_sdk/commands/common/tests/utils_test.py b/demisto_sdk/commands/common/tests/utils_test.py index 83f0d03325..7127f29bf0 100644 --- a/demisto_sdk/commands/common/tests/utils_test.py +++ b/demisto_sdk/commands/common/tests/utils_test.py @@ -1,8 +1,6 @@ import pytest - from demisto_sdk.commands.common.hook_validations.utils import is_v2_file - V2_VALID_1 = {"display": "integrationname v2", "name": "integrationname v2", "id": "integrationname v2"} V2_WRONG_DISPLAY = {"display": "integrationname V2", "name": "integrationv2name", "id": "integrationname V2"} V2_VALID_2 = {"display": "integrationnameV2", "name": "integrationnameV2", "id": "integrationnameV2"} diff --git a/demisto_sdk/commands/common/tools.py b/demisto_sdk/commands/common/tools.py index c5aae0eaae..cfb4ad0ecc 100644 --- a/demisto_sdk/commands/common/tools.py +++ b/demisto_sdk/commands/common/tools.py @@ -1,22 +1,25 @@ -import re 
+import argparse +import glob +import io +import json import os +import re +import shlex import sys -import json -import glob -import argparse -from subprocess import Popen, PIPE, DEVNULL, check_output from distutils.version import LooseVersion -from typing import Union, Optional, Tuple, Dict, List -import git -import shlex from pathlib import Path +from subprocess import DEVNULL, PIPE, Popen, check_output +from typing import Dict, List, Optional, Tuple, Union + +import git +import requests import urllib3 import yaml -import requests - -from demisto_sdk.commands.common.constants import CHECKED_TYPES_REGEXES, PACKAGE_SUPPORTING_DIRECTORIES, \ - CONTENT_GITHUB_LINK, PACKAGE_YML_FILE_REGEX, UNRELEASE_HEADER, RELEASE_NOTES_REGEX, PACKS_DIR, PACKS_DIR_REGEX, \ - DEF_DOCKER, DEF_DOCKER_PWSH, TYPE_PWSH, SDK_API_GITHUB_RELEASES, PACKS_CHANGELOG_REGEX +from demisto_sdk.commands.common.constants import ( + CHECKED_TYPES_REGEXES, CONTENT_GITHUB_LINK, DEF_DOCKER, DEF_DOCKER_PWSH, + PACKAGE_SUPPORTING_DIRECTORIES, PACKAGE_YML_FILE_REGEX, + PACKS_CHANGELOG_REGEX, PACKS_DIR, PACKS_DIR_REGEX, PACKS_README_FILE_NAME, + RELEASE_NOTES_REGEX, SDK_API_GITHUB_RELEASES, TYPE_PWSH, UNRELEASE_HEADER) # disable insecure warnings urllib3.disable_warnings() @@ -159,6 +162,8 @@ def filter_packagify_changes(modified_files, added_files, removed_files, tag='ma packagify_diff = {} # type: dict for file_path in removed_files: if file_path.split("/")[0] in PACKAGE_SUPPORTING_DIRECTORIES: + if PACKS_README_FILE_NAME in file_path: + continue details = get_remote_file(file_path, tag) if details: uniq_identifier = '_'.join([ @@ -171,7 +176,7 @@ def filter_packagify_changes(modified_files, added_files, removed_files, tag='ma updated_added_files = set() for file_path in added_files: if file_path.split("/")[0] in PACKAGE_SUPPORTING_DIRECTORIES: - if "README.md" in file_path: + if PACKS_README_FILE_NAME in file_path: updated_added_files.add(file_path) continue with open(file_path) as f: @@ -250,8 +255,12 @@ 
def get_file(method, file_path, type_of_file): data_dictionary = None with open(os.path.expanduser(file_path), mode="r", encoding="utf8") as f: if file_path.endswith(type_of_file): + read_file = f.read() + replaced = read_file.replace("simple: =", "simple: '='") + # revert str to stream for loader + stream = io.StringIO(replaced) try: - data_dictionary = method(f) + data_dictionary = method(stream) except Exception as e: print_error( "{} has a structure issue of file type{}. Error was: {}".format(file_path, type_of_file, str(e))) diff --git a/demisto_sdk/commands/common/update_id_set.py b/demisto_sdk/commands/common/update_id_set.py index bf05e9efdf..0495fa4a9b 100755 --- a/demisto_sdk/commands/common/update_id_set.py +++ b/demisto_sdk/commands/common/update_id_set.py @@ -1,26 +1,36 @@ -import itertools -import os import glob +import itertools import json +import os import re -import click +import time from collections import OrderedDict +from distutils.version import LooseVersion from functools import partial from multiprocessing import Pool, cpu_count -from distutils.version import LooseVersion -import time -from demisto_sdk.commands.common.constants import INTEGRATION_REGEX, INTEGRATION_YML_REGEX, \ - PACKS_INTEGRATION_REGEX, SCRIPT_REGEX, PACKS_SCRIPT_YML_REGEX, PLAYBOOK_REGEX, TEST_PLAYBOOK_REGEX, \ - PACKS_PLAYBOOK_YML_REGEX, PACKS_TEST_PLAYBOOKS_REGEX, SCRIPTS_REGEX_LIST, BETA_INTEGRATION_REGEX, \ - BETA_PLAYBOOK_REGEX, TEST_SCRIPT_REGEX, PACKS_INTEGRATION_YML_REGEX, PACKS_CLASSIFIERS_REGEX, CLASSIFIER_REGEX, \ - SCRIPTS_DIR, WIDGETS_DIR, TEST_PLAYBOOKS_DIR, CLASSIFIERS_DIR, DASHBOARDS_DIR, INCIDENT_FIELDS_DIR, \ - INCIDENT_TYPES_DIR, INDICATOR_FIELDS_DIR, LAYOUTS_DIR, REPORTS_DIR, DASHBOARD_REGEX, PACKS_DASHBOARDS_REGEX, \ - INCIDENT_FIELD_REGEX, PACKS_INCIDENT_FIELDS_REGEX, INCIDENT_TYPE_REGEX, PACKS_INCIDENT_TYPES_REGEX, \ - INDICATOR_FIELDS_REGEX, PACKS_INDICATOR_FIELDS_REGEX, LAYOUT_REGEX, PACKS_LAYOUTS_REGEX, REPORT_REGEX, \ - PACKS_REPORTS_REGEX, 
WIDGETS_REGEX, PACKS_WIDGETS_REGEX -from demisto_sdk.commands.common.tools import get_yaml, get_to_version, get_from_version, collect_ids, get_json, \ - get_script_or_integration_id, LOG_COLORS, print_color, print_error, print_warning, run_command, get_pack_name +import click +from demisto_sdk.commands.common.constants import ( + BETA_INTEGRATION_REGEX, BETA_PLAYBOOK_REGEX, CLASSIFIER_REGEX, + CLASSIFIERS_DIR, DASHBOARD_REGEX, DASHBOARDS_DIR, INCIDENT_FIELD_REGEX, + INCIDENT_FIELDS_DIR, INCIDENT_TYPE_REGEX, INCIDENT_TYPES_DIR, + INDICATOR_FIELDS_DIR, INDICATOR_FIELDS_REGEX, INTEGRATION_REGEX, + INTEGRATION_YML_REGEX, LAYOUT_REGEX, LAYOUTS_DIR, PACKS_CLASSIFIERS_REGEX, + PACKS_DASHBOARDS_REGEX, PACKS_INCIDENT_FIELDS_REGEX, + PACKS_INCIDENT_TYPES_REGEX, PACKS_INDICATOR_FIELDS_REGEX, + PACKS_INTEGRATION_REGEX, PACKS_INTEGRATION_YML_REGEX, PACKS_LAYOUTS_REGEX, + PACKS_PLAYBOOK_YML_REGEX, PACKS_REPORTS_REGEX, PACKS_SCRIPT_YML_REGEX, + PACKS_TEST_PLAYBOOKS_REGEX, PACKS_WIDGETS_REGEX, PLAYBOOK_REGEX, + REPORT_REGEX, REPORTS_DIR, SCRIPT_REGEX, SCRIPTS_DIR, SCRIPTS_REGEX_LIST, + TEST_PLAYBOOK_REGEX, TEST_PLAYBOOKS_DIR, TEST_SCRIPT_REGEX, WIDGETS_DIR, + WIDGETS_REGEX) +from demisto_sdk.commands.common.tools import (LOG_COLORS, collect_ids, + get_from_version, get_json, + get_pack_name, + get_script_or_integration_id, + get_to_version, get_yaml, + print_color, print_error, + print_warning, run_command) from demisto_sdk.commands.unify.unifier import Unifier CHECKED_TYPES_REGEXES = ( diff --git a/demisto_sdk/commands/create_artifacts/content_creator.py b/demisto_sdk/commands/create_artifacts/content_creator.py index 4e31c8a553..1965a6fc01 100644 --- a/demisto_sdk/commands/create_artifacts/content_creator.py +++ b/demisto_sdk/commands/create_artifacts/content_creator.py @@ -1,21 +1,35 @@ -import os -import io -import re import glob +import io import json +import os +import re import shutil import zipfile from typing import List -from ruamel.yaml import YAML -from 
demisto_sdk.commands.unify.unifier import Unifier -from demisto_sdk.commands.common.tools import get_child_directories, get_child_files, print_warning, \ - get_yml_paths_in_dir, print_error, find_type, get_common_server_path +from demisto_sdk.commands.common.constants import (BETA_INTEGRATIONS_DIR, + CLASSIFIERS_DIR, + CONNECTIONS_DIR, + DASHBOARDS_DIR, + DIR_TO_PREFIX, + INCIDENT_FIELDS_DIR, + INCIDENT_TYPES_DIR, + INDICATOR_FIELDS_DIR, + INTEGRATIONS_DIR, + LAYOUTS_DIR, MISC_DIR, + PACKS_DIR, PLAYBOOKS_DIR, + REPORTS_DIR, SCRIPTS_DIR, + TEST_PLAYBOOKS_DIR, + TOOLS_DIR, WIDGETS_DIR) from demisto_sdk.commands.common.git_tools import get_current_working_branch -from demisto_sdk.commands.common.constants import INTEGRATIONS_DIR, MISC_DIR, PLAYBOOKS_DIR, REPORTS_DIR,\ - DASHBOARDS_DIR, WIDGETS_DIR, SCRIPTS_DIR, INCIDENT_FIELDS_DIR, CLASSIFIERS_DIR, LAYOUTS_DIR, CONNECTIONS_DIR, \ - BETA_INTEGRATIONS_DIR, INDICATOR_FIELDS_DIR, INCIDENT_TYPES_DIR, TEST_PLAYBOOKS_DIR, PACKS_DIR, DIR_TO_PREFIX, \ - TOOLS_DIR +from demisto_sdk.commands.common.tools import (find_type, + get_child_directories, + get_child_files, + get_common_server_path, + get_yml_paths_in_dir, + print_error, print_warning) +from demisto_sdk.commands.unify.unifier import Unifier +from ruamel.yaml import YAML class ContentCreator: diff --git a/demisto_sdk/commands/create_artifacts/tests/content_creator_test.py b/demisto_sdk/commands/create_artifacts/tests/content_creator_test.py index 8b031daea5..5a80933f85 100644 --- a/demisto_sdk/commands/create_artifacts/tests/content_creator_test.py +++ b/demisto_sdk/commands/create_artifacts/tests/content_creator_test.py @@ -1,8 +1,8 @@ import filecmp from tempfile import mkdtemp -from demisto_sdk.commands.create_artifacts.content_creator import * from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.create_artifacts.content_creator import * class TestContentCreator: diff --git 
a/demisto_sdk/commands/create_id_set/tests/id_set_creator_test.py b/demisto_sdk/commands/create_id_set/tests/id_set_creator_test.py index c3cc433270..639ff47736 100644 --- a/demisto_sdk/commands/create_id_set/tests/id_set_creator_test.py +++ b/demisto_sdk/commands/create_id_set/tests/id_set_creator_test.py @@ -2,8 +2,8 @@ import shutil from tempfile import mkdtemp -from demisto_sdk.commands.create_id_set.create_id_set import IDSetCreator from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.create_id_set.create_id_set import IDSetCreator class TestIDSetCreator: diff --git a/demisto_sdk/commands/find_dependencies/find_dependencies.py b/demisto_sdk/commands/find_dependencies/find_dependencies.py index 9c2a45cc23..bd0a32d800 100644 --- a/demisto_sdk/commands/find_dependencies/find_dependencies.py +++ b/demisto_sdk/commands/find_dependencies/find_dependencies.py @@ -1,9 +1,10 @@ -import networkx as nx +import glob import json +import os import sys -import glob + import click -import os +import networkx as nx from demisto_sdk.commands.common import constants from demisto_sdk.commands.common.tools import print_error from demisto_sdk.commands.create_id_set.create_id_set import IDSetCreator diff --git a/demisto_sdk/commands/find_dependencies/tests/find_dependencies_test.py b/demisto_sdk/commands/find_dependencies/tests/find_dependencies_test.py index 4ab2b04a64..38b21fc093 100644 --- a/demisto_sdk/commands/find_dependencies/tests/find_dependencies_test.py +++ b/demisto_sdk/commands/find_dependencies/tests/find_dependencies_test.py @@ -1,8 +1,10 @@ -import pytest import json import os -from demisto_sdk.commands.find_dependencies.find_dependencies import PackDependencies + +import pytest from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.find_dependencies.find_dependencies import \ + PackDependencies @pytest.fixture(scope="module") diff --git a/demisto_sdk/commands/format/format_module.py 
b/demisto_sdk/commands/format/format_module.py index 8b0c559db7..ff56ef0513 100644 --- a/demisto_sdk/commands/format/format_module.py +++ b/demisto_sdk/commands/format/format_module.py @@ -1,18 +1,25 @@ +import os from typing import List, Tuple + from demisto_sdk.commands.common.git_tools import get_changed_files -import os -from demisto_sdk.commands.common.tools import print_error, find_type, get_files_in_dir, print_success, print_warning -from demisto_sdk.commands.format.update_playbook import PlaybookYMLFormat -from demisto_sdk.commands.format.update_script import ScriptYMLFormat +from demisto_sdk.commands.common.tools import (find_type, get_files_in_dir, + print_error, print_success, + print_warning) +from demisto_sdk.commands.format.format_constants import SCHEMAS_PATH +from demisto_sdk.commands.format.update_classifier import ClassifierJSONFormat +from demisto_sdk.commands.format.update_dashboard import DashboardJSONFormat +from demisto_sdk.commands.format.update_incidentfields import \ + IncidentFieldJSONFormat +from demisto_sdk.commands.format.update_incidenttype import \ + IncidentTypesJSONFormat +from demisto_sdk.commands.format.update_indicatorfields import \ + IndicatorFieldJSONFormat +from demisto_sdk.commands.format.update_indicatortype import \ + IndicatorTypeJSONFormat from demisto_sdk.commands.format.update_integration import IntegrationYMLFormat -from demisto_sdk.commands.format.update_incidentfields import IncidentFieldJSONFormat -from demisto_sdk.commands.format.update_incidenttype import IncidentTypesJSONFormat -from demisto_sdk.commands.format.update_indicatorfields import IndicatorFieldJSONFormat -from demisto_sdk.commands.format.update_indicatortype import IndicatorTypeJSONFormat from demisto_sdk.commands.format.update_layout import LayoutJSONFormat -from demisto_sdk.commands.format.update_dashboard import DashboardJSONFormat -from demisto_sdk.commands.format.update_classifier import ClassifierJSONFormat -from 
demisto_sdk.commands.format.format_constants import SCHEMAS_PATH +from demisto_sdk.commands.format.update_playbook import PlaybookYMLFormat +from demisto_sdk.commands.format.update_script import ScriptYMLFormat FILE_TYPE_AND_LINKED_CLASS = { 'integration': IntegrationYMLFormat, diff --git a/demisto_sdk/commands/format/tests/test_formatting_json_test.py b/demisto_sdk/commands/format/tests/test_formatting_json_test.py index 7ee03212fd..58a85f86e7 100644 --- a/demisto_sdk/commands/format/tests/test_formatting_json_test.py +++ b/demisto_sdk/commands/format/tests/test_formatting_json_test.py @@ -1,15 +1,19 @@ import os -import pytest import shutil -from demisto_sdk.tests.constants_test import SOURCE_FORMAT_INCIDENTFIELD_COPY, DESTINATION_FORMAT_INCIDENTFIELD_COPY, \ - SOURCE_FORMAT_INCIDENTTYPE_COPY, DESTINATION_FORMAT_INCIDENTTYPE_COPY, SOURCE_FORMAT_INDICATORFIELD_COPY, \ - DESTINATION_FORMAT_INDICATORFIELD_COPY, SOURCE_FORMAT_INDICATORTYPE_COPY, DESTINATION_FORMAT_INDICATORTYPE_COPY, \ - SOURCE_FORMAT_LAYOUT_COPY, DESTINATION_FORMAT_LAYOUT_COPY, SOURCE_FORMAT_DASHBOARD_COPY, \ - DESTINATION_FORMAT_DASHBOARD_COPY, INCIDENTFIELD_PATH, DASHBOARD_PATH, LAYOUT_PATH, INCIDENTTYPE_PATH,\ - INDICATORFIELD_PATH, INDICATORTYPE_PATH - +import pytest from demisto_sdk.commands.format.format_module import format_manager +from demisto_sdk.tests.constants_test import ( + DASHBOARD_PATH, DESTINATION_FORMAT_DASHBOARD_COPY, + DESTINATION_FORMAT_INCIDENTFIELD_COPY, + DESTINATION_FORMAT_INCIDENTTYPE_COPY, + DESTINATION_FORMAT_INDICATORFIELD_COPY, + DESTINATION_FORMAT_INDICATORTYPE_COPY, DESTINATION_FORMAT_LAYOUT_COPY, + INCIDENTFIELD_PATH, INCIDENTTYPE_PATH, INDICATORFIELD_PATH, + INDICATORTYPE_PATH, LAYOUT_PATH, SOURCE_FORMAT_DASHBOARD_COPY, + SOURCE_FORMAT_INCIDENTFIELD_COPY, SOURCE_FORMAT_INCIDENTTYPE_COPY, + SOURCE_FORMAT_INDICATORFIELD_COPY, SOURCE_FORMAT_INDICATORTYPE_COPY, + SOURCE_FORMAT_LAYOUT_COPY) class TestFormattingJson: diff --git 
a/demisto_sdk/commands/format/tests/test_formatting_yml_test.py b/demisto_sdk/commands/format/tests/test_formatting_yml_test.py index 0c5fd78b59..208bf67273 100644 --- a/demisto_sdk/commands/format/tests/test_formatting_yml_test.py +++ b/demisto_sdk/commands/format/tests/test_formatting_yml_test.py @@ -1,13 +1,18 @@ import os -import pytest - -from demisto_sdk.tests.constants_test import SOURCE_FORMAT_INTEGRATION_COPY, DESTINATION_FORMAT_INTEGRATION_COPY, \ - SOURCE_FORMAT_SCRIPT_COPY, DESTINATION_FORMAT_SCRIPT_COPY, SOURCE_FORMAT_PLAYBOOK_COPY, \ - DESTINATION_FORMAT_PLAYBOOK_COPY +import shutil -from demisto_sdk.commands.format.update_script import ScriptYMLFormat -from demisto_sdk.commands.format.update_playbook import PlaybookYMLFormat +import pytest +import yaml +from demisto_sdk.commands.format.format_module import format_manager from demisto_sdk.commands.format.update_integration import IntegrationYMLFormat +from demisto_sdk.commands.format.update_playbook import PlaybookYMLFormat +from demisto_sdk.commands.format.update_script import ScriptYMLFormat +from demisto_sdk.tests.constants_test import ( + DESTINATION_FORMAT_INTEGRATION_COPY, DESTINATION_FORMAT_PLAYBOOK_COPY, + DESTINATION_FORMAT_SCRIPT_COPY, EQUAL_VAL_FORMAT_PLAYBOOK_DESTINATION, + EQUAL_VAL_FORMAT_PLAYBOOK_SOURCE, EQUAL_VAL_PATH, GIT_ROOT, + SOURCE_FORMAT_INTEGRATION_COPY, SOURCE_FORMAT_PLAYBOOK_COPY, + SOURCE_FORMAT_SCRIPT_COPY) BASIC_YML_TEST_PACKS = [ (SOURCE_FORMAT_INTEGRATION_COPY, DESTINATION_FORMAT_INTEGRATION_COPY, IntegrationYMLFormat, 'New Integration_copy', @@ -116,3 +121,43 @@ def test_playbook_sourceplaybookid(source_path): base_yml.delete_sourceplaybookid() assert 'sourceplaybookid' not in base_yml.data + + +EQUAL_TEST = [ + (EQUAL_VAL_FORMAT_PLAYBOOK_SOURCE, EQUAL_VAL_FORMAT_PLAYBOOK_DESTINATION, EQUAL_VAL_PATH), +] + + +@pytest.mark.parametrize('input, output, path', EQUAL_TEST) +def test_eqaul_value_in_file(input, output, path): + os.mkdir(path) + shutil.copyfile(input, output) + 
format = format_manager(input=output) + check = True + with open(output, 'r') as f: + if 'simple: =' in f: + check = False + os.remove(output) + os.rmdir(path) + assert check + assert not format + + +@pytest.mark.parametrize('yml_file, yml_type', [ + ('format_pwsh_script.yml', 'script'), + ('format_pwsh_integration.yml', 'integration') +]) +def test_pwsh_format(tmpdir, yml_file, yml_type): + schema_path = os.path.normpath( + os.path.join(__file__, "..", "..", "..", "common", "schemas", '{}.yml'.format(yml_type))) + dest = str(tmpdir.join('pwsh_format_res.yml')) + src_file = f'{GIT_ROOT}/demisto_sdk/tests/test_files/{yml_file}' + if yml_type == 'script': + format_obj = ScriptYMLFormat(src_file, output=dest, path=schema_path) + else: + format_obj = IntegrationYMLFormat(src_file, output=dest, path=schema_path) + assert format_obj.run_format() == 0 + with open(dest) as f: + data = yaml.safe_load(f) + assert data['fromversion'] == '5.5.0' + assert data['commonfields']['version'] == -1 diff --git a/demisto_sdk/commands/format/update_classifier.py b/demisto_sdk/commands/format/update_classifier.py index 0ad57f52b4..62e524aa19 100644 --- a/demisto_sdk/commands/format/update_classifier.py +++ b/demisto_sdk/commands/format/update_classifier.py @@ -1,7 +1,9 @@ from typing import Tuple +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) from demisto_sdk.commands.format.update_generic_json import BaseUpdateJSON -from demisto_sdk.commands.format.format_constants import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE class ClassifierJSONFormat(BaseUpdateJSON): diff --git a/demisto_sdk/commands/format/update_dashboard.py b/demisto_sdk/commands/format/update_dashboard.py index 601b014c8a..d633fbe298 100644 --- a/demisto_sdk/commands/format/update_dashboard.py +++ b/demisto_sdk/commands/format/update_dashboard.py @@ -1,8 +1,11 @@ from typing import Tuple -from demisto_sdk.commands.format.format_constants 
import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE +from demisto_sdk.commands.common.hook_validations.dashboard import \ + DashboardValidator +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) from demisto_sdk.commands.format.update_generic_json import BaseUpdateJSON -from demisto_sdk.commands.common.hook_validations.dashboard import DashboardValidator class DashboardJSONFormat(BaseUpdateJSON): diff --git a/demisto_sdk/commands/format/update_generic.py b/demisto_sdk/commands/format/update_generic.py index 0250cd920f..a156b2133f 100644 --- a/demisto_sdk/commands/format/update_generic.py +++ b/demisto_sdk/commands/format/update_generic.py @@ -1,11 +1,18 @@ -from typing import Union, List -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator -from demisto_sdk.commands.common.tools import print_color, LOG_COLORS, get_dict_from_file, get_remote_file, is_file_from_content_repo import os +from typing import List, Union + import yaml +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator +from demisto_sdk.commands.common.tools import (LOG_COLORS, get_dict_from_file, + get_remote_file, + is_file_from_content_repo, + print_color) +from demisto_sdk.commands.format.format_constants import ( + DEFAULT_VERSION, ERROR_RETURN_CODE, NEW_FILE_DEFAULT_5_FROMVERSION, + OLD_FILE_DEFAULT_1_FROMVERSION, SKIP_RETURN_CODE, SUCCESS_RETURN_CODE) from ruamel.yaml import YAML -from demisto_sdk.commands.format.format_constants import NEW_FILE_DEFAULT_5_FROMVERSION, OLD_FILE_DEFAULT_1_FROMVERSION,\ - ERROR_RETURN_CODE, SKIP_RETURN_CODE, SUCCESS_RETURN_CODE, DEFAULT_VERSION + ryaml = YAML() ryaml.allow_duplicate_keys = True diff --git a/demisto_sdk/commands/format/update_generic_json.py b/demisto_sdk/commands/format/update_generic_json.py index 3c1ffb48f6..e0d7fc3649 100644 --- a/demisto_sdk/commands/format/update_generic_json.py +++ 
b/demisto_sdk/commands/format/update_generic_json.py @@ -1,7 +1,9 @@ import json -from demisto_sdk.commands.common.tools import print_color, LOG_COLORS + +from demisto_sdk.commands.common.tools import LOG_COLORS, print_color +from demisto_sdk.commands.format.format_constants import \ + ARGUMENTS_DEFAULT_VALUES from demisto_sdk.commands.format.update_generic import BaseUpdate -from demisto_sdk.commands.format.format_constants import ARGUMENTS_DEFAULT_VALUES class BaseUpdateJSON(BaseUpdate): diff --git a/demisto_sdk/commands/format/update_generic_yml.py b/demisto_sdk/commands/format/update_generic_yml.py index 0983171b27..ef46dc10e8 100644 --- a/demisto_sdk/commands/format/update_generic_yml.py +++ b/demisto_sdk/commands/format/update_generic_yml.py @@ -1,8 +1,8 @@ -from demisto_sdk.commands.common.tools import print_color, LOG_COLORS import yaml import yamlordereddictloader -from ruamel.yaml import YAML +from demisto_sdk.commands.common.tools import LOG_COLORS, print_color from demisto_sdk.commands.format.update_generic import BaseUpdate +from ruamel.yaml import YAML ryaml = YAML() ryaml.allow_duplicate_keys = True diff --git a/demisto_sdk/commands/format/update_incidentfields.py b/demisto_sdk/commands/format/update_incidentfields.py index fe9036b407..167947d521 100644 --- a/demisto_sdk/commands/format/update_incidentfields.py +++ b/demisto_sdk/commands/format/update_incidentfields.py @@ -1,8 +1,11 @@ from typing import Tuple +from demisto_sdk.commands.common.hook_validations.incident_field import \ + IncidentFieldValidator +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) from demisto_sdk.commands.format.update_generic_json import BaseUpdateJSON -from demisto_sdk.commands.common.hook_validations.incident_field import IncidentFieldValidator -from demisto_sdk.commands.format.format_constants import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE class 
IncidentFieldJSONFormat(BaseUpdateJSON): diff --git a/demisto_sdk/commands/format/update_incidenttype.py b/demisto_sdk/commands/format/update_incidenttype.py index 151bc59f16..d738d5cb9e 100644 --- a/demisto_sdk/commands/format/update_incidenttype.py +++ b/demisto_sdk/commands/format/update_incidenttype.py @@ -1,8 +1,11 @@ from typing import Tuple +from demisto_sdk.commands.common.hook_validations.incident_type import \ + IncidentTypeValidator +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) from demisto_sdk.commands.format.update_generic_json import BaseUpdateJSON -from demisto_sdk.commands.common.hook_validations.incident_type import IncidentTypeValidator -from demisto_sdk.commands.format.format_constants import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE class IncidentTypesJSONFormat(BaseUpdateJSON): diff --git a/demisto_sdk/commands/format/update_indicatorfields.py b/demisto_sdk/commands/format/update_indicatorfields.py index 627b636a95..65b39e79b0 100644 --- a/demisto_sdk/commands/format/update_indicatorfields.py +++ b/demisto_sdk/commands/format/update_indicatorfields.py @@ -1,8 +1,11 @@ from typing import Tuple +from demisto_sdk.commands.common.hook_validations.incident_field import \ + IncidentFieldValidator +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) from demisto_sdk.commands.format.update_generic_json import BaseUpdateJSON -from demisto_sdk.commands.common.hook_validations.incident_field import IncidentFieldValidator -from demisto_sdk.commands.format.format_constants import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE class IndicatorFieldJSONFormat(BaseUpdateJSON): diff --git a/demisto_sdk/commands/format/update_indicatortype.py b/demisto_sdk/commands/format/update_indicatortype.py index 56850aa9db..543948bed2 100644 --- a/demisto_sdk/commands/format/update_indicatortype.py +++ 
b/demisto_sdk/commands/format/update_indicatortype.py @@ -1,8 +1,11 @@ from typing import Tuple -from demisto_sdk.commands.format.format_constants import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE +from demisto_sdk.commands.common.hook_validations.reputation import \ + ReputationValidator +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) from demisto_sdk.commands.format.update_generic_json import BaseUpdateJSON -from demisto_sdk.commands.common.hook_validations.reputation import ReputationValidator class IndicatorTypeJSONFormat(BaseUpdateJSON): diff --git a/demisto_sdk/commands/format/update_integration.py b/demisto_sdk/commands/format/update_integration.py index 1026867a75..d9ebbc5171 100644 --- a/demisto_sdk/commands/format/update_integration.py +++ b/demisto_sdk/commands/format/update_integration.py @@ -1,8 +1,12 @@ from typing import List, Tuple -from demisto_sdk.commands.common.constants import BANG_COMMAND_NAMES -from demisto_sdk.commands.format.format_constants import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE + +from demisto_sdk.commands.common.constants import BANG_COMMAND_NAMES, TYPE_PWSH +from demisto_sdk.commands.common.hook_validations.integration import \ + IntegrationValidator +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) from demisto_sdk.commands.format.update_generic_yml import BaseUpdateYML -from demisto_sdk.commands.common.hook_validations.integration import IntegrationValidator class IntegrationYMLFormat(BaseUpdateYML): @@ -20,6 +24,8 @@ class IntegrationYMLFormat(BaseUpdateYML): def __init__(self, input: str = '', output: str = '', path: str = '', from_version: str = '', no_validate: bool = False): super().__init__(input, output, path, from_version, no_validate) + if not from_version and self.data.get("script", {}).get("type") == TYPE_PWSH: + self.from_version = '5.5.0' def 
update_proxy_insecure_param_to_default(self): """Updates important integration arguments names and description.""" diff --git a/demisto_sdk/commands/format/update_layout.py b/demisto_sdk/commands/format/update_layout.py index c2a18a3819..10564617a6 100644 --- a/demisto_sdk/commands/format/update_layout.py +++ b/demisto_sdk/commands/format/update_layout.py @@ -1,11 +1,12 @@ from typing import Tuple -from demisto_sdk.commands.common.tools import print_color, LOG_COLORS -from demisto_sdk.commands.format.update_generic_json import BaseUpdateJSON -from demisto_sdk.commands.common.hook_validations.layout import LayoutValidator import yaml -from demisto_sdk.commands.format.format_constants import DEFAULT_VERSION, NEW_FILE_DEFAULT_5_FROMVERSION,\ - SUCCESS_RETURN_CODE, ERROR_RETURN_CODE, SKIP_RETURN_CODE +from demisto_sdk.commands.common.hook_validations.layout import LayoutValidator +from demisto_sdk.commands.common.tools import LOG_COLORS, print_color +from demisto_sdk.commands.format.format_constants import ( + DEFAULT_VERSION, ERROR_RETURN_CODE, NEW_FILE_DEFAULT_5_FROMVERSION, + SKIP_RETURN_CODE, SUCCESS_RETURN_CODE) +from demisto_sdk.commands.format.update_generic_json import BaseUpdateJSON class LayoutJSONFormat(BaseUpdateJSON): diff --git a/demisto_sdk/commands/format/update_playbook.py b/demisto_sdk/commands/format/update_playbook.py index cce661d4cf..274056b4a7 100644 --- a/demisto_sdk/commands/format/update_playbook.py +++ b/demisto_sdk/commands/format/update_playbook.py @@ -1,10 +1,14 @@ import re from typing import Tuple +from demisto_sdk.commands.common.hook_validations.playbook import \ + PlaybookValidator +from demisto_sdk.commands.common.tools import (LOG_COLORS, print_color, + print_error) +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) from demisto_sdk.commands.format.update_generic_yml import BaseUpdateYML -from demisto_sdk.commands.common.tools import print_color, LOG_COLORS, 
print_error -from demisto_sdk.commands.common.hook_validations.playbook import PlaybookValidator -from demisto_sdk.commands.format.format_constants import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE class PlaybookYMLFormat(BaseUpdateYML): diff --git a/demisto_sdk/commands/format/update_script.py b/demisto_sdk/commands/format/update_script.py index cb10bbf9d4..5392cbb9ea 100644 --- a/demisto_sdk/commands/format/update_script.py +++ b/demisto_sdk/commands/format/update_script.py @@ -1,8 +1,11 @@ from typing import Tuple -from demisto_sdk.commands.format.format_constants import SKIP_RETURN_CODE, ERROR_RETURN_CODE, SUCCESS_RETURN_CODE -from demisto_sdk.commands.format.update_generic_yml import BaseUpdateYML +from demisto_sdk.commands.common.constants import TYPE_PWSH from demisto_sdk.commands.common.hook_validations.script import ScriptValidator +from demisto_sdk.commands.format.format_constants import (ERROR_RETURN_CODE, + SKIP_RETURN_CODE, + SUCCESS_RETURN_CODE) +from demisto_sdk.commands.format.update_generic_yml import BaseUpdateYML class ScriptYMLFormat(BaseUpdateYML): @@ -15,6 +18,8 @@ class ScriptYMLFormat(BaseUpdateYML): def __init__(self, input: str = '', output: str = '', path: str = '', from_version: str = '', no_validate: bool = False): super().__init__(input, output, path, from_version, no_validate) + if not from_version and self.data.get("type") == TYPE_PWSH: + self.from_version = '5.5.0' def run_format(self) -> int: try: diff --git a/demisto_sdk/commands/generate_docs/common.py b/demisto_sdk/commands/generate_docs/common.py index d6478013dd..43e4015e65 100644 --- a/demisto_sdk/commands/generate_docs/common.py +++ b/demisto_sdk/commands/generate_docs/common.py @@ -1,8 +1,9 @@ +import html import json import os.path import re -import html -from demisto_sdk.commands.common.tools import print_color, LOG_COLORS + +from demisto_sdk.commands.common.tools import LOG_COLORS, print_color from demisto_sdk.commands.run_cmd.runner import Runner 
def append_or_replace_command_in_docs(old_docs: str, new_doc_section: str, command_name: str) -> Tuple[str, list]:
    """Replace a command's section in README contents, or append it at the end.

    Args:
        old_docs: The current contents of the README.md file.
        new_doc_section: The freshly generated docs section for the command.
        command_name: The name of the command whose section is being updated.

    Returns:
        Tuple[str, list]: The updated documentation text and a list of error
        messages (empty when the replace/append succeeded cleanly).
    """
    regexp = DOCS_COMMAND_SECTION_REGEX.format(command_name)
    errs: list = []
    # Single pass: re.subn both performs the substitution and reports how many
    # matches it replaced (the original findall+sub scanned the docs twice).
    new_docs, replaced_count = re.subn(regexp, new_doc_section, old_docs, flags=re.DOTALL)
    if replaced_count:
        print_color('New command docs has been replaced in README.md.', LOG_COLORS.GREEN)
        return new_docs, errs
    if command_name in old_docs:
        # The command is mentioned but its section header did not match the
        # expected pattern, so we cannot replace it in place automatically.
        # (Fixed: the original message was missing the space before "Copy".)
        errs.append(f'Could not replace the command `{command_name}` in the file although it'
                    f' is presented in the file.'
                    ' Copy and paste it in the appropriate spot.')
    if old_docs.endswith('\n'):
        # Remove trailing '\n' so the appended section is separated by exactly one newline.
        old_docs = old_docs[:-1]
    new_docs = f'{old_docs}\n{new_doc_section}'
    print_color('New command docs has been added to the README.md.', LOG_COLORS.GREEN)
    return new_docs, errs
+ + Args: + input: path to the yaml integration + examples: path to the command examples + output: path to the output documentation + use_cases: use cases string + permissions: global permissions for the docs + command_permissions: permissions per command + limitations: limitations description + insecure: should use insecure + verbose: verbose (debug mode) + command: specific command to generate docs for + + """ try: yml_data = get_yaml(input) if not output: # default output dir will be the dir of the input file output = os.path.dirname(os.path.realpath(input)) - errors = [] example_dict = {} if examples and os.path.isfile(examples): @@ -34,30 +93,45 @@ def generate_integration_doc(input, examples, output: str = None, use_cases: str errors.append(f'Command permissions was not found {command_permissions}.') else: # permissions in ['none', 'general'] command_permissions_dict = None - - docs = [] # type: list - docs.extend(add_lines(yml_data.get('description'))) - docs.extend(['This integration was integrated and tested with version xx of {}'.format(yml_data['name'])]) - - # Integration use cases - if use_cases: - docs.extend(generate_numbered_section('Use Cases', use_cases)) - # Integration general permissions - if permissions == 'general': - docs.extend(generate_section('Permissions', '')) - # Setup integration to work with Demisto - docs.extend(generate_section('Configure {} on Demisto'.format(yml_data['name']), '')) - # Setup integration on Demisto - docs.extend(generate_setup_section(yml_data)) - # Commands - command_section, command_errors = generate_commands_section(yml_data, example_dict, command_permissions_dict) - docs.extend(command_section) - errors.extend(command_errors) - # Known limitations - if limitations: - docs.extend(generate_numbered_section('Known Limitations', limitations)) - - doc_text = '\n'.join(docs) + if command: + specific_commands = command.split(',') + readme_path = os.path.join(output, 'README.md') + with open(readme_path) as f: + 
doc_text = f.read() + for specific_command in specific_commands: + print(f'Generating docs for command `{command}`') + command_section, command_errors = generate_commands_section( + yml_data, example_dict, + command_permissions_dict, command=specific_command + ) + command_section_str = '\n'.join(command_section) + doc_text, err = append_or_replace_command_in_docs(doc_text, command_section_str, specific_command) + errors.extend(err) + else: + docs = [] # type: list + docs.extend(add_lines(yml_data.get('description'))) + docs.extend(['This integration was integrated and tested with version xx of {}'.format(yml_data['name'])]) + + # Integration use cases + if use_cases: + docs.extend(generate_numbered_section('Use Cases', use_cases)) + # Integration general permissions + if permissions == 'general': + docs.extend(generate_section('Permissions', '')) + # Setup integration to work with Demisto + docs.extend(generate_section('Configure {} on Demisto'.format(yml_data['name']), '')) + # Setup integration on Demisto + docs.extend(generate_setup_section(yml_data)) + # Commands + command_section, command_errors = generate_commands_section(yml_data, example_dict, + command_permissions_dict, command=command) + docs.extend(command_section) + errors.extend(command_errors) + # Known limitations + if limitations: + docs.extend(generate_numbered_section('Known Limitations', limitations)) + + doc_text = '\n'.join(docs) save_output(output, 'README.md', doc_text) @@ -96,7 +170,23 @@ def generate_setup_section(yaml_data: dict): # Commands -def generate_commands_section(yaml_data: dict, example_dict: dict, command_permissions_dict): +def generate_commands_section( + yaml_data: dict, + example_dict: dict, + command_permissions_dict: dict, + command: Optional[str] = None +) -> Tuple[list, list]: + """Generate the commands section the the README.md file. + + Arguments: + yaml_data (dict): The data of the .yml file (integration or script) + example_dict (dict): Examples of running commands. 
+ command_permissions_dict (dict): Permission needed per command + command (Optional[str]): A specific command to run on. will return the command itself without the section header. + + Returns: + [str, str] -- [commands section, errors] + """ errors = [] # type: list section = [ '## Commands', @@ -105,8 +195,16 @@ def generate_commands_section(yaml_data: dict, example_dict: dict, command_permi ] commands = filter(lambda cmd: not cmd.get('deprecated', False), yaml_data['script']['commands']) command_sections = [] - - for i, cmd in enumerate(commands): + if command: + # for specific command, return it only. + try: + command_dict = list(filter(lambda cmd: cmd['name'] == command, commands))[0] + except IndexError: + err = f'Could not find the command `{command}` in the .yml file.' + print_error(err) + raise IndexError(err) + return generate_single_command_section(command_dict, example_dict, command_permissions_dict) + for cmd in commands: cmd_section, cmd_errors = generate_single_command_section(cmd, example_dict, command_permissions_dict) command_sections.extend(cmd_section) errors.extend(cmd_errors) @@ -240,8 +338,7 @@ def get_command_examples(commands_file_path): print('failed to open command file') commands = commands_file_path.split('\n') - commands = map(command_example_filter, commands) - commands = list(filter(None, commands)) + commands = list(filter(None, map(command_example_filter, commands))) print('found the following commands:\n{}'.format('\n '.join(commands))) return commands diff --git a/demisto_sdk/commands/generate_docs/generate_playbook_doc.py b/demisto_sdk/commands/generate_docs/generate_playbook_doc.py index 5d019d8771..26cb18b305 100644 --- a/demisto_sdk/commands/generate_docs/generate_playbook_doc.py +++ b/demisto_sdk/commands/generate_docs/generate_playbook_doc.py @@ -1,8 +1,10 @@ import os -from demisto_sdk.commands.common.tools import get_yaml, print_warning, print_error -from demisto_sdk.commands.generate_docs.common import save_output, 
generate_table_section, stringEscapeMD, \ - generate_list_section, HEADER_TYPE, generate_section, generate_numbered_section +from demisto_sdk.commands.common.tools import (get_yaml, print_error, + print_warning) +from demisto_sdk.commands.generate_docs.common import ( + HEADER_TYPE, generate_list_section, generate_numbered_section, + generate_section, generate_table_section, save_output, stringEscapeMD) def generate_playbook_doc(input, output: str = None, permissions: str = None, limitations: str = None, diff --git a/demisto_sdk/commands/generate_docs/generate_script_doc.py b/demisto_sdk/commands/generate_docs/generate_script_doc.py index 88d2a0404c..b706c1bfb3 100644 --- a/demisto_sdk/commands/generate_docs/generate_script_doc.py +++ b/demisto_sdk/commands/generate_docs/generate_script_doc.py @@ -1,10 +1,12 @@ import os + +from demisto_sdk.commands.common.tools import (get_from_version, get_yaml, + print_error, print_warning) from demisto_sdk.commands.common.update_id_set import get_depends_on -from demisto_sdk.commands.common.tools import get_yaml, print_warning, print_error,\ - get_from_version -from demisto_sdk.commands.generate_docs.common import save_output, generate_table_section, stringEscapeMD, \ - generate_list_section, build_example_dict, generate_section, generate_numbered_section from demisto_sdk.commands.create_id_set.create_id_set import IDSetCreator +from demisto_sdk.commands.generate_docs.common import ( + build_example_dict, generate_list_section, generate_numbered_section, + generate_section, generate_table_section, save_output, stringEscapeMD) def generate_script_doc(input, examples, output: str = None, permissions: str = None, diff --git a/demisto_sdk/commands/generate_docs/tests/generate_docs_test.py b/demisto_sdk/commands/generate_docs/tests/generate_docs_test.py index c7a9eed7c5..421a1224a4 100644 --- a/demisto_sdk/commands/generate_docs/tests/generate_docs_test.py +++ b/demisto_sdk/commands/generate_docs/tests/generate_docs_test.py @@ -1,11 
+1,16 @@ import os -from demisto_sdk.commands.common.tools import get_yaml, get_json + +import pytest from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.common.tools import get_json, get_yaml +from demisto_sdk.commands.generate_docs.generate_integration_doc import ( + append_or_replace_command_in_docs, generate_integration_doc) -FILES_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files')) +FILES_PATH = os.path.normpath(os.path.join(__file__, git_path(), 'demisto_sdk', 'tests', 'test_files')) FAKE_ID_SET = get_json(os.path.join(FILES_PATH, 'fake_id_set.json')) TEST_PLAYBOOK_PATH = os.path.join(FILES_PATH, 'playbook-Test_playbook.yml') TEST_SCRIPT_PATH = os.path.join(FILES_PATH, 'script-test_script.yml') +TEST_INTEGRATION_PATH = os.path.join(FILES_PATH, 'fake_integration/fake_integration.yml') # common tests @@ -247,7 +252,7 @@ def test_generate_commands_with_permissions_section(): } section, errors = generate_commands_section(yml_data, example_dict={}, command_permissions_dict={ - 'non-deprecated-cmd': 'SUPERUSER'}) + 'non-deprecated-cmd': 'SUPERUSER'}) expected_section = [ '## Commands', @@ -260,3 +265,44 @@ def test_generate_commands_with_permissions_section(): '##### Human Readable Output', '', ''] assert '\n'.join(section) == '\n'.join(expected_section) + + +class TestAppendOrReplaceCommandInDocs: + positive_test_data_file = os.path.join(FILES_PATH, 'docs_test', 'positive_docs_section_end_with_eof.md') + command = 'dxl-send-event' + old_doc = open(positive_test_data_file).read() + new_docs = "\n\n" + positive_inputs = [ + (old_doc, new_docs), + (old_doc + "\n## Known Limitation", new_docs + "\n## Known Limitation"), + (old_doc + "\n### new-command", new_docs + "\n### new-command"), + ("no docs (empty)\n", "no docs (empty)\n" + new_docs), + (f"Command in file, but cant replace. {command}", f"Command in file, but cant replace. 
{command}\n" + new_docs) + ] + + @pytest.mark.parametrize('doc_file, output_docs', positive_inputs) + def test_append_or_replace_command_in_docs_positive(self, doc_file, output_docs): + docs, _ = append_or_replace_command_in_docs(doc_file, self.new_docs, self.command) + assert docs == output_docs + + +class TestGenerateIntegrationDoc: + @classmethod + def rm_readme(cls): + test_integration_readme = os.path.join(os.path.dirname(TEST_INTEGRATION_PATH), 'README.md') + if os.path.isfile(test_integration_readme): + os.remove(test_integration_readme) + + @classmethod + def setup_class(cls): + cls.rm_readme() + + @classmethod + def teardown_class(cls): + cls.rm_readme() + + def test_generate_integration_doc(self): + fake_readme = os.path.join(os.path.dirname(TEST_INTEGRATION_PATH), 'fake_README.md') + # Generate doc + generate_integration_doc(TEST_INTEGRATION_PATH) + assert open(fake_readme).read() == open(os.path.join(os.path.dirname(TEST_INTEGRATION_PATH), 'README.md')).read() diff --git a/demisto_sdk/commands/generate_test_playbook/test_playbook_generator.py b/demisto_sdk/commands/generate_test_playbook/test_playbook_generator.py index 28c5d5c420..bf02c56061 100644 --- a/demisto_sdk/commands/generate_test_playbook/test_playbook_generator.py +++ b/demisto_sdk/commands/generate_test_playbook/test_playbook_generator.py @@ -1,9 +1,10 @@ -import os import json +import os from typing import Dict -from ruamel.yaml import YAML -from demisto_sdk.commands.common.tools import print_error, print_color, LOG_COLORS +from demisto_sdk.commands.common.tools import (LOG_COLORS, print_color, + print_error) +from ruamel.yaml import YAML class ContentItemType: diff --git a/demisto_sdk/commands/generate_test_playbook/tests/test_playbook_generator_test.py b/demisto_sdk/commands/generate_test_playbook/tests/test_playbook_generator_test.py index 36f85049fd..d1e8905660 100644 --- a/demisto_sdk/commands/generate_test_playbook/tests/test_playbook_generator_test.py +++ 
b/demisto_sdk/commands/generate_test_playbook/tests/test_playbook_generator_test.py @@ -1,9 +1,10 @@ -import os import io +import os import shutil -from demisto_sdk.commands.generate_test_playbook.test_playbook_generator import PlaybookTestsGenerator from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.generate_test_playbook.test_playbook_generator import \ + PlaybookTestsGenerator def load_file_from_test_dir(filename): diff --git a/demisto_sdk/commands/init/initiator.py b/demisto_sdk/commands/init/initiator.py index 04ee616dad..3330484413 100644 --- a/demisto_sdk/commands/init/initiator.py +++ b/demisto_sdk/commands/init/initiator.py @@ -1,18 +1,32 @@ +import json import os import shutil -import yaml -import yamlordereddictloader -import json from datetime import datetime -from typing import Dict from distutils.dir_util import copy_tree +from typing import Dict, List +import yaml +import yamlordereddictloader from demisto_sdk.commands.common.configuration import Configuration -from demisto_sdk.commands.common.tools import print_error, print_color, LOG_COLORS, get_common_server_path, print_v -from demisto_sdk.commands.common.constants import INTEGRATIONS_DIR, SCRIPTS_DIR, INCIDENT_FIELDS_DIR, \ - INCIDENT_TYPES_DIR, INDICATOR_FIELDS_DIR, PLAYBOOKS_DIR, LAYOUTS_DIR, TEST_PLAYBOOKS_DIR, CLASSIFIERS_DIR, \ - CONNECTIONS_DIR, DASHBOARDS_DIR, MISC_DIR, REPORTS_DIR, WIDGETS_DIR, PACK_INITIAL_VERSION, INTEGRATION_CATEGORIES, \ - PACK_SUPPORT_OPTIONS +from demisto_sdk.commands.common.constants import (CLASSIFIERS_DIR, + CONNECTIONS_DIR, + DASHBOARDS_DIR, + INCIDENT_FIELDS_DIR, + INCIDENT_TYPES_DIR, + INDICATOR_FIELDS_DIR, + INTEGRATION_CATEGORIES, + INTEGRATIONS_DIR, + LAYOUTS_DIR, MISC_DIR, + PACK_INITIAL_VERSION, + PACK_SUPPORT_OPTIONS, + PLAYBOOKS_DIR, REPORTS_DIR, + SCRIPTS_DIR, + TEST_PLAYBOOKS_DIR, + WIDGETS_DIR) +from demisto_sdk.commands.common.tools import (LOG_COLORS, + get_common_server_path, + print_color, print_error, + 
print_v) class Initiator: @@ -166,7 +180,15 @@ def pack_init(self) -> bool: return True @staticmethod - def create_metadata(fill_manually): + def create_metadata(fill_manually: bool) -> Dict: + """Builds pack metadata JSON content. + + Args: + fill_manually (bool): Whether to interact with the user to fill in metadata details or not. + + Returns: + Dict. Pack metadata JSON content. + """ metadata = { 'name': '## FILL OUT MANUALLY ##', 'description': '## FILL OUT MANUALLY ##', @@ -217,7 +239,16 @@ def create_metadata(fill_manually): return metadata @staticmethod - def get_valid_user_input(options_list, option_message): + def get_valid_user_input(options_list: List[str], option_message: str) -> str: + """Gets user input from a list of options, by integer represents the choice. + + Args: + options_list (List[str]): List of options for the user to choose from. + option_message (str): The message to show the user along with the list of options. + + Returns: + str. The chosen option. + """ for index, option in enumerate(options_list, start=1): option_message += f"[{index}] {option}\n" option_message += "\nEnter option: " diff --git a/demisto_sdk/commands/init/templates/HelloWorld/HelloWorld.py b/demisto_sdk/commands/init/templates/HelloWorld/HelloWorld.py index 287a8ea483..7da14da36e 100644 --- a/demisto_sdk/commands/init/templates/HelloWorld/HelloWorld.py +++ b/demisto_sdk/commands/init/templates/HelloWorld/HelloWorld.py @@ -1,11 +1,13 @@ +import json + +import dateparser import demistomock as demisto +import requests from CommonServerPython import * # noqa: E402 lgtm [py/polluting-import] from CommonServerUserPython import * # noqa: E402 lgtm [py/polluting-import] + # IMPORTS -import json -import requests -import dateparser # Disable insecure warnings requests.packages.urllib3.disable_warnings() diff --git a/demisto_sdk/commands/init/templates/HelloWorld/HelloWorld_test.py b/demisto_sdk/commands/init/templates/HelloWorld/HelloWorld_test.py index 62d5f22eff..606d5040b6 
"""Unit tests for demisto_sdk.commands.init.initiator.Initiator.

All interactive prompts are simulated by monkeypatching ``builtins.input``;
filesystem operations are mocked so no directories are actually created.
"""
import os
from collections import deque
from datetime import datetime
from typing import Callable

import pytest
from demisto_sdk.commands.common.constants import (INTEGRATION_CATEGORIES,
                                                   PACK_INITIAL_VERSION,
                                                   PACK_SUPPORT_OPTIONS)
from demisto_sdk.commands.init.initiator import Initiator

# Canned answers fed to the mocked input() prompts below.
DIR_NAME = 'DirName'
PACK_NAME = 'PackName'
PACK_DESC = 'PackDesc'
PACK_SERVER_MIN_VERSION = '5.5.0'
PACK_AUTHOR = 'PackAuthor'
PACK_URL = 'https://www.github.com/pack'
PACK_EMAIL = 'author@mail.com'
PACK_TAGS = 'Tag1,Tag2'
PACK_PRICE = '0'


@pytest.fixture
def initiator():
    # Fresh Initiator with an empty output path for every test.
    return Initiator('')


def generate_multiple_inputs(inputs: deque) -> Callable:
    """Return an input() stand-in that pops one queued answer per prompt."""
    def next_input(_):
        return inputs.popleft()
    return next_input


def raise_file_exists_error():
    """Helper side effect that raises FileExistsError when invoked."""
    raise FileExistsError


def test_get_created_dir_name(monkeypatch, initiator):
    """The directory name typed by the user is stored on the Initiator."""
    monkeypatch.setattr('builtins.input', lambda _: DIR_NAME)
    initiator.get_created_dir_name('integration')
    assert initiator.dir_name == DIR_NAME


def test_get_object_id(monkeypatch, initiator):
    """Object ID defaults to the dir name on 'Y', or to a typed ID on 'N'."""
    initiator.dir_name = DIR_NAME
    # test integration object with ID like dir name
    monkeypatch.setattr('builtins.input', lambda _: 'Y')
    initiator.get_object_id('integration')
    assert initiator.id == DIR_NAME

    initiator.id = ''
    # test pack object with ID like dir name
    monkeypatch.setattr('builtins.input', lambda _: 'Y')
    initiator.get_object_id('pack')
    assert initiator.id == DIR_NAME

    initiator.id = ''
    # test script object with ID different than dir name
    monkeypatch.setattr('builtins.input', generate_multiple_inputs(deque(['N', 'SomeIntegrationID'])))
    initiator.get_object_id('script')
    assert initiator.id == 'SomeIntegrationID'


def test_create_metadata(monkeypatch, initiator):
    """create_metadata yields placeholder content non-interactively and real
    content when the user fills the prompts manually."""
    # test create_metadata without user filling manually
    pack_metadata = initiator.create_metadata(False)
    assert pack_metadata == {
        'name': '## FILL OUT MANUALLY ##',
        'description': '## FILL OUT MANUALLY ##',
        'support': 'demisto',
        'serverMinVersion': '## FILL OUT MANUALLY #',
        'currentVersion': PACK_INITIAL_VERSION,
        'author': 'demisto',
        'url': 'https://www.demisto.com',
        'email': '',
        'categories': [],
        'tags': [],
        'created': datetime.utcnow().strftime(Initiator.DATE_FORMAT),
        'updated': datetime.utcnow().strftime(Initiator.DATE_FORMAT),
        'beta': False,
        'deprecated': False,
        'certification': 'certified',
        'useCases': [],
        'keywords': [],
        'price': '0',
        'dependencies': {},
    }

    # test create_metadata with user filling manually
    # ('1' answers pick the first option from the support/category menus).
    monkeypatch.setattr(
        'builtins.input',
        generate_multiple_inputs(
            deque([
                PACK_NAME, PACK_DESC, '1', PACK_SERVER_MIN_VERSION, PACK_AUTHOR,
                PACK_URL, PACK_EMAIL, '1', PACK_TAGS, PACK_PRICE
            ])
        )
    )
    pack_metadata = initiator.create_metadata(True)
    assert pack_metadata == {
        'author': PACK_AUTHOR,
        'beta': False,
        'categories': [INTEGRATION_CATEGORIES[0]],
        'certification': 'certified',
        'currentVersion': '1.0.0',
        'dependencies': {},
        'deprecated': False,
        'description': PACK_DESC,
        'email': PACK_EMAIL,
        'keywords': [],
        'name': PACK_NAME,
        'price': PACK_PRICE,
        'serverMinVersion': PACK_SERVER_MIN_VERSION,
        'support': PACK_SUPPORT_OPTIONS[0],
        'tags': ['Tag1', 'Tag2'],
        'created': datetime.utcnow().strftime(Initiator.DATE_FORMAT),
        'updated': datetime.utcnow().strftime(Initiator.DATE_FORMAT),
        'url': PACK_URL,
        'useCases': []
    }


def test_get_valid_user_input(monkeypatch, initiator):
    """Invalid and out-of-range answers are re-prompted until a valid index is given."""
    monkeypatch.setattr('builtins.input', generate_multiple_inputs(deque(['InvalidInput', '100', '1'])))
    user_choice = initiator.get_valid_user_input(INTEGRATION_CATEGORIES, 'Choose category')
    assert user_choice == INTEGRATION_CATEGORIES[0]


def test_create_new_directory(mocker, monkeypatch, initiator):
    """Covers: clean creation, override-confirmation on existing dir, and refusal."""
    full_output_path = 'path'
    initiator.full_output_path = full_output_path

    # create new dir successfully
    mocker.patch.object(os, 'mkdir', return_value=None)
    assert initiator.create_new_directory()

    mocker.patch.object(os, 'mkdir', side_effect=FileExistsError())
    # override dir successfully
    # NOTE(review): mkdir is mocked to always raise, so even after the user
    # confirms override the retried mkdir raises — hence pytest.raises here.
    monkeypatch.setattr('builtins.input', lambda _: 'Y')
    with pytest.raises(FileExistsError):
        assert initiator.create_new_directory()

    # fail to create pack cause of existing dir without overriding it
    monkeypatch.setattr('builtins.input', lambda _: 'N')
    assert initiator.create_new_directory() is False
-pylint and pytest will run within all the docker images of an integration/script. -Meant to be used with integrations/scripts that use the folder (package) structure. + Lint command will perform: -The appropriate docker images for the integration/script will be used to execute the pytest and pylint checks. + 1. Package in host checks - flake8, bandit, mypy, vulture. -**Use Cases** -This command is used to make sure the code stands up to the python standards, prevents bugs and runs unit tests to -make sure the code works as intended. + 2. Package in docker image checks - pylint, pytest, powershell - test, powershell - + analyze. -**Arguments**: -* **-d DIR, --dir DIR** - Specify directory of integration/script. Also supports several direcories as a CSV (default: None) -* **--no-pylint** - Do NOT run pylint linter (default: False) -* **--no-mypy** - Do NOT run mypy static type checking (default: False) -* **--no-flake8** - Do NOT run flake8 linter (default: False) -* **--no-bandit** - Do NOT run bandit linter (default: False) -* **--no-test** - Do NOT test (skip pytest) (default: False) -* **-r, --root** - Run pytest container with root user (default: False) -* **-p, --parallel** - Run tests in parallel (default: False) -* **-m, --max-workers** - The max workers to use in a parallel run (default: 10) -* **-g, --git** - Run only on packages that changes between the current branch and content repo's origin/master branch (default: False) -* **-a, --run-all-tests** - Run lint on all directories in content repo (default: False) -* **-k, --keep-container** - Keep the test container (default: False) -* **-v, --verbose** - Verbose output (default: False) -* **--outfile** Specify a file path to save failing package list. (default: None) -* **--cpu-num CPU_NUM** - Number of CPUs to run pytest on (can set to `auto` for automatic detection of the number of CPUs.) (default: 0) + Meant to be used with integrations/scripts that use the folder (package) structure. 
Will + lookup up what docker image to use and will setup the dev dependencies and file in the target + folder. +Options: +* **-h, --help** + Show this message and exit. +* **-i, --input PATH** + Specify directory of integration/script +* **-g, --git** + Will run only on changed packages +* **-a, --all-packs** + Run lint on all directories in content repo +* **-v, --verbose** + Verbosity level -v / -vv / -vvv [default: vv] +* **-q, --quiet** + Quiet output, only output results in the end +* **-p, --parallel INTEGER RANGE** + Run tests in parallel [default: 1] +* **--no-flake8** + Do NOT run flake8 linter +* **--no-bandit** + Do NOT run bandit linter +* **--no-mypy** + Do NOT run mypy static type checking +* **--no-vulture** + Do NOT run vulture linter +* **--no-pylint** + Do NOT run pylint linter +* **--no-test** + Do NOT test (skip pytest) +* **--no-pwsh-analyze** + Do NOT run powershell analyze +* **--no-pwsh-test** + Do NOT run powershell test +* **-kc, --keep-container** + Keep the test container +* **--test-xml PATH** + Path to store pytest xml results +* **--json-report PATH** + Path to store json results +* **-lp, --log-path PATH** + Path to store all levels of logs -**Examples**: -`demisto-sdk lint -d Integrations/PaloAltoNetworks_XDR,Scripts/HellowWorldScript --no-mypy -p -m 2` -This will parallel run the linters, excluding mypy, on the python files inside the "Integrations/PaloAltoNetworks_XDR" and "Scripts/HelloWorldScript" directories, using 2 workers (threads). -

- -`demisto-sdk lint -a -g` -This will run on all content repo's packaged and packed integrations and scripts and will activate the linting and tests only on the directories which had their files changed in comparison with content origin/master branch. -

- -`demisto-sdk lint -d Interagtions/HelloWorld -v --no-bandit --no-flake8 --cpu-num auto` -This will run the linters, excluding bandit and flake8, on "Integrations/HelloWorld" and give additional details on the run itself as well as any failures detected. -Also this will check the amount of CPU's available to run pytest on and use them. -

-`demisto-sdk lint -d Scripts/HelloWorldScript --no-pytest --no-pylint` -This will run only the linters (flake8, mypy, bandit) on "Scripts/HelloWorldScript". -

- -`demisto-sdk lint -d Integrations/HelloWorld --no-mypy --no-flake8 --no-pytest -k -r` -This will run only pylint and pytest on "Integrations/HelloWorld" using the root user for the pytest and will also keep the test container with the docker image after the operation is over. - -`demisto-sdk lint -g --outfile ~/failures.txt` -This indicates lint runs only on changed packages from content repo's 'origin/master' branch and saves the failed packages to failures.txt file. +**Examples**: +--- +`demisto-sdk lint -i Integrations/PaloAltoNetworks_XDR,Scripts/HellowWorldScript --no-mypy ` +Details: +1. lint and test check will execute on Packages `Integrations/PaloAltoNetworks_XDR,Scripts/HellowWorldScript` +2. Mypy check will not be execute. +3. +--- +`demisto-sdk lint -g -p 2` +1. lint and test check will execute on all Packages which are changed from `origin/master` and from in staging. +2. 2 Threads will be used inorder to preform the lint. +--- diff --git a/demisto_sdk/commands/lint/commands_builder.py b/demisto_sdk/commands/lint/commands_builder.py new file mode 100644 index 0000000000..7d0cfa5cdc --- /dev/null +++ b/demisto_sdk/commands/lint/commands_builder.py @@ -0,0 +1,213 @@ +# STD python packages +import os +from pathlib import Path +from typing import List + +# Third party packages +# Local imports + +excluded_files = ["CommonServerPython.py", "demistomock.py", "CommonServerUserPython.py", "conftest.py", "venv"] + + +def get_python_exec(py_num: float) -> str: + """ Get python executable + + Args: + py_num(float): Python version X.Y + + Returns: + str: python executable + """ + if py_num < 3: + py_num = "" + else: + py_num = 3 + + return f"python{py_num}" + + +def build_flake8_command(files: List[Path], py_num: float) -> str: + """ Build command for executing flake8 lint check + https://flake8.pycqa.org/en/latest/user/invocation.html + Args: + files(List[Path]): files to execute lint + py_num(float): The python version in use + + Returns: + str: flake8 
command + """ + + command = f"{get_python_exec(py_num)} -m flake8" + # Generating file pattrens - path1,path2,path3,.. + files = [str(file) for file in files] + command += ' ' + ' '.join(files) + + return command + + +def build_bandit_command(files: List[Path]) -> str: + """ Build command for executing bandit lint check + https://github.com/PyCQA/bandit + Args: + files(List(Path)): files to execute lint + + Returns: + str: bandit command + """ + command = "python3 -m bandit" + # Only reporting on the high-severity issues + command += " -lll" + # report only issues of a given confidence level HIGH + command += " -iii" + # Aggregate output by filename + command += " -a file" + # File to be excluded when performing lints check + command += f" --exclude={','.join(excluded_files)}" + # only show output in the case of an error + command += f" -q" + # Generating path pattrens - path1,path2,path3,.. + files = [str(item) for item in files] + command += f" -r {','.join(files)}" + + return command + + +def build_mypy_command(files: List[Path], version: float) -> str: + """ Build command to execute with mypy module + https://mypy.readthedocs.io/en/stable/command_line.html + Args: + files(List[Path]): files to execute lint + version(float): python varsion X.Y (3.7, 2.7 ..) + + Returns: + str: mypy command + """ + command = "python3 -m mypy" + # Define python versions + command += f" --python-version {version}" + # This flag enable type checks the body of every function, regardless of whether it has type annotations. + command += " --check-untyped-defs" + # This flag makes mypy ignore all missing imports. + command += " --ignore-missing-imports" + # This flag adjusts how mypy follows imported modules that were not explicitly passed in via the command line + command += " --follow-imports=silent" + # This flag will add column offsets to error messages. 
+ command += " --show-column-numbers" + # This flag will precede all errors with “note” messages explaining the context of the error. + command += " --show-error-codes" + # Use visually nicer output in error messages + command += " --pretty" + # This flag enables redefinion of a variable with an arbitrary type in some contexts. + command += " --allow-redefinition" + # Disable cache creation + command += " --cache-dir=/dev/null" + # Generating path pattrens - file1 file2 file3,.. + files = [str(item) for item in files] + command += " " + " ".join(files) + + return command + + +def build_vulture_command(files: List[Path], pack_path: Path, py_num: float) -> str: + """ Build command to execute with pylint module + https://github.com/jendrikseipp/vulture + Args: + py_num(float): The python version in use + files(List[Path]): files to execute lint + pack_path(Path): Package path + + Returns: + str: vulture command + """ + command = f"{get_python_exec(py_num)} -m vulture" + # Excluded files + command += f" --min-confidence {os.environ.get('VULTURE_MIN_CONFIDENCE_LEVEL', '100')}" + # File to be excluded when performing lints check + command += f" --exclude={','.join(excluded_files)}" + # Whitelist vulture + whitelist = Path(pack_path) / '.vulture_whitelist.py' + if whitelist.exists(): + command += f" {whitelist}" + files = [str(item) for item in files] + command += " " + " ".join(files) + + return command + + +def build_pylint_command(files: List[Path]) -> str: + """ Build command to execute with pylint module + https://docs.pylint.org/en/1.6.0/run.html#invoking-pylint + Args: + files(List[Path]): files to execute lint + + Returns: + str: pylint command + """ + command = "python -m pylint" + # Excluded files + command += f" --ignore={','.join(excluded_files)}" + # Prints only errors + command += " -E" + # Disable specific errors + command += " -d duplicate-string-formatting-argument" + # List of members which are set dynamically and missed by pylint inference system, and 
so shouldn't trigger + # E1101 when accessed. + command += " --generated-members=requests.packages.urllib3,requests.codes.ok" + # Generating path pattrens - file1 file2 file3,.. + files = [file.name for file in files] + command += " " + " ".join(files) + + return command + + +def build_pytest_command(test_xml: str = "", json: bool = False) -> str: + """ Build command to execute with pytest module + https://docs.pytest.org/en/latest/usage.html + Args: + test_xml(str): path indicate if required or not + json(bool): Define json creation after test + + Returns: + str: pytest command + """ + command = "python -m pytest" + # Generating junit-xml report - used in circle ci + if test_xml: + command += f" --junitxml=/devwork/report_pytest.xml" + # Generating json report + if json: + command += f" --json=/devwork/report_pytest.json" + + return command + + +def build_pwsh_analyze_command(file: Path) -> str: + """ Build command for powershell analyze + + Args: + file(Path): files to execute lint + + Returns: + str: powershell analyze command + """ + # Invoke script analyzer + command = "Invoke-ScriptAnalyzer" + # Return exit code when finished + command += " -EnableExit" + # Lint Files paths + command += f" -Path {file.name}" + + return f"pwsh -Command {command}" + + +def build_pwsh_test_command() -> str: + """ Build command for powershell test + + Returns: + str: powershell test command + """ + command = "Invoke-Pester" + # Return exit code when finished + command += " -EnableExit" + + return f"pwsh -Command {command}" diff --git a/demisto_sdk/commands/lint/dev_envs/__init__.py b/demisto_sdk/commands/lint/dev_envs/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/demisto_sdk/commands/lint/dev_envs/pytest/__init__.py b/demisto_sdk/commands/lint/dev_envs/pytest/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/demisto_sdk/commands/lint/dev_envs/pytest/conftest.py b/demisto_sdk/commands/lint/dev_envs/pytest/conftest.py 
deleted file mode 100644 index 0be11f4bca..0000000000 --- a/demisto_sdk/commands/lint/dev_envs/pytest/conftest.py +++ /dev/null @@ -1,53 +0,0 @@ -import pytest -import logging - - -# File is coppied to each package dir when running tests. -# More info about conftest.py at: -# https://docs.pytest.org/en/latest/writing_plugins.html#conftest-py-plugins # disable-secrets-detection - - -@pytest.fixture(autouse=True) -def check_logging(caplog): - """ - Fixture validates that the python logger doesn't contain any warnings (or up) messages - - If your test fails and it is ok to have such messages then you can clear the log at the end of your test - By callign: caplog.clear() - - For example: - - def test_foo(caplog): - logging.getLogger().warning('this is ok') - caplog.clear() - """ - yield - messages = [ - "{}: {}".format(x.levelname, x.message) for x in caplog.get_records('call') if x.levelno >= logging.WARNING - ] - if messages: - pytest.fail( - "warning messages encountered during testing: {}".format(messages) - ) - - -@pytest.fixture(autouse=True) -def check_std_out_err(capfd): - """ - Fixture validates that there is no ouput to stdout or stderr. 
- - If your test fails and it is ok to have output in stdout/stderr, you can disable the capture use - "with capfd.disabled()" - - For example: - - def test_boo(capfd): - with capfd.disabled(): - print("this is ok") - """ - yield - (out, err) = capfd.readouterr() - if out: - pytest.fail("Found output in stdout: [{}]".format(out.strip())) - if err: - pytest.fail("Found output in stderr: [{}]".format(err.strip())) diff --git a/demisto_sdk/commands/lint/helpers.py b/demisto_sdk/commands/lint/helpers.py new file mode 100644 index 0000000000..148515699c --- /dev/null +++ b/demisto_sdk/commands/lint/helpers.py @@ -0,0 +1,357 @@ +# STD python packages +import io +import logging +import os +import re +import shlex +import shutil +import tarfile +import textwrap +from contextlib import contextmanager +from functools import lru_cache +from pathlib import Path +from typing import Dict, Generator, List, Optional + +import docker +import docker.errors +# Third party packages +import git +import requests +# Local packages +from demisto_sdk.commands.common.constants import TYPE_PWSH, TYPE_PYTHON +from demisto_sdk.commands.common.tools import print_warning, run_command_os +from docker.models.containers import Container + +# Python2 requirements +PYTHON2_REQ = ["flake8", "vulture"] + +# Define check exit code if failed +EXIT_CODES = { + "flake8": 0b1, + "bandit": 0b10, + "mypy": 0b100, + "vulture": 0b1000, + "pytest": 0b10000, + "pylint": 0b100000, + "pwsh_analyze": 0b1000000, + "pwsh_test": 0b10000000, + "image": 0b100000000, +} + +# Execution exit codes +SUCCESS = 0b0 +FAIL = 0b1 +RERUN = 0b10 + +# Power shell checks +PWSH_CHECKS = ["pwsh_analyze", "pwsh_test"] +PY_CHCEKS = ["flake8", "bandit", "mypy", "vulture", "pytest", "pylint"] + +# Line break +RL = '\n' + +logger = logging.getLogger('demisto-sdk') + + +def validate_env() -> None: + """Packs which use python2 will need to be run inside virtual enviorment including python2 as main and the specified req""" + command = "python 
-c \"import sys; print('{}.{}'.format(sys.version_info[0], sys.version_info[1]))\"" + stdout, stderr, exit_code = run_command_os(command, cwd=Path().cwd()) + if "2" not in stdout: + print_warning('Demsito-sdk lint not in virtual enviorment, Python2 lints will failed, use "source ' + '.hooks/bootstrap.sh" to create the virtual enviorment') + else: + stdout, stderr, exit_code = run_command_os("pip3 freeze", cwd=Path().cwd()) + for req in PYTHON2_REQ: + if req not in stdout: + print_warning('Demsito-sdk lint not in virtual enviorment, Python2 lints will failed, use "source ' + '.hooks/bootstrap.sh" to create the virtual enviorment') + + +def build_skipped_exit_code(no_flake8: bool, no_bandit: bool, no_mypy: bool, no_pylint: bool, no_vulture: bool, + no_test: bool, no_pwsh_analyze: bool, no_pwsh_test: bool, docker_engine: bool) -> bool: + """ + no_flake8(bool): Whether to skip flake8. + no_bandit(bool): Whether to skip bandit. + no_mypy(bool): Whether to skip mypy. + no_vulture(bool): Whether to skip vulture + no_pylint(bool): Whether to skip pylint. + no_test(bool): Whether to skip pytest. + docker_engine(bool): docker engine exists. + """ + skipped_code = 0b0 + if no_flake8: + skipped_code |= EXIT_CODES["flake8"] + if no_bandit: + skipped_code |= EXIT_CODES["bandit"] + if no_mypy or not docker_engine: + skipped_code |= EXIT_CODES["mypy"] + if no_vulture or not docker_engine: + skipped_code |= EXIT_CODES["vulture"] + if no_pylint or not docker_engine: + skipped_code |= EXIT_CODES["pylint"] + if no_test or not docker_engine: + skipped_code |= EXIT_CODES["pytest"] + if no_pwsh_analyze or not docker_engine: + skipped_code |= EXIT_CODES["pwsh_analyze"] + if no_pwsh_test or not docker_engine: + skipped_code |= EXIT_CODES["pwsh_test"] + + return skipped_code + + +def get_test_modules(content_repo: Optional[git.Repo]) -> Dict[Path, bytes]: + """ Get required test modules from content repository - {remote}/master + 1. Tests/demistomock/demistomock.py + 2. 
Tests/scripts/dev_envs/pytest/conftest.py + 3. Scripts/CommonServerPython/CommonServerPython.py + 4. CommonServerUserPython.py + + Returns: + dict: path and file content - see below modules dict + """ + modules = [Path("Tests/demistomock/demistomock.py"), + Path("Tests/scripts/dev_envs/pytest/conftest.py"), + Path("Packs/Base/Scripts/CommonServerPython/CommonServerPython.py"), + Path("Tests/demistomock/demistomock.ps1"), + Path("Packs/Base/Scripts/CommonServerPowerShell/CommonServerPowerShell.ps1")] + modules_content = {} + if content_repo: + # Trying to get file from local repo before downloading from GitHub repo (Get it from disk), Last fetch + for module in modules: + modules_content[module] = (content_repo.working_dir / module).read_bytes() + else: + # If not succeed to get from local repo copy, Download the required modules from GitHub + for module in modules: + url = f'https://raw.githubusercontent.com/demisto/content/master/{module}' + for trial in range(2): + res = requests.get(url=url, + verify=False) + if res.ok: + # ok - not 4XX or 5XX + modules_content[module] = res.content + break + elif trial == 2: + raise requests.exceptions.ConnectionError + + modules_content[Path("CommonServerUserPython.py")] = b'' + + return modules_content + + +@contextmanager +def add_typing_module(lint_files: List[Path], python_version: float): + """ Check for typing import for python2 packages + 1. Entrance - Add import typing in the begining of the file. + 2. Closing - change back to original. + + Args: + lint_files(list): File to execute lint - for adding typing in python 2.7 + python_version(float): The package python version. 
+ + Raises: + IOError: if can't write to files due permissions or other reasons + """ + added_modules: List[Path] = [] + back_lint_files: List[Path] = [] + try: + # Add typing import if needed to python version 2 packages + if python_version < 3: + for lint_file in lint_files: + data = lint_file.read_text(encoding="utf-8") + typing_regex = "(from typing import|import typing)" + module_match = re.search(typing_regex, data) + if not module_match: + original_file = lint_file + back_file = lint_file.with_suffix('.bak') + original_file.rename(back_file) + data = back_file.read_text() + original_file.write_text("from typing import * # noqa: F401" + '\n' + data) + back_lint_files.append(back_file) + added_modules.append(original_file) + yield + except Exception: + pass + finally: + for added_module in added_modules: + if added_module.exists(): + added_module.unlink() + for back_file in back_lint_files: + if back_file.exists(): + original_name = back_file.with_suffix('.py') + back_file.rename(original_name) + + +@contextmanager +def add_tmp_lint_files(content_repo: git.Repo, pack_path: Path, lint_files: List[Path], modules: Dict[Path, bytes], + pack_type: str): + """ LintFiles is context manager to mandatory files for lint and test + 1. Entrance - download missing files to pack. + 2. Closing - Remove downloaded files from pack. + + Args: + pack_path(Path): Absolute path of pack + lint_files(list): File to execute lint - for adding typing in python 2.7 + modules(dict): modules content to locate in pack path + content_repo(Path): Repository object + pack_type(st): Pack type. 
+ + Raises: + IOError: if can't write to files due permissions or other reasons + """ + added_modules: List[Path] = [] + try: + # Add mandatory test,lint modules + for module, content in modules.items(): + pwsh_module = TYPE_PWSH == pack_type and module.suffix == '.ps1' + python_module = TYPE_PYTHON == pack_type and module.suffix == '.py' + if pwsh_module or python_module: + cur_path = pack_path / module.name + if not cur_path.exists(): + cur_path.write_bytes(content) + added_modules.append(cur_path) + if pack_type == TYPE_PYTHON: + # Append empty so it will exists + cur_path = pack_path / "CommonServerUserPython.py" + if not cur_path.exists(): + cur_path.touch() + added_modules.append(cur_path) + + # Add API modules to directory if needed + module_regex = r'from ([\w\d]+ApiModule) import \*(?: # noqa: E402)?' + for lint_file in lint_files: + module_name = "" + data = lint_file.read_text() + module_match = re.search(module_regex, data) + if module_match: + module_name = module_match.group(1) + rel_api_path = Path('Packs/ApiModules/Scripts') / module_name / f'{module_name}.py' + cur_path = pack_path / f'{module_name}.py' + if content_repo: + module_path = content_repo / rel_api_path + shutil.copy(src=module_path, + dst=cur_path) + else: + url = f'https://raw.githubusercontent.com/demisto/content/master/{rel_api_path}' + api_content = requests.get(url=url, + verify=False).content + cur_path.write_bytes(api_content) + + added_modules.append(cur_path) + yield + except Exception: + pass + finally: + for added_module in added_modules: + if added_module.exists(): + added_module.unlink() + + +@lru_cache(maxsize=100) +def get_python_version_from_image(image: str) -> float: + """ Get python version from docker image + + Args: + image(str): Docker image id or name + + Returns: + float: Python version X.Y (3.7, 3.6, ..) 
+ """ + docker_client = docker.from_env() + container_obj: Container = None + py_num = "" + for trial1 in range(2): + try: + command = "python -c \"import sys; print('{}.{}'.format(sys.version_info[0], sys.version_info[1]))\"" + + container_obj: Container = docker_client.containers.run(image=image, + command=shlex.split(command), + detach=True) + # Wait for container to finish + container_obj.wait(condition="exited") + # Get python version + py_num = container_obj.logs() + if isinstance(py_num, bytes): + py_num = float(py_num) + break + else: + raise docker.errors.ContainerError + except (docker.errors.APIError, docker.errors.ContainerError): + continue + + if container_obj: + for trial2 in range(2): + try: + container_obj.remove(force=True) + break + except docker.errors.APIError: + continue + + return py_num + + +def get_file_from_container(container_obj: Container, container_path: str, encoding: str = "") -> str: + """ Copy file from container. + + Args: + container_obj(Container): Container ID to copy file from + container_path(Path): Path in container image (file) + encoding(str): valide encoding e.g. utf-8 + + Returns: + str: file as string decode as utf-8 + + Raises: + IOError: Rase IO error if unable to create temp file + """ + archive, stat = container_obj.get_archive(container_path) + filelike = io.BytesIO(b"".join(b for b in archive)) + tar = tarfile.open(fileobj=filelike) + data = tar.extractfile(stat['name']).read() + if encoding: + data = data.decode(encoding) + + return data + + +def copy_dir_to_container(container_obj: Container, host_path: Path, container_path: Path): + """ Copy all content directory from container. 
+ + Args: + container_obj(Container): Container ID to copy file from + host_path(Path): Path in host (directory) + container_path(Path): Path in container (directory) + + Returns: + str: file as string decode as utf-8 + + Raises: + IOError: Rase IO error if unable to create temp file + """ + excluded_regex = "(__init__.py|.*.back)" + file_like_object = io.BytesIO() + old_cwd = os.getcwd() + with tarfile.open(fileobj=file_like_object, mode='w:gz') as archive: + os.chdir(host_path) + archive.add('.', recursive=True, filter=lambda tarinfo: ( + tarinfo if not re.search(excluded_regex, Path(tarinfo.name).name) else None)) + os.chdir(old_cwd) + + container_obj.put_archive(path=container_path, + data=file_like_object.getvalue()) + + +def stream_docker_container_output(streamer: Generator) -> None: + """ Stream container logs + + Args: + streamer(Generator): Generator created by docker-sdk + """ + try: + wrapper = textwrap.TextWrapper(initial_indent='\t', + subsequent_indent='\t', + width=150) + for chunk in streamer: + logger.info(wrapper.fill(str(chunk.decode('utf-8')))) + except Exception: + pass diff --git a/demisto_sdk/commands/lint/lint_manager.py b/demisto_sdk/commands/lint/lint_manager.py index 4781ead938..e0c7f29e7f 100644 --- a/demisto_sdk/commands/lint/lint_manager.py +++ b/demisto_sdk/commands/lint/lint_manager.py @@ -1,301 +1,548 @@ -import os -import threading +# STD packages import concurrent.futures -from typing import Tuple, List - +import json +import logging +import os +import re +import sys +import textwrap +from typing import Any, Dict, List + +# Third party packages +import docker +import docker.errors +import git +import requests.exceptions +import urllib3.exceptions +from demisto_sdk.commands.common.constants import TYPE_PWSH, TYPE_PYTHON +# Local packages +from demisto_sdk.commands.common.logger import Colors, logging_setup +from demisto_sdk.commands.common.tools import (print_error, print_v, + print_warning) +from demisto_sdk.commands.lint.helpers 
import (EXIT_CODES, PWSH_CHECKS, + PY_CHCEKS, + build_skipped_exit_code, + get_test_modules, validate_env) from demisto_sdk.commands.lint.linter import Linter -from demisto_sdk.commands.common.configuration import Configuration -from demisto_sdk.commands.common.constants import PACKS_DIR, INTEGRATIONS_DIR, SCRIPTS_DIR, BETA_INTEGRATIONS_DIR -from demisto_sdk.commands.common.tools import get_dev_requirements, print_color, LOG_COLORS, run_command, \ - set_log_verbose, print_error, \ - get_common_server_dir, get_common_server_dir_pwsh - +from wcmatch.pathlib import Path -LOCK = threading.Lock() +logger: logging.Logger class LintManager: - """LintManager used to activate lint command using Linters in a single or multi thread. + """ LintManager used to activate lint command using Linters in a single or multi thread. Attributes: - project_dir_list (str): A CSV of directories to run lint on. - no_test (bool): Whether to skip pytest. - no_pylint (bool): Whether to skip pylint. - no_flake8 (bool): Whether to skip flake8. - no_mypy (bool): Whether to skip mypy. - no_bandit (bool): Whether to skip bandit. - no_pslint (bool): Whether to skip powershell lint. - no_vulture (bool): Whether to skip vulture. - verbose (bool): Whether to output a detailed response. - root (bool): Whether to run pytest container with root user. - keep_container (bool): Whether to keep the test container. - cpu_num (int): Number of CPUs to run pytest on. - parallel (bool): Whether to run command on multiple threads. - max_workers (int): How many workers to run for multi-thread run. - run_all_tests (bool): Whether to run all tests. - outfile (str): file path to save failed package list. - configuration (Configuration): The system configuration. + input(str): Directories to run lint on. + git(bool): Perform lint and test only on chaged packs. + all_packs(bool): Whether to run on all packages. + verbose(int): Whether to output a detailed response. + quiet(bool): Whether to output a quiet response. 
+ log_path(str): Path to all levels of logs. """ - def __init__(self, project_dir_list: str, no_test: bool = False, no_pylint: bool = False, no_flake8: bool = False, - no_mypy: bool = False, verbose: bool = False, root: bool = False, keep_container: bool = False, - cpu_num: int = 0, parallel: bool = False, max_workers: int = 10, no_bandit: bool = False, - no_pslint: bool = False, - no_vulture: bool = False, git: bool = False, run_all_tests: bool = False, outfile: str = '', - configuration: Configuration = Configuration()): - - if no_test and no_pylint and no_flake8 and no_mypy and no_bandit: - raise ValueError("Nothing to run as all --no-* options specified.") - - self.parallel = parallel - set_log_verbose(verbose) - self.root = root - self.max_workers = 10 if max_workers is None else int(max_workers) - self.keep_container = keep_container - self.cpu_num = cpu_num - self.common_server_created = False - self.run_args = { - 'pylint': not no_pylint, - 'flake8': not no_flake8, - 'mypy': not no_mypy, - 'tests': not no_test, - 'bandit': not no_bandit, - 'pslint': not no_pslint, - 'vulture': not no_vulture, - } - - if run_all_tests or (not project_dir_list and git): - self.pkgs = self.get_all_directories() - - else: - self.pkgs = project_dir_list.split(',') - - if git: - self.pkgs = self._get_packages_to_run() - - self.configuration = configuration - self.requirements_for_python3 = get_dev_requirements(3.7, self.configuration.envs_dirs_base) - self.requirements_for_python2 = get_dev_requirements(2.7, self.configuration.envs_dirs_base) - self.outfile = outfile + def __init__(self, input: str, git: bool, all_packs: bool, quiet: bool, verbose: bool, log_path: str): + # Set logging level and file handler if required + global logger + logger = logging_setup(verbose=verbose, + quiet=quiet, + log_path=log_path) + # Verbosity level + self._verbose = not quiet if quiet else verbose + # Gather facts for manager + self._facts: dict = self._gather_facts() + # Filter packages to lint 
and test check + self._pkgs: List[Path] = self._get_packages(content_repo=self._facts["content_repo"], + input=input, + git=git, + all_packs=all_packs) @staticmethod - def get_all_directories() -> List[str]: - """Gets all integration, script and beta_integrations in packages and packs in content repo. + def _gather_facts() -> Dict[str, Any]: + """ Gather shared required facts for lint command execution - Also perform mandatory resource checkup. + 1. Content repo object. + 2. Requirements file for docker images. + 3. Mandatory test modules - demisto-mock.py etc + 3. Docker daemon check. Returns: - List. A list of integration, script and beta_integration names. + dict: facts """ - print("Getting all directory names") - all_directories = [] - # get all integrations, scripts and beta_integrations from packs - for root, _, _ in os.walk(PACKS_DIR): - if 'Packs/' in root: - if ('Integrations/' in root or 'Scripts/' in root or 'Beta_Integrations/' in root) \ - and len(root.split('/')) == 4: - all_directories.append(root) - - for root, _, _ in os.walk(INTEGRATIONS_DIR): - if 'Integrations/' in root and len(root.split('/')) == 2: - all_directories.append(root) - - for root, _, _ in os.walk(SCRIPTS_DIR): - if 'Scripts/' in root and len(root.split('/')) == 2: - all_directories.append(root) + facts = { + "content_repo": None, + "requirements_3": None, + "requirements_2": None, + "test_modules": None, + "docker_engine": True + } + # Check env requirements satisfied - bootstrap in use + validate_env() + # Get content repo object + try: + git_repo = git.Repo(os.getcwd(), + search_parent_directories=True) + if 'content' not in git_repo.remote().urls.__next__(): + raise git.InvalidGitRepositoryError + facts["content_repo"] = git_repo + logger.debug(f"Content path {git_repo.working_dir}") + except (git.InvalidGitRepositoryError, git.NoSuchPathError) as e: + print_warning("You are running demisto-sdk lint not in content repositorty!") + logger.warning(f"can't locate content repo {e}") 
+ # Get global requirements file + pipfile_dir = Path(__file__).parent / 'resources' + try: + for py_num in ['2', '3']: + pipfile_lock_path = pipfile_dir / f'pipfile_python{py_num}/Pipfile.lock' + with open(file=pipfile_lock_path) as f: + lock_file: dict = json.load(fp=f)["develop"] + facts[f"requirements_{py_num}"] = [key + value["version"] for key, value in lock_file.items()] + logger.debug(f"Test requirements successfully collected for python {py_num}:\n" + f" {facts[f'requirements_{py_num}']}") + except (json.JSONDecodeError, IOError, FileNotFoundError, KeyError) as e: + print_error("Can't parse pipfile.lock - Aborting!") + logger.critical(f"demisto-sdk-can't parse pipfile.lock {e}") + sys.exit(1) + # ￿Get mandatory modulestest modules and Internet connection for docker usage + try: + facts["test_modules"] = get_test_modules(content_repo=facts["content_repo"]) + logger.debug(f"Test mandatory modules successfully collected") + except git.GitCommandError as e: + print_error("Unable to get test-modules demisto-mock.py etc - Aborting! corrupt repository of pull from master") + logger.error(f"demisto-sdk-unable to get mandatory test-modules demisto-mock.py etc {e}") + sys.exit(1) + except (requests.exceptions.ConnectionError, urllib3.exceptions.NewConnectionError) as e: + print_error("Unable to get mandatory test-modules demisto-mock.py etc - Aborting! 
(Check your internet " + "connection)") + logger.error(f"demisto-sdk-unable to get mandatory test-modules demisto-mock.py etc {e}") + sys.exit(1) + # Validating docker engine connection + docker_client: docker.DockerClient = docker.from_env() + try: + docker_client.ping() + except (requests.exceptions.ConnectionError, urllib3.exceptions.ProtocolError, docker.errors.APIError): + facts["docker_engine"] = False + print_warning("Can't communicate with Docker daemon - check your docker Engine is ON - Skiping lint, " + "test which require docker!") + logger.info(f"demisto-sdk-Can't communicate with Docker daemon") + logger.debug(f"Docker daemon test passed") - for root, _, _ in os.walk(BETA_INTEGRATIONS_DIR): - if 'Beta_Integrations/' in root and len(root.split('/')) == 2: - all_directories.append(root) + return facts - return all_directories + def _get_packages(self, content_repo: git.Repo, input: str, git: bool, all_packs: bool) -> List[Path]: + """ Get packages paths to run lint command. - def run_dev_packages(self) -> int: - """Runs the Lint command on all given packages. + Args: + content_repo(git.Repo): Content repository object. + input(str): dir pack specified as argument. + git(bool): Perform lint and test only on chaged packs. + all_packs(bool): Whether to run on all packages. Returns: - int. 
0 on success and 1 if any package failed + List[Path]: Pkgs to run lint """ - good_pkgs = [] - fail_pkgs = [] - if not self.parallel: - for project_dir in self.pkgs: - - linter = Linter(project_dir, no_test=not self.run_args['tests'], - no_pylint=not self.run_args['pylint'], no_flake8=not self.run_args['flake8'], - no_mypy=not self.run_args['mypy'], root=self.root, - keep_container=self.keep_container, cpu_num=self.cpu_num, - configuration=self.configuration, no_bandit=not self.run_args['bandit'], - no_pslint=not self.run_args['pslint'], - no_vulture=not self.run_args['vulture'], - requirements_3=self.requirements_for_python3, - requirements_2=self.requirements_for_python2) - run_status_code = linter.run_dev_packages() - if run_status_code > 0: - fail_pkgs.append(project_dir) - else: - good_pkgs.append(project_dir) - - self._print_final_results(good_pkgs=good_pkgs, fail_pkgs=fail_pkgs) - - return 1 if fail_pkgs else 0 - - else: # we run parallel processes - return self.run_parallel_packages(self.pkgs) + pkgs: list + if all_packs or git: + pkgs = LintManager._get_all_packages(content_dir=content_repo.working_dir) + elif not all_packs and not git and not input: + pkgs = [Path().cwd()] + else: + pkgs = [Path(item) for item in input.split(',')] + total_found = len(pkgs) + if git: + pkgs = LintManager._filter_changed_packages(content_repo=content_repo, + pkgs=pkgs) + for pkg in pkgs: + print_v(f"Found changed package {Colors.Fg.cyan}{pkg}{Colors.reset}", + log_verbose=self._verbose) + print(f"Execute lint and test on {Colors.Fg.cyan}{len(pkgs)}/{total_found}{Colors.reset} packages") - def run_parallel_packages(self, pkgs_to_run: List[str]) -> int: - """Runs the Lint command in parallel on several threads. + return pkgs - Args: - pkgs_to_run: The packages to run in parallel + @staticmethod + def _get_all_packages(content_dir: str) -> List[str]: + """Gets all integration, script and beta_integrations in packages and packs in content repo. Returns: - int. 
0 on success and 1 if any package failed + list: A list of integration, script and beta_integration names. """ - print("Starting parallel run for [{}] packages with [{}] max workers.\n".format(len(pkgs_to_run), - self.max_workers)) - fail_pkgs = [] - good_pkgs = [] - - # run CommonServer non parallel to avoid conflicts - # when we modify the file for mypy includes - single_thread_script = [ - get_common_server_dir(''), - get_common_server_dir_pwsh('') - ] - for script_dir in single_thread_script: - if script_dir in pkgs_to_run: - pkgs_to_run.remove(script_dir) - print(f'Running single threaded dir: {script_dir}') - res, _ = self._run_single_package_thread(package_dir=script_dir) - if res == 0: - good_pkgs.append(script_dir) - - else: - fail_pkgs.append(script_dir) - - with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_workers) as executor: - futures_submit = [executor.submit(self._run_single_package_thread, directory) for directory in pkgs_to_run] - for future in list(concurrent.futures.as_completed(futures_submit)): - result = future.result() - status_code = result[0] - package_ran = result[1] - if status_code == 0: - good_pkgs.append(package_ran) + # ￿Get packages from main content path + content_main_pkgs: set = set(Path(content_dir).glob(['Integrations/*/', + 'Scripts/*/', + 'Beta_Integrations/*/'])) + # Get packages from packs path + packs_dir: Path = Path(content_dir) / 'Packs' + content_packs_pkgs: set = set(packs_dir.glob(['*/Integrations/*/', + '*/Scripts/*/', + '*/Beta_Integrations/*/'])) + all_pkgs = content_packs_pkgs.union(content_main_pkgs) + + return list(all_pkgs) - else: - fail_pkgs.append(package_ran) - - return self._print_final_results(good_pkgs=good_pkgs, fail_pkgs=fail_pkgs) + @staticmethod + def _filter_changed_packages(content_repo: git.Repo, pkgs: List[Path]) -> List[Path]: + """ Checks which packages had changes using git (working tree, index, diff between HEAD and master in them and should + run on Lint. 
- def _get_packages_to_run(self) -> List[str]: - """Checks which packages had changes in them and should run on Lint. + Args: + pkgs(List[Path]): pkgs to check Returns: - list[str]. A list of names of packages that should run. + List[Path]: A list of names of packages that should run. """ - print("Filtering out directories that did not change") - pkgs_to_run = [] - - current_branch = run_command(f"git rev-parse --abbrev-ref HEAD") - print(f'current_branch = {current_branch}') - - if os.environ.get('CIRCLE_COMPARE_URL'): - print(f"CIRCLE_COMPARE_URL = {os.environ['CIRCLE_COMPARE_URL']}") - - for directory in self.pkgs: - if self._check_should_run_pkg(pkg_dir=directory, current_branch=current_branch): - pkgs_to_run.append(directory) - - return pkgs_to_run - - def _check_should_run_pkg(self, pkg_dir: str, current_branch: str) -> bool: - """Checks if there is a difference in the package before this Lint run and after it. + print(f"Comparing to {Colors.Fg.cyan}{content_repo.remote()}/master{Colors.reset} using branch {Colors.Fg.cyan}" + f"{content_repo.active_branch}{Colors.reset}") + staged_files = {content_repo.working_dir / Path(item.b_path).parent for item in + content_repo.active_branch.commit.tree.diff(None, paths=pkgs)} + last_common_commit = content_repo.merge_base(content_repo.active_branch.commit, + content_repo.remote().refs.master) + changed_from_master = {content_repo.working_dir / Path(item.b_path).parent for item in + content_repo.active_branch.commit.tree.diff(last_common_commit, paths=pkgs)} + all_changed = staged_files.union(changed_from_master) + pkgs_to_check = all_changed.intersection(pkgs) + + return list(pkgs_to_check) + + def run_dev_packages(self, parallel: int, no_flake8: bool, no_bandit: bool, no_mypy: bool, no_pylint: bool, + no_vulture: bool, no_test: bool, no_pwsh_analyze: bool, no_pwsh_test: bool, keep_container: bool, + test_xml: str, json_report: str) -> int: + """ Runs the Lint command on all given packages. 
Args: - pkg_dir: The package directory to check. + parallel(int): Whether to run command on multiple threads + no_flake8(bool): Whether to skip flake8 + no_bandit(bool): Whether to skip bandit + no_mypy(bool): Whether to skip mypy + no_vulture(bool): Whether to skip vulture + no_pylint(bool): Whether to skip pylint + no_test(bool): Whether to skip pytest + no_pwsh_analyze(bool): Whether to skip powershell code analyzing + no_pwsh_test(bool): whether to skip powershell tests + keep_container(bool): Whether to keep the test container + test_xml(str): Path for saving pytest xml results + json_report(str): Path for store json report Returns: - bool. True if there is a difference and False otherwise. + int: exit code by falil exit codes by var EXIT_CODES """ + lint_status = { + "fail_packs_flake8": [], + "fail_packs_bandit": [], + "fail_packs_mypy": [], + "fail_packs_vulture": [], + "fail_packs_pylint": [], + "fail_packs_pytest": [], + "fail_packs_pwsh_analyze": [], + "fail_packs_pwsh_test": [], + "fail_packs_image": [], + } - # This will check if there are any changes between current master version and the last commit in master - if os.environ.get('CIRCLE_COMPARE_URL') and current_branch == "master": - changes_from_last_commit_vs_master = run_command("git diff --name-only HEAD..HEAD^") - else: - # This will return a list of all files that changed up until the last commit (not including any changes - # which were made but not yet committed). - changes_from_last_commit_vs_master = run_command(f"git diff origin/master...{current_branch} --name-only") - - # This will check if any changes were made to the files in the package (pkg_dir) but are yet to be committed. 
- changes_since_last_commit = run_command(f"git diff --name-only -- {pkg_dir}") - - # if the package is in the list of changed files or if any files within the package were changed - # but not yet committed, return True - if pkg_dir in changes_from_last_commit_vs_master or len(changes_since_last_commit) > 0: - return True + # Python or powershell or both + pkgs_type = [] + + # Detailed packages status + pkgs_status = {} + + # Skiped lint and test codes + skipped_code = build_skipped_exit_code(no_flake8=no_flake8, no_bandit=no_bandit, no_mypy=no_mypy, + no_vulture=no_vulture, + no_pylint=no_pylint, no_test=no_test, no_pwsh_analyze=no_pwsh_analyze, + no_pwsh_test=no_pwsh_test, docker_engine=self._facts["docker_engine"]) + + with concurrent.futures.ThreadPoolExecutor(max_workers=parallel) as executor: + return_exit_code: int = 0 + results = [] + # Executing lint checks in diffrent threads + for pack in self._pkgs: + linter: Linter = Linter(pack_dir=pack, + content_repo="" if not self._facts["content_repo"] else + Path(self._facts["content_repo"].working_dir), + req_2=self._facts["requirements_2"], + req_3=self._facts["requirements_3"], + docker_engine=self._facts["docker_engine"]) + results.append(executor.submit(fn=linter.run_dev_packages, + no_flake8=no_flake8, + no_bandit=no_bandit, + no_mypy=no_mypy, + no_vulture=no_vulture, + no_pylint=no_pylint, + no_test=no_test, + no_pwsh_analyze=no_pwsh_analyze, + no_pwsh_test=no_pwsh_test, + modules=self._facts["test_modules"], + keep_container=keep_container, + test_xml=test_xml)) + try: + for future in concurrent.futures.as_completed(results): + pkg_status = future.result() + pkgs_status[pkg_status["pkg"]] = pkg_status + if pkg_status["exit_code"]: + for check, code in EXIT_CODES.items(): + if pkg_status["exit_code"] & code: + lint_status[f"fail_packs_{check}"].append(pkg_status["pkg"]) + if not return_exit_code & pkg_status["exit_code"]: + return_exit_code += pkg_status["exit_code"] + if pkg_status["pack_type"] not in 
pkgs_type: + pkgs_type.append(pkg_status["pack_type"]) + except KeyboardInterrupt: + print_warning("Stop demisto-sdk lint - Due to 'Ctrl C' signal") + try: + executor.shutdown(wait=False) + except Exception: + pass + return 1 + except Exception as e: + print_warning(f"Stop demisto-sdk lint - Due to Exception {e}") + try: + executor.shutdown(wait=False) + except Exception: + pass + return 1 + + self._report_results(lint_status=lint_status, + pkgs_status=pkgs_status, + return_exit_code=return_exit_code, + skipped_code=skipped_code, + pkgs_type=pkgs_type) + self._create_report(pkgs_status=pkgs_status, + path=json_report) + + return return_exit_code + + def _report_results(self, lint_status: dict, pkgs_status: dict, return_exit_code: int, skipped_code: int, pkgs_type: list): + """ Log report to console - # if no changes were made to the package - return False. - return False + Args: + lint_status(dict): Overall lint status + pkgs_status(dict): All pkgs status dict + return_exit_code(int): exit code will indicate which lint or test failed + skipped_code(int): skipped test code + pkgs_type(list): list determine which pack type exits. + + """ + self.report_pass_lint_checks(return_exit_code=return_exit_code, + skipped_code=skipped_code, + pkgs_type=pkgs_type) + self.report_failed_lint_checks(return_exit_code=return_exit_code, + pkgs_status=pkgs_status, + lint_status=lint_status) + self.report_unit_tests(return_exit_code=return_exit_code, + pkgs_status=pkgs_status, + lint_status=lint_status) + self.report_failed_image_creation(return_exit_code=return_exit_code, + pkgs_status=pkgs_status, + lint_status=lint_status) + self.report_summary(lint_status=lint_status) - def _run_single_package_thread(self, package_dir: str) -> Tuple[int, str]: - """Run a thread of lint command. 
+ @staticmethod + def report_pass_lint_checks(return_exit_code: int, skipped_code: int, pkgs_type: list): + """ Log PASS/FAIL on each lint/test Args: - package_dir (str): The package directory to run the command on. - - Returns: - Tuple[int, str]. The result code for the lint command and the package name. - """ - try: - linter = Linter(package_dir, no_test=not self.run_args['tests'], - no_pylint=not self.run_args['pylint'], no_flake8=not self.run_args['flake8'], - no_mypy=not self.run_args['mypy'], root=self.root, - keep_container=self.keep_container, cpu_num=self.cpu_num, configuration=self.configuration, - lock=LOCK, no_bandit=not self.run_args['bandit'], - no_vulture=not self.run_args['vulture'], - no_pslint=not self.run_args['pslint'], - requirements_3=self.requirements_for_python3, - requirements_2=self.requirements_for_python2) - return linter.run_dev_packages(), package_dir - except Exception as ex: - print_error(f'Failed running lint for: {package_dir}. Exception: {ex}') - return 1, package_dir + return_exit_code(int): exit code will indicate which lint or test failed + skipped_code(int): skipped test code. + pkgs_type(list): list determine which pack type exits. 
+ """ + longest_check_key = len(max(EXIT_CODES.keys(), key=len)) + for check, code in EXIT_CODES.items(): + spacing = longest_check_key - len(check) + check_str = check.capitalize().replace('_', ' ') + if (check in PY_CHCEKS and TYPE_PYTHON in pkgs_type) or (check in PWSH_CHECKS and TYPE_PWSH in pkgs_type): + if code & skipped_code: + print(f"{check_str} {' ' * spacing}- {Colors.Fg.cyan}[SKIPPED]{Colors.reset}") + elif code & return_exit_code: + print(f"{check_str} {' ' * spacing}- {Colors.Fg.red}[FAIL]{Colors.reset}") + else: + print(f"{check_str} {' ' * spacing}- {Colors.Fg.green}[PASS]{Colors.reset}") + elif check != 'image': + print(f"{check_str} {' ' * spacing}- {Colors.Fg.cyan}[SKIPPED]{Colors.reset}") @staticmethod - def create_failed_unittests_file(failed_unittests, outfile): - """ - Creates a file with failed unittests. - The file will be read in slack_notifier script - which will send the failed unittests to the content-team - channel. - """ - with open(outfile, "w") as failed_unittests_file: - failed_unittests_file.write('\n'.join(failed_unittests)) - - def _print_final_results(self, good_pkgs: List[str], fail_pkgs: List[str]) -> int: - """Print the results of parallel lint command. + def report_failed_lint_checks(lint_status: dict, pkgs_status: dict, return_exit_code: int): + """ Log failed lint log if exsits Args: - good_pkgs (list): A list of packages that passed lint. - fail_pkgs (list): A list of packages that failed lint - - Returns: - int. 
0 on success and 1 if any package failed + lint_status(dict): Overall lint status + pkgs_status(dict): All pkgs status dict + return_exit_code(int): exit code will indicate which lint or test failed """ - if self.outfile: - self.create_failed_unittests_file(fail_pkgs, self.outfile) + for check in ["flake8", "bandit", "mypy", "vulture"]: + if EXIT_CODES[check] & return_exit_code: + sentence = f" {check.capitalize()} errors " + print(f"\n{Colors.Fg.red}{'#' * len(sentence)}{Colors.reset}") + print(f"{Colors.Fg.red}{sentence}{Colors.reset}") + print(f"{Colors.Fg.red}{'#' * len(sentence)}{Colors.reset}\n") + for fail_pack in lint_status[f"fail_packs_{check}"]: + print(f"{Colors.Fg.red}{pkgs_status[fail_pack]['pkg']}{Colors.reset}") + print(pkgs_status[fail_pack][f"{check}_errors"]) + + for check in ["pylint", "pwsh_analyze", "pwsh_test"]: + check_str = check.capitalize().replace('_', ' ') + if EXIT_CODES[check] & return_exit_code: + sentence = f" {check_str} errors " + print(f"\n{Colors.Fg.red}{'#' * len(sentence)}{Colors.reset}") + print(f"{Colors.Fg.red}{sentence}{Colors.reset}") + print(f"{Colors.Fg.red}{'#' * len(sentence)}{Colors.reset}\n") + for fail_pack in lint_status[f"fail_packs_{check}"]: + print(f"{Colors.Fg.red}{fail_pack}{Colors.reset}") + print(pkgs_status[fail_pack]["images"][0][f"{check}_errors"]) + + def report_unit_tests(self, lint_status: dict, pkgs_status: dict, return_exit_code: int): + """ Log failed unit-tests , if verbosity specified will log also success unit-tests - if fail_pkgs: - print_color("\n******* FAIL PKGS: *******", LOG_COLORS.RED) - print_color("\n\t{}\n".format("\n\t".join(fail_pkgs)), LOG_COLORS.RED) + Args: + lint_status(dict): Overall lint status + pkgs_status(dict): All pkgs status dict + return_exit_code(int): exit code will indicate which lint or test failed + """ + # Indentation config + preferred_width = 100 + pack_indent = 2 + pack_prefix = " " * pack_indent + "- Package: " + wrapper_pack = 
textwrap.TextWrapper(initial_indent=pack_prefix, + width=preferred_width, + subsequent_indent=' ' * len(pack_prefix)) + docker_indent = 6 + docker_prefix = " " * docker_indent + "- Image: " + wrapper_docker_image = textwrap.TextWrapper(initial_indent=docker_prefix, + width=preferred_width, + subsequent_indent=' ' * len(docker_prefix)) + test_indent = 9 + test_prefix = " " * test_indent + "- " + wrapper_test = textwrap.TextWrapper(initial_indent=test_prefix, width=preferred_width, + subsequent_indent=' ' * len(test_prefix)) + error_indent = 9 + error_first_prefix = " " * error_indent + " Error: " + error_sec_prefix = " " * error_indent + " " + wrapper_first_error = textwrap.TextWrapper(initial_indent=error_first_prefix, width=preferred_width, + subsequent_indent=' ' * len(error_first_prefix)) + wrapper_sec_error = textwrap.TextWrapper(initial_indent=error_sec_prefix, width=preferred_width, + subsequent_indent=' ' * len(error_sec_prefix)) + + # Log passed unit-tests + headline_printed = False + passed_printed = False + for pkg, status in pkgs_status.items(): + if status.get("images"): + if status.get("images")[0].get("pytest_json", {}).get("report", {}).get("tests"): + if (not headline_printed and self._verbose) and (EXIT_CODES["pytest"] & return_exit_code): + # Log unit-tests + sentence = " Unit Tests " + print(f"\n{Colors.Fg.cyan}{'#' * len(sentence)}") + print(f"{sentence}") + print(f"{'#' * len(sentence)}{Colors.reset}") + headline_printed = True + if not passed_printed: + print_v(f"\n{Colors.Fg.green}Passed Unit-tests:{Colors.reset}", log_verbose=self._verbose) + passed_printed = True + print_v(wrapper_pack.fill(f"{Colors.Fg.green}{pkg}{Colors.reset}"), log_verbose=self._verbose) + for image in status["images"]: + if not image.get("image_errors"): + tests = image.get("pytest_json", {}).get("report", {}).get("tests") + if tests: + print_v(wrapper_docker_image.fill(image['image']), log_verbose=self._verbose) + for test_case in tests: + if test_case.get("call", 
{}).get("outcome") != "failed": + name = re.sub(pattern=r"\[.*\]", + repl="", + string=test_case.get("name")) + print_v(wrapper_test.fill(name), log_verbose=self._verbose) + + # Log failed unit-tests + if EXIT_CODES["pytest"] & return_exit_code: + if not headline_printed: + # Log unit-tests + sentence = " Unit Tests " + print(f"\n{Colors.Fg.cyan}{'#' * len(sentence)}") + print(f"{sentence}") + print(f"{'#' * len(sentence)}{Colors.reset}") + print(f"\n{Colors.Fg.red}Failed Unit-tests:{Colors.reset}") + for fail_pack in lint_status["fail_packs_pytest"]: + print(wrapper_pack.fill(f"{Colors.Fg.red}{fail_pack}{Colors.reset}")) + for image in pkgs_status[fail_pack]["images"]: + tests = image.get("pytest_json", {}).get("report", {}).get("tests") + for test_case in tests: + if test_case.get("call", {}).get("outcome") == "failed": + name = re.sub(pattern=r"\[.*\]", + repl="", + string=test_case.get("name")) + print(wrapper_test.fill(name)) + if test_case.get("call", {}).get("longrepr"): + for i in range(len(test_case.get("call", {}).get("longrepr"))): + if i == 0: + print(wrapper_first_error.fill( + test_case.get("call", {}).get("longrepr")[i])) + else: + print(wrapper_sec_error.fill(test_case.get("call", {}).get("longrepr")[i])) + print('\n') - if good_pkgs: - print_color("\n******* SUCCESS PKGS: *******", LOG_COLORS.GREEN) - print_color("\n\t{}\n".format("\n\t".join(good_pkgs)), LOG_COLORS.GREEN) + @staticmethod + def report_failed_image_creation(lint_status: dict, pkgs_status: dict, return_exit_code: int): + """ Log failed image creation if occured - if not good_pkgs and not fail_pkgs: - print_color("\n******* No changed packages found *******\n", LOG_COLORS.YELLOW) + Args: + lint_status(dict): Overall lint status + pkgs_status(dict): All pkgs status dict + return_exit_code(int): exit code will indicate which lint or test failed + """ + # Indentation config + preferred_width = 100 + indent = 2 + pack_prefix = " " * indent + "- Package: " + wrapper_pack = 
textwrap.TextWrapper(initial_indent=pack_prefix, + width=preferred_width, + subsequent_indent=' ' * len(pack_prefix)) + image_prefix = " " * indent + " Image: " + wrapper_image = textwrap.TextWrapper(initial_indent=image_prefix, width=preferred_width, + subsequent_indent=' ' * len(image_prefix)) + indent_error = 4 + error_prefix = " " * indent_error + " Error: " + wrapper_error = textwrap.TextWrapper(initial_indent=error_prefix, width=preferred_width, + subsequent_indent=' ' * len(error_prefix)) + # Log failed images creation + if EXIT_CODES["image"] & return_exit_code: + sentence = f" Image creation errors " + print(f"\n{Colors.Fg.red}{'#' * len(sentence)}{Colors.reset}") + print(f"{Colors.Fg.red}{sentence}{Colors.reset}") + print(f"{Colors.Fg.red}{'#' * len(sentence)}{Colors.reset}") + for fail_pack in lint_status["fail_packs_image"]: + print(wrapper_pack.fill(f"{Colors.Fg.cyan}{fail_pack}{Colors.reset}")) + for image in pkgs_status[fail_pack]["images"]: + print(wrapper_image.fill(image["image"])) + print(wrapper_error.fill(image["image_errors"])) + + def report_summary(self, lint_status: dict): + """ Log failed image creation if occured - if fail_pkgs: - return 1 + Args: + lint_status(dict): Overall lint status + """ + preferred_width = 100 + fail_pack_indent = 3 + fail_pack_prefix = " " * fail_pack_indent + "- " + wrapper_fail_pack = textwrap.TextWrapper(initial_indent=fail_pack_prefix, width=preferred_width, + subsequent_indent=' ' * len(fail_pack_prefix)) + # intersection of all failed packages + failed = set() + for packs in lint_status.values(): + failed = failed.union(packs) + # Log unit-tests summary + sentence = " Summary " + print(f"\n{Colors.Fg.cyan}{'#' * len(sentence)}") + print(f"{sentence}") + print(f"{'#' * len(sentence)}{Colors.reset}") + print(f"Packages: {len(self._pkgs)}") + print(f"Packages PASS: {Colors.Fg.green}{len(self._pkgs) - len(failed)}{Colors.reset}") + print(f"Packages FAIL: {Colors.Fg.red}{len(failed)}{Colors.reset}") + if failed: 
+ print(f"Failed packages:") + for fail_pack in failed: + print(f"{Colors.Fg.red}{wrapper_fail_pack.fill(fail_pack)}{Colors.reset}") - else: - return 0 + @staticmethod + def _create_report(pkgs_status: dict, path: str): + if path: + json_path = Path(path) / "lint_report.json" + json.dump(fp=json_path.open(mode='w'), + obj=pkgs_status, + indent=4, + sort_keys=True) diff --git a/demisto_sdk/commands/lint/linter.py b/demisto_sdk/commands/lint/linter.py index 0e177536ba..5937b1df35 100644 --- a/demisto_sdk/commands/lint/linter.py +++ b/demisto_sdk/commands/lint/linter.py @@ -1,533 +1,850 @@ -import os -import io -import sys -import yaml -import time -import shutil +# STD python packages import hashlib -import threading -import subprocess -from datetime import datetime -import requests -from typing import List - -from demisto_sdk.commands.common.constants import Errors, TYPE_TO_EXTENSION, TYPE_PWSH, TYPE_PYTHON -from demisto_sdk.commands.unify.unifier import Unifier -from demisto_sdk.commands.common.configuration import Configuration -from demisto_sdk.commands.common.tools import print_v, get_all_docker_images, get_python_version, \ - print_error, print_color, LOG_COLORS, get_yml_paths_in_dir, run_command, get_log_verbose, \ - print_warning, get_common_server_path, get_common_server_path_pwsh +import io +import json +import logging +import os +from typing import List, Optional, Tuple + +# 3-rd party packages +import docker +import docker.errors +import docker.models.containers +import requests.exceptions +import urllib3.exceptions +from demisto_sdk.commands.common.constants import TYPE_PWSH, TYPE_PYTHON +# Local packages +from demisto_sdk.commands.common.tools import (get_all_docker_images, + run_command_os) +from demisto_sdk.commands.lint.commands_builder import ( + build_bandit_command, build_flake8_command, build_mypy_command, + build_pwsh_analyze_command, build_pwsh_test_command, build_pylint_command, + build_pytest_command, build_vulture_command) +from 
demisto_sdk.commands.lint.helpers import (EXIT_CODES, FAIL, RERUN, RL, + SUCCESS, add_tmp_lint_files, + add_typing_module, + copy_dir_to_container, + get_file_from_container, + get_python_version_from_image, + stream_docker_container_output) +from jinja2 import Environment, FileSystemLoader, exceptions +from ruamel.yaml import YAML +from wcmatch.pathlib import NEGATE, Path + +logger = logging.getLogger('demisto-sdk') class Linter: - """Linter used to activate lint command. + """ Linter used to activate lint command on single package Attributes: - project_dir (str): The directory to run lint on. - no_test (bool): Whether to skip pytest. - no_pylint (bool): Whether to skip pylint. - no_flake8 (bool): Whether to skip flake8. - no_mypy (bool): Whether to skip mypy. - verbose (bool): Whether to output a detailed response. - root (bool): Whether to run pytest container with root user. - keep_container (bool): Whether to keep the test container. - cpu_num (int): Number of CPUs to run pytest on. - configuration (Configuration): The system configuration. - lock (threading.Lock): A mutex lock to be used for multi-thread lint. 
- """ - common_server_target_path = "CommonServerPython.py" - common_server_pack_remote_path = "https://raw.githubusercontent.com/demisto/content/master/Packs/Base/Scripts/" \ - "CommonServerPython/CommonServerPython.py" - - def __init__(self, project_dir: str, no_test: bool = False, no_pylint: bool = False, no_flake8: bool = False, - no_mypy: bool = False, root: bool = False, keep_container: bool = False, - cpu_num: int = 0, configuration: Configuration = Configuration(), - lock: threading.Lock = threading.Lock(), no_bandit: bool = False, no_pslint: bool = False, - requirements_3: str = '', - requirements_2: str = '', no_vulture: bool = False): - - if no_test and no_pylint and no_flake8 and no_mypy and no_bandit and no_vulture: - raise ValueError("Nothing to run as all --no-* options specified.") - - self.configuration = configuration - dev_scripts_dir = os.path.join(self.configuration.sdk_env_dir, 'common', 'dev_sh_scripts') - self.run_dev_tasks_script_name = 'run_dev_tasks.sh' - self.run_dev_tasks_script_pwsh_name = 'run_dev_tasks_pwsh.sh' - self.run_mypy_script_name = 'run_mypy.sh' - self.container_setup_script_name = 'pkg_dev_container_setup.sh' - self.container_setup_script_pwsh_name = 'pkg_dev_container_setup_pwsh.sh' - self.cert_file = os.path.join(dev_scripts_dir, 'panw-cert.crt') - self.run_dev_tasks_script = os.path.join(dev_scripts_dir, self.run_dev_tasks_script_name) - self.run_dev_tasks_script_pwsh = os.path.join(dev_scripts_dir, self.run_dev_tasks_script_pwsh_name) - self.container_setup_script = os.path.join(dev_scripts_dir, self.container_setup_script_name) - self.container_setup_script_pwsh = os.path.join(dev_scripts_dir, self.container_setup_script_pwsh_name) - self.run_mypy_script = os.path.join(dev_scripts_dir, self.run_mypy_script_name) - self.docker_login_completed = False - self.project_dir = os.path.abspath(os.path.join(self.configuration.env_dir, project_dir)) - if self.project_dir[-1] != os.sep: - self.project_dir = 
os.path.join(self.project_dir, '') - - self.root = root - self.keep_container = keep_container - self.cpu_num = cpu_num - self.common_server_created = False - self.run_args = { - 'pylint': not no_pylint, - 'flake8': not no_flake8, - 'mypy': not no_mypy, - 'bandit': not no_bandit, - 'tests': not no_test, - 'pslint': not no_pslint, - 'vulture': not no_vulture, + pack_dir(Path): Pack to run lint on. + content_repo(Path): Git repo object of content repo. + req_2(list): requirements for docker using python2. + req_3(list): requirements for docker using python3. + docker_engine(bool): Wheter docker engine detected by docker-sdk. + """ + + def __init__(self, pack_dir: Path, content_repo: Path, req_3: list, req_2: list, docker_engine: bool): + self._req_3 = req_3 + self._req_2 = req_2 + self._content_repo = content_repo + self._pack_abs_dir = pack_dir + self._pack_name = None + # Docker client init + if docker_engine: + self._docker_client: docker.DockerClient = docker.from_env() + self._docker_hub_login = self._docker_login() + # Facts gathered regarding pack lint and test + self._facts = { + "images": [], + "python_version": 0, + "env_vars": {}, + "test": False, + "lint_files": [], + "additional_requirements": [], + "docker_engine": docker_engine + } + # Pack lint status object - visualize it + self._pkg_lint_status = { + "pkg": None, + "pack_type": None, + "path": str(self._content_repo), + "errors": [], + "images": [], + "flake8_errors": None, + "bandit_errors": None, + "mypy_errors": None, + "vulture_errors": None, + "exit_code": SUCCESS } - self.lock = lock - self.requirements_3 = requirements_3 - self.requirements_2 = requirements_2 - # load yaml - _, yml_path = get_yml_paths_in_dir(self.project_dir, Errors.no_yml_file(self.project_dir)) - if not yml_path: - raise ValueError(f'yml path failed for: {self.project_dir}') - print_v('Using yaml file: {}'.format(yml_path)) - with open(yml_path, 'r') as yml_file: - yml_data = yaml.safe_load(yml_file) - self.script_obj = 
yml_data - if isinstance(self.script_obj.get('script'), dict): - self.script_obj = self.script_obj.get('script') - self.script_type = self.script_obj.get('type') - - def get_common_server_python(self) -> bool: - """Getting common server python in not exists changes self.common_server_created to True if needed. + + def run_dev_packages(self, no_flake8: bool, no_bandit: bool, no_mypy: bool, no_pylint: bool, no_vulture: bool, + no_pwsh_analyze: bool, no_pwsh_test: bool, no_test: bool, modules: dict, keep_container: bool, + test_xml: str) -> dict: + """ Run lint and tests on single package + Perfroming the follow: + 1. Run the lint on OS - flake8, bandit, mypy. + 2. Run in package docker - pylint, pytest. + + Args: + no_flake8(bool): Whether to skip flake8 + no_bandit(bool): Whether to skip bandit + no_mypy(bool): Whether to skip mypy + no_vulture(bool): Whether to skip vulture + no_pylint(bool): Whether to skip pylint + no_test(bool): Whether to skip pytest + no_pwsh_analyze(bool): Whether to skip powershell code analyzing + no_pwsh_test(bool): whether to skip powershell tests + modules(dict): Mandatory modules to locate in pack path (CommonServerPython.py etc) + keep_container(bool): Whether to keep the test container + test_xml(str): Path for saving pytest xml results Returns: - bool. 
True if exists/created, else False + dict: lint and test all status, pkg status) """ - # If not CommonServerPython is dir - if not os.path.isfile(os.path.join(self.project_dir, self.common_server_target_path)): - # Get file from git + # Gather information for lint check information + skip = self._gather_facts(modules) + # If not python pack - skip pack + if skip: + return self._pkg_lint_status + + # Locate mandatory files in pack path - for more info checkout the context manager LintFiles + with add_tmp_lint_files(content_repo=self._content_repo, + pack_path=self._pack_abs_dir, + lint_files=self._facts["lint_files"], + modules=modules, + pack_type=self._pkg_lint_status["pack_type"]): + # Run lint check on host - flake8, bandit, mypy + if self._pkg_lint_status["pack_type"] == TYPE_PYTHON: + self._run_lint_in_host(no_flake8=no_flake8, + no_bandit=no_bandit, + no_mypy=no_mypy, + no_vulture=no_vulture) + # Run lint and test check on pack docker image + if self._facts["docker_engine"]: + self._run_lint_on_docker_image(no_pylint=no_pylint, + no_test=no_test, + no_pwsh_analyze=no_pwsh_analyze, + no_pwsh_test=no_pwsh_test, + keep_container=keep_container, + test_xml=test_xml) + + return self._pkg_lint_status + + def _gather_facts(self, modules: dict) -> bool: + """ Gathering facts about the package - python version, docker images, vaild docker image, yml parsing + Args: + modules(dict): Test mandatory modules to be ignore in lint check + + Returns: + bool: Indicating if to continue further or not, if False exit Thread, Else continue. 
+ """ + # Loooking for pkg yaml + yml_file: Optional[Path] = self._pack_abs_dir.glob([rf'*.yaml', rf'*.yml', r'!*unified*.yml'], + flags=NEGATE) + + if not yml_file: + logger.info(f"{self._pack_abs_dir} - Skiping no yaml file found {yml_file}") + self._pkg_lint_status["errors"].append('Unable to find yml file in package') + return True + else: try: - res = requests.get(self.common_server_pack_remote_path, verify=False, timeout=10) - with open(os.path.join(self.project_dir, self.common_server_target_path), "w+") as f: - f.write(res.text) - self.common_server_created = True - except requests.exceptions.RequestException: - print_error(Errors.no_common_server_python(self.common_server_pack_remote_path)) - return False - return True - - def remove_common_server_python(self): - """checking if file exists and removing it.""" - if self.common_server_created: - os.remove(os.path.join(self.project_dir, self.common_server_target_path)) - - def run_py_non_docker_tasks(self, dockers: List[str]) -> int: - return_code = 0 - py_num = get_python_version(dockers[0]) - self._setup_dev_files_py(py_num) - if self.run_args['flake8']: - result_val = self.run_flake8(py_num) - if result_val: - return_code = result_val - - if self.run_args['mypy']: - result_val = self.run_mypy(py_num) - if result_val: - return_code = result_val - - if self.run_args['bandit']: - result_val = self.run_bandit(py_num) - if result_val: - return_code = result_val - - if self.run_args['vulture']: - result_val = self.run_vulture(py_num) - if result_val: - return_code = result_val - - return return_code - - def run_dev_packages(self) -> int: - return_code = 0 - supported_types = (TYPE_PYTHON, TYPE_PWSH) - if self.script_type not in supported_types: - print_warning(f'Script is not of types: {supported_types}. Found type: {self.script_type}. 
Nothing to do.') - return 0 - - dockers = get_all_docker_images(self.script_obj) - print_color("============ Starting process for: {} ============\n".format(self.project_dir), - LOG_COLORS.YELLOW) - if self.script_type == TYPE_PYTHON: - return_code = self.run_py_non_docker_tasks(dockers) - if self.script_type == TYPE_PWSH: - self._setup_dev_files_pwsh() - - for docker in dockers: - for try_num in (1, 2): - print_v("Using docker image: {}".format(docker)) + yml_file = next(yml_file) + except StopIteration: + return True + # Get pack name + self._pack_name = yml_file.stem + log_prompt = f"{self._pack_name} - Facts" + self._pkg_lint_status["pkg"] = yml_file.stem + logger.info(f"{log_prompt} - Using yaml file {yml_file}") + # Parsing pack yaml - inorder to verify if check needed + try: + script_obj: dict = {} + yml_obj: dict = YAML().load(yml_file) + if isinstance(yml_obj, dict): + script_obj: dict = yml_obj.get('script') if isinstance(yml_obj.get('script'), dict) else yml_obj + self._pkg_lint_status["pack_type"] = script_obj.get('type') + except (FileNotFoundError, IOError, KeyError): + self._pkg_lint_status["errors"].append('Unable to parse package yml') + return True + # return no check needed if not python pack + if self._pkg_lint_status["pack_type"] not in (TYPE_PYTHON, TYPE_PWSH): + logger.info(f"{log_prompt} - Skippring due to not Python, Powershell package - Pack is" + f" {self._pkg_lint_status['pack_type']}") + return True + # Docker images + if self._facts["docker_engine"]: + logger.info(f"{log_prompt} - Pulling docker images, can take up to 1-2 minutes if not exists locally ") + self._facts["images"] = [[image, -1] for image in get_all_docker_images(script_obj=script_obj)] + # Gather enviorment variables for docker execution + self._facts["env_vars"] = { + "CI": os.getenv("CI", False), + "DEMISTO_LINT_UPDATE_CERTS": os.getenv('DEMISTO_LINT_UPDATE_CERTS', "yes") + } + lint_files = set() + # Facts for python pack + if self._pkg_lint_status["pack_type"] == 
TYPE_PYTHON:
+            if self._facts["docker_engine"]:
+                # Getting python version from docker image - verifying if not valid docker image configured
+                for image in self._facts["images"]:
+                    py_num: float = get_python_version_from_image(image=image[0])
+                    image[1] = py_num
+                    logger.info(f"{self._pack_name} - Facts - {image[0]} - Python {py_num}")
+                    if not self._facts["python_version"]:
+                        self._facts["python_version"] = py_num
+            # Checking whether *test* exists in package
+            self._facts["test"] = True if next(self._pack_abs_dir.glob([r'test_*.py', r'*_test.py']), None) else False
+            if self._facts["test"]:
+                logger.info(f"{log_prompt} - Tests found")
+            else:
+                logger.info(f"{log_prompt} - Tests not found")
+            # Gather package requirements embedded in test-requirements.txt file
+            test_requirements = self._pack_abs_dir / 'test-requirements.txt'
+            if test_requirements.exists():
+                try:
+                    additional_req = test_requirements.read_text(encoding='utf-8')
+                    self._facts["additional_requirements"].extend(additional_req)
+                    logger.info(f"{log_prompt} - Additional package Pypi packages found - {additional_req}")
+                except (FileNotFoundError, IOError):
+                    self._pkg_lint_status["errors"].append('Unable to parse test-requirements.txt in package')
+            # Get lint files
+            lint_files = set(self._pack_abs_dir.glob(["*.py", "!*_test.py", "!test_*.py", "!__init__.py", "!*.tmp"],
+                                                     flags=NEGATE))
+        # Facts for Powershell pack
+        elif self._pkg_lint_status["pack_type"] == TYPE_PWSH:
+            # Get lint files
+            lint_files = set(
+                self._pack_abs_dir.glob(["*.ps1", "!*Tests.ps1", "CommonServerPowerShell.ps1", "demistomock.ps1"],
+                                        flags=NEGATE))
+        if 'commonserver' in self._pack_abs_dir.name.lower():
+            if self._pkg_lint_status["pack_type"] == TYPE_PWSH:
+                self._facts["lint_files"] = [Path(self._pack_abs_dir / 'CommonServerPowerShell.ps1')]
+            else:
+                test_modules = {self._pack_abs_dir / module.name for module in modules.keys()}
+                lint_files = lint_files.difference(test_modules)
+        self._facts["lint_files"] = list(lint_files)
+ 
if self._facts["lint_files"]: + for lint_file in self._facts["lint_files"]: + logger.info(f"{log_prompt} - Lint file {lint_file}") + else: + logger.info(f"{log_prompt} - Lint files not found") - try: - if self.run_args['tests'] or self.run_args['pylint']: - docker_image_created = self._docker_image_create(docker) - output, status_code = self._docker_run(docker_image_created) - with self.lock: - print_color("\n========== Running tests/pylint for: {} =========".format(self.project_dir), - LOG_COLORS.YELLOW) - if status_code == 1: - raise subprocess.CalledProcessError(*output) - else: - print(output) - print_color("============ Finished process for: {} " - "with docker: {} ============\n".format(self.project_dir, docker), - LOG_COLORS.GREEN) - - break # all is good no need to retry - except subprocess.CalledProcessError as ex: - with self.lock: - if ex.output: - print_color("=========================== ERROR IN {}===========================" - "\n{}\n".format(self.project_dir, ex.output), LOG_COLORS.RED) - else: - print_color("========= Test Failed on {}, Look at the error/s above ========\n".format( - self.project_dir), LOG_COLORS.RED) - return_code = 1 - - if not get_log_verbose(): - sys.stderr.write("Need a more detailed log? try running with the -v options as so: \n{} -v\n\n" - .format(" ".join(sys.argv[:]))) - - # circle ci docker setup sometimes fails on - if try_num > 1 or not ex.output or 'read: connection reset by peer' not in ex.output: - return 2 - else: - sys.stderr.write("Retrying as failure seems to be docker communication related...\n") - - finally: - sys.stdout.flush() - sys.stderr.flush() - - return return_code - - def run_flake8(self, py_num) -> int: - """Runs flake8 + return False + + def _run_lint_in_host(self, no_flake8: bool, no_bandit: bool, no_mypy: bool, no_vulture: bool): + """ Run lint check on host + + Args: + no_flake8(bool): Whether to skip flake8. + no_bandit(bool): Whether to skip bandit. + no_mypy(bool): Whether to skip mypy. 
+ no_vulture(bool): Whether to skip Vulture. + """ + if self._facts["lint_files"]: + for lint_check in ["flake8", "bandit", "mypy", "vulture"]: + exit_code: int = SUCCESS + output: str = "" + if lint_check == "flake8" and not no_flake8: + exit_code, output = self._run_flake8(py_num=self._facts["images"][0][1], + lint_files=self._facts["lint_files"]) + elif lint_check == "bandit" and not no_bandit: + exit_code, output = self._run_bandit(lint_files=self._facts["lint_files"]) + elif lint_check == "mypy" and not no_mypy and self._facts["docker_engine"]: + exit_code, output = self._run_mypy(py_num=self._facts["images"][0][1], + lint_files=self._facts["lint_files"]) + elif lint_check == "vulture" and not no_vulture and self._facts["docker_engine"]: + exit_code, output = self._run_vulture(py_num=self._facts["python_version"], + lint_files=self._facts["lint_files"]) + if exit_code: + self._pkg_lint_status["exit_code"] |= EXIT_CODES[lint_check] + self._pkg_lint_status[f"{lint_check}_errors"] = output + + def _run_flake8(self, py_num: float, lint_files: List[Path]) -> Tuple[int, str]: + """ Runs flake8 in pack dir Args: - py_num (int): The python version in use + py_num(float): The python version in use + lint_files(List[Path]): file to perform lint Returns: - int. 0 if flake8 is successful, 1 otherwise. 
+ int: 0 on successful else 1, errors + str: Bandit errors """ - lint_files = self._get_lint_files() - python_exe = 'python2' if py_num < 3 else 'python3' - print_v('Using: {} to run flake8'.format(python_exe)) - output = run_command(f'{python_exe} -m flake8 {self.project_dir}', cwd=self.configuration.env_dir) - with self.lock: - print("\n========= Running flake8 on: {}===============".format(lint_files)) - if len(output) == 0: - print_color("flake8 completed for: {}\n".format(lint_files), LOG_COLORS.GREEN) - return 0 + log_prompt = f"{self._pack_name} - Flake8" + logger.info(f"{log_prompt} - Start") + stdout, stderr, exit_code = run_command_os(command=build_flake8_command(lint_files, py_num), + cwd=self._content_repo) + logger.debug(f"{log_prompt} - Finshed exit-code: {exit_code}") + logger.debug(f"{log_prompt} - Finshed stdout: {RL if stdout else ''}{stdout}") + logger.debug(f"{log_prompt} - Finshed stderr: {RL if stderr else ''}{stderr}") + if stderr or exit_code: + logger.info(f"{log_prompt}- Finshed Finshed errors found") + if stderr: + return FAIL, stderr else: - print_error(output) - return 1 + return FAIL, stdout + + logger.info(f"{log_prompt} - Successfully finished") + + return SUCCESS, "" - def run_mypy(self, py_num) -> int: - """Runs mypy + def _run_bandit(self, lint_files: List[Path]) -> Tuple[int, str]: + """ Run bandit in pack dir Args: - py_num: The python version in use + lint_files(List[Path]): file to perform lint Returns: - int. 0 on successful mypy run, 1 otherwise. 
+ int: 0 on successful else 1, errors + str: Bandit errors """ - self.get_common_server_python() - lint_files = self._get_lint_files() - sys.stdout.flush() - script_path = os.path.abspath(os.path.join(self.configuration.sdk_env_dir, self.run_mypy_script)) - output = run_command(' '.join(['bash', script_path, str(py_num), lint_files]), cwd=self.project_dir) - with self.lock: - print("========= Running mypy on: {} ===============".format(lint_files)) - if 'Success: no issues found in 1 source file' in output: - print(output) - print_color("mypy completed for: {}\n".format(lint_files), LOG_COLORS.GREEN) - self.remove_common_server_python() - return 0 + log_prompt = f"{self._pack_name} - Bandit" + logger.info(f"{log_prompt} - Start") + stdout, stderr, exit_code = run_command_os(command=build_bandit_command(lint_files), + cwd=self._pack_abs_dir) + logger.debug(f"{log_prompt} - Finshed exit-code: {exit_code}") + logger.debug(f"{log_prompt} - Finshed stdout: {RL if stdout else ''}{stdout}") + logger.debug(f"{log_prompt} - Finshed stderr: {RL if stderr else ''}{stderr}") + if stderr or exit_code: + logger.info(f"{log_prompt}- Finshed Finshed errors found") + if stderr: + return FAIL, stderr else: - print_error(output) - self.remove_common_server_python() - return 1 + return FAIL, stdout - def run_bandit(self, py_num) -> int: - """Run bandit + logger.info(f"{log_prompt} - Successfully finished") + + return SUCCESS, "" + + def _run_mypy(self, py_num: float, lint_files: List[Path]) -> Tuple[int, str]: + """ Run mypy in pack dir Args: - py_num: The python version in use + py_num(float): The python version in use + lint_files(List[Path]): file to perform lint Returns: - int. 0 on successful bandit run, 1 otherwise. 
+            int: 0 on successful else 1, errors
+            str: Mypy errors
         """
-        lint_files = self._get_lint_files()
-        python_exe = 'python2' if py_num < 3 else 'python3'
-        output = run_command(' '.join([python_exe, '-m', 'bandit', '-lll', '-iii', '-q', lint_files]),
-                             cwd=self.project_dir)
-        with self.lock:
-            print("========= Running bandit on: {} ===============".format(lint_files))
-            print_v('Using: {} to run bandit'.format(python_exe))
-            if len(output) == 0:
-                print_color("bandit completed for: {}\n".format(lint_files), LOG_COLORS.GREEN)
-                return 0
+        log_prompt = f"{self._pack_name} - Mypy"
+        logger.info(f"{log_prompt} - Start")
+        with add_typing_module(lint_files=lint_files, python_version=py_num):
+            stdout, stderr, exit_code = run_command_os(command=build_mypy_command(files=lint_files, version=py_num),
+                                                       cwd=self._pack_abs_dir)
+        logger.debug(f"{log_prompt} - Finished exit-code: {exit_code}")
+        logger.debug(f"{log_prompt} - Finished stdout: {RL if stdout else ''}{stdout}")
+        logger.debug(f"{log_prompt} - Finished stderr: {RL if stderr else ''}{stderr}")
+        if stderr or exit_code:
+            logger.info(f"{log_prompt} - Finished errors found")
+            if stderr:
+                return FAIL, stderr
         else:
-            print_error(output)
-            return 1
+                return FAIL, stdout
+
+        logger.info(f"{log_prompt} - Successfully finished")
 
-    def run_vulture(self, py_num) -> int:
-        """Run vulture
+        return SUCCESS, ""
+
+    def _run_vulture(self, py_num: float, lint_files: List[Path]) -> Tuple[int, str]:
+        """ Run vulture in pack dir
 
         Args:
-            py_num: The python version in use
+            py_num(float): The python version in use
+            lint_files(List[Path]): file to perform lint
 
         Returns:
-            int. 0 on successful vulture run, 1 otherwise.
+ int: 0 on successful else 1, errors + str: Vulture errors """ - lint_files = self._get_lint_files() - python_exe = 'python2' if py_num < 3 else 'python3' - cmd_args = [python_exe, '-m', 'vulture', lint_files, '--min-confidence', - os.environ.get('VULTURE_MIN_CONFIDENCE_LEVEL', '100')] - vulture_whitelist_path = os.path.join(self.project_dir, '.vulture_whitelist.py') - if os.path.isfile(vulture_whitelist_path): - cmd_args.insert(4, vulture_whitelist_path) - output = run_command(' '.join(cmd_args), cwd=self.project_dir) - self.lock.acquire() - print("========= Running vulture on: {} ===============".format(lint_files)) - print_v('Using: {} to run vulture'.format(python_exe)) - if len(output) == 0: - print_color("vulture completed for: {}\n".format(lint_files), LOG_COLORS.GREEN) - if self.lock.locked(): - self.lock.release() - return 0 + log_prompt = f"{self._pack_name} - Vulture" + logger.info(f"{log_prompt} - Start") + stdout, stderr, exit_code = run_command_os(command=build_vulture_command(files=lint_files, + pack_path=self._pack_abs_dir, + py_num=py_num), + cwd=self._pack_abs_dir) + logger.debug(f"{log_prompt} - Finshed exit-code: {exit_code}") + logger.debug(f"{log_prompt} - Finshed stdout: {RL if stdout else ''}{stdout}") + logger.debug(f"{log_prompt} - Finshed stderr: {RL if stderr else ''}{stderr}") + if stderr or exit_code: + logger.info(f"{log_prompt}- Finshed Finshed errors found") + if stderr: + return FAIL, stderr + else: + return FAIL, stdout - else: - print_error(output) - if self.lock.locked(): - self.lock.release() - return 1 + logger.info(f"{log_prompt} - Successfully finished") - def _docker_login(self): - if self.docker_login_completed: - return True - docker_user = os.getenv('DOCKERHUB_USER', None) - if not docker_user: - print_v('DOCKERHUB_USER not set. Not trying to login to dockerhub') - return False - docker_pass = os.getenv('DOCKERHUB_PASSWORD', None) - # pass is optional for local testing scenario. 
allowing password to be passed via stdin - cmd = ['docker', 'login', '-u', docker_user] - if docker_pass: - cmd.append('--password-stdin') - res = subprocess.run(cmd, input=docker_pass, capture_output=True, text=True) - if res.returncode != 0: - print("Failed docker login: {}".format(res.stderr)) + return SUCCESS, "" + + def _run_lint_on_docker_image(self, no_pylint: list, no_test: bool, no_pwsh_analyze: bool, no_pwsh_test: bool, + keep_container: bool, test_xml: str): + """ Run lint check on docker image + + Args: + no_pylint(bool): Whether to skip pylint + no_test(bool): Whether to skip pytest + no_pwsh_analyze(bool): Whether to skip powershell code analyzing + no_pwsh_test(bool): whether to skip powershell tests + keep_container(bool): Whether to keep the test container + test_xml(str): Path for saving pytest xml results + """ + for image in self._facts["images"]: + # Docker image status - visualize + status = { + "image": image[0], + "image_errors": "", + "pylint_errors": "", + "pytest_errors": "", + "pytest_json": {}, + "pwsh_analyze_errors": "", + "pwsh_test_errors": "" + } + # Creating image if pylint specifie or found tests and tests specified + image_id = "" + errors = "" + for trial in range(2): + image_id, errors = self._docker_image_create(docker_base_image=image) + if not errors: + break + + if image_id and not errors: + # Set image creation status + for check in ["pylint", "pytest", "pwsh_analyze", "pwsh_test"]: + exit_code = SUCCESS + output = "" + for trial in range(2): + if self._pkg_lint_status["pack_type"] == TYPE_PYTHON: + # Perform pylint + if not no_pylint and check == "pylint" and self._facts["lint_files"]: + exit_code, output = self._docker_run_pylint(test_image=image_id, + keep_container=keep_container) + # Perform pytest + elif not no_test and self._facts["test"] and check == "pytest": + exit_code, test_json = self._docker_run_pytest(test_image=image_id, + keep_container=keep_container, + test_xml=test_xml) + status["pytest_json"]: dict = 
test_json + elif self._pkg_lint_status["pack_type"] == TYPE_PWSH: + # Perform powershell analyze + if not no_pwsh_analyze and check == "pwsh_analyze" and self._facts["lint_files"]: + exit_code, output = self._docker_run_pwsh_analyze(test_image=image_id, + keep_container=keep_container) + # Perform powershell test + elif not no_pwsh_test and check == "pwsh_test": + exit_code, output = self._docker_run_pwsh_test(test_image=image_id, + keep_container=keep_container) + + if (exit_code != RERUN or trial == 2) and exit_code: + self._pkg_lint_status["exit_code"] |= EXIT_CODES[check] + status[f"{check}_errors"] = output + break + elif exit_code != RERUN: + break + else: + status["image_errors"] = str(errors) + self._pkg_lint_status["exit_code"] += EXIT_CODES["image"] + + # Add image status to images + self._pkg_lint_status["images"].append(status) + try: + self._docker_client.images.remove(image_id) + except (docker.errors.ImageNotFound, docker.errors.APIError): + pass + + def _docker_login(self) -> bool: + """ Login to docker-hub using enviorment varaibles: + 1. DOCKERHUB_USER - User for docker hub. + 2. DOCKERHUB_PASSWORD - Password for docker-hub. + Used in Circle-CI for pushing into repo devtestdemisto + + Returns: + bool: True if logged in successfully. + """ + docker_user = os.getenv('DOCKERHUB_USER') + docker_pass = os.getenv('DOCKERHUB_PASSWORD') + try: + self._docker_client.login(username=docker_user, + password=docker_pass, + registry="https://index.docker.io/v1") + return self._docker_client.ping() + except docker.errors.APIError: return False - print_v("Completed docker login") - self.docker_login_completed = True - return True - def _docker_image_create(self, docker_base_image): - """Create the docker image with dev dependencies. Will check if already existing. - Uses a hash of the requirements to determine the image tag + def _docker_image_create(self, docker_base_image: str) -> str: + """ Create docker image: + 1. 
Installing 'build base' if required in alpine images version - https://wiki.alpinelinux.org/wiki/GCC + 2. Installing pypi packs - if only pylint required - only pylint installed otherwise all pytest and pylint + installed, packages which being install can be found in path demisto_sdk/commands/lint/dev_envs + 3. The docker image build done by Dockerfile template located in + demisto_sdk/commands/lint/templates/dockerfile.jinja2 - Arguments: - docker_base_image (string): docker image to use as base for installing dev deps + Args: + docker_base_image(str): docker image to use as base for installing dev deps.. Returns: string. image name to use """ - if ':' not in docker_base_image: - docker_base_image += ':latest' - setup_script = self.container_setup_script - setup_script_name = self.container_setup_script_name - if self.script_type == TYPE_PWSH: - setup_script = self.container_setup_script_pwsh - setup_script_name = self.container_setup_script_pwsh_name - docker_input = None + log_prompt = f"{self._pack_name} - Image create" + test_image_id = "" + # Get requirements file for image + requirements = [] + if 2 < docker_base_image[1] < 3: + requirements = self._req_2 + elif docker_base_image[1] > 3: + requirements = self._req_3 + # Using DockerFile template + file_loader = FileSystemLoader(Path(__file__).parent / 'templates') + env = Environment(loader=file_loader, lstrip_blocks=True, trim_blocks=True) + template = env.get_template('dockerfile.jinja2') + try: + dockerfile = template.render(image=docker_base_image[0], + pypi_packs=requirements + self._facts["additional_requirements"], + pack_type=self._pkg_lint_status["pack_type"]) + except exceptions.TemplateError as e: + logger.debug(f"{log_prompt} - Error when build image - {e.message()}") + return test_image_id, str(e) + # Trying to pull image based on dockerfile hash, will check if something changed + errors = "" + test_image_name = 
f'devtest{docker_base_image[0]}-{hashlib.md5(dockerfile.encode("utf-8")).hexdigest()}' + test_image = None + try: + logger.info(f"{log_prompt} - Trying to pull existing image {test_image_name}") + test_image = self._docker_client.images.pull(test_image_name) + except (docker.errors.APIError, docker.errors.ImageNotFound): + logger.info(f"{log_prompt} - Unable to find image {test_image_name}") + # Creatng new image if existing image isn't found + if not test_image: + logger.info(f"{log_prompt} - Creating image based on {docker_base_image[0]} - Could take 2-3 minutes at first " + f"time") + try: + with io.BytesIO() as f: + f.write(dockerfile.encode('utf-8')) + f.seek(0) + self._docker_client.images.build(fileobj=f, + tag=test_image_name, + forcerm=True) + + if self._docker_hub_login: + for trial in range(2): + try: + self._docker_client.images.push(test_image_name) + logger.info(f"{log_prompt} - Image {test_image_name} pushed to repository") + break + except (requests.exceptions.ConnectionError, urllib3.exceptions.ReadTimeoutError): + logger.info(f"{log_prompt} - Unable to push image {test_image_name} to repository") + + except (docker.errors.BuildError, docker.errors.APIError, Exception) as e: + logger.critical(f"{log_prompt} - Build errors occured {e}") + errors = str(e) else: - py_num = get_python_version(docker_base_image) - if py_num == 2.7: - requirements = self.requirements_2 - else: - requirements = self.requirements_3 - docker_input = requirements + logger.info(f"{log_prompt} - Found existing image {test_image_name}") - with open(setup_script, "rb") as f: - setup_script_data = f.read() - if self.script_type == TYPE_PYTHON: - md5 = hashlib.md5(requirements.encode('utf-8') + setup_script_data).hexdigest() - else: - md5 = hashlib.md5(setup_script_data).hexdigest() - target_image = 'devtest' + docker_base_image + '-' + md5 - lock_file = ".lock-" + target_image.replace("/", "-") - try: - if (time.time() - os.path.getctime(lock_file)) > (60 * 5): - print("{}: 
Deleting old lock file: {}".format(datetime.now(), lock_file)) - os.remove(lock_file) - except Exception as ex: - print_v("Failed check and delete for lock file: {}. Error: {}".format(lock_file, ex)) - wait_print = True - for x in range(60): - images_ls = run_command(' '.join(['docker', 'image', 'ls', '--format', '{{.Repository}}:{{.Tag}}', - target_image])).strip() - if images_ls == target_image: - print('{}: Using already existing docker image: {}'.format(datetime.now(), target_image)) - return target_image - if wait_print: - print("{}: Existing image: {} not found will obtain lock file or wait for image".format(datetime.now(), - target_image)) - wait_print = False - print_v("Trying to obtain lock file: " + lock_file) + for trial in range(2): try: - f = open(lock_file, "x") - f.close() - print("{}: Obtained lock file: {}".format(datetime.now(), lock_file)) + logger.info(f"{log_prompt} - Copy pack dir to image {test_image_name}") + container_obj = self._docker_client.containers.create(image=test_image_name, + command="update-ca-certificates") + copy_dir_to_container(container_obj=container_obj, + host_path=self._pack_abs_dir, + container_path=Path('/devwork')) + if self._facts["env_vars"]["DEMISTO_LINT_UPDATE_CERTS"] == "yes" and \ + self._pkg_lint_status["pack_type"] == TYPE_PWSH: + copy_dir_to_container(container_obj=container_obj, + host_path=Path(__file__).parent / 'resources' / 'certificates', + container_path=Path('/usr/local/share/ca-certificates/')) + container_obj.start() + container_obj.wait() + test_image_id = container_obj.commit().short_id + container_obj.remove() break - except Exception as ex: - print_v("Failed getting lock. 
Will wait {}".format(str(ex))) - time.sleep(5) + except (docker.errors.ImageNotFound, docker.errors.APIError, urllib3.exceptions.ReadTimeoutError) as e: + logger.info(f"{log_prompt} - errors occured when copy pack dir {e}") + if trial == 2: + errors = str(e) + if test_image_id: + logger.info(f"{log_prompt} - Image {test_image_id} created succefully") + + return test_image_id, errors + + def _docker_run_pylint(self, test_image: str, keep_container: bool) -> Tuple[int, str]: + """ Run Pylint in created test image + + Args: + test_image(str): test image id/name + keep_container(bool): True if to keep container after excution finished + + Returns: + int: 0 on successful, errors 1, neet to retry 2 + str: Container log + """ + log_prompt = f'{self._pack_name} - Pylint - Image {test_image}' + logger.info(f"{log_prompt} - Start") + container_name = f"{self._pack_name}-pylint" + # Check if previous run left container a live if it do, we remove it + container_obj: docker.models.containers.Container try: - # try doing a pull - try: - print("{}: Trying to pull image: {}".format(datetime.now(), target_image)) - pull_res = subprocess.check_output(['docker', 'pull', target_image], - stderr=subprocess.STDOUT, universal_newlines=True) - print("Pull succeeded with output: {}".format(pull_res)) - return target_image - except subprocess.CalledProcessError as cpe: - print_v("Failed docker pull (will create image) with status: {}. 
Output: {}".format(cpe.returncode, - cpe.output)) - print("{}: Creating docker image: {} (this may take a minute or two...)".format(datetime.now(), - target_image)) - update_cert = os.getenv('DEMISTO_LINT_UPDATE_CERTS', 'yes') - docker_create = ['docker', 'create', '-e', f'DEMISTO_LINT_UPDATE_CERTS={update_cert}', '-i', - docker_base_image, 'sh', '/' + setup_script_name] - print_v(f'running: {docker_create}') - container_id = subprocess.check_output(docker_create, universal_newlines=True).strip() - print_v(f'created container with id: {container_id}') - subprocess.check_call(['docker', 'cp', setup_script, - container_id + ':/' + setup_script_name]) - if self.script_type == TYPE_PWSH: - if update_cert == 'yes': - subprocess.check_call(['docker', 'cp', self.cert_file, - container_id + ':/usr/local/share/ca-certificates/custom.crt']) - print_v(subprocess.check_output(['docker', 'start', '-a', '-i', container_id], - input=docker_input, stderr=subprocess.STDOUT, - universal_newlines=True)) - print_v(subprocess.check_output(['docker', 'commit', container_id, target_image], stderr=subprocess.STDOUT, - universal_newlines=True)) - print_v(subprocess.check_output(['docker', 'rm', container_id], stderr=subprocess.STDOUT, - universal_newlines=True)) - if self._docker_login(): - print("{}: Pushing image: {} to docker hub".format(datetime.now(), target_image)) - print_v(subprocess.check_output(['docker', 'push', target_image], stderr=subprocess.STDOUT, - universal_newlines=True)) - except subprocess.CalledProcessError as err: - print("Failed executing command with error: {} Output: \n{}".format(err, err.output)) - raise err - finally: - try: - os.remove(lock_file) - except Exception as ex: - print("{}: Error removing file: {}".format(datetime.now(), ex)) - print('{}: Done creating docker image: {}'.format(datetime.now(), target_image)) - return target_image - - def _docker_run(self, docker_image): - workdir = '/devwork' # this is setup in CONTAINER_SETUP_SCRIPT - lint_files = 
os.path.basename(self._get_lint_files()) - - run_params = ['docker', 'create', '-w', workdir, - '-e', 'PYLINT_FILES={}'.format(lint_files)] - run_params.extend(['-e', f'PS_LINT_FILES={lint_files}']) - if not self.root: - run_params.extend(['-u', '{}:4000'.format(os.getuid())]) - if not self.run_args['tests']: - run_params.extend(['-e', 'PYTEST_SKIP=1']) - run_params.extend(['-e', 'PS_TEST_SKIP=1']) - if not self.run_args['pylint']: - run_params.extend(['-e', 'PYLINT_SKIP=1']) - if not self.run_args['pslint']: - run_params.extend(['-e', 'PS_LINT_SKIP=1']) - run_params.extend(['-e', 'CPU_NUM={}'.format(self.cpu_num)]) - run_params.extend(['-e', 'CI={}'.format(os.getenv("CI", "false"))]) - run_script_name = self.run_dev_tasks_script_name if self.script_type == TYPE_PYTHON else self.run_dev_tasks_script_pwsh_name - run_script = self.run_dev_tasks_script if self.script_type == TYPE_PYTHON else self.run_dev_tasks_script_pwsh - run_params.extend([docker_image, 'sh', './{}'.format(run_script_name)]) - print_v(f'container create: {run_params}') - output = subprocess.check_output(run_params, stderr=subprocess.STDOUT, universal_newlines=True) - container_id = output.strip() + container_obj = self._docker_client.containers.get(container_name) + container_obj.remove(force=True) + except docker.errors.NotFound: + pass + + # Run container + exit_code = SUCCESS + output = "" try: - output = output + '\n' + subprocess.check_output(['docker', 'cp', self.project_dir + '/.', container_id + - ':' + workdir], stderr=subprocess.STDOUT, universal_newlines=True) - output = output + '\n' + subprocess.check_output(['docker', 'cp', run_script, container_id + - ':' + workdir], universal_newlines=True, stderr=subprocess.STDOUT) - output = output + '\n' + subprocess.check_output(['docker', 'start', '-a', container_id], - stderr=subprocess.STDOUT, - universal_newlines=True) - return output, 0 - finally: - if not self.keep_container: - run_command(f'docker rm {container_id}') + container_obj = 
self._docker_client.containers.run(name=container_name, + image=test_image, + command=[build_pylint_command(self._facts["lint_files"])], + user=f"{os.getuid()}:4000", + detach=True, + environment=self._facts["env_vars"]) + stream_docker_container_output(container_obj.logs(stream=True)) + # wait for container to finish + container_status = container_obj.wait(condition="exited") + # Get container exit code + container_exit_code = container_status.get("StatusCode") + # Getting container logs + container_log = container_obj.logs().decode("utf-8") + logger.info(f"{log_prompt} - exit-code: {container_exit_code}") + if container_exit_code in [1, 2]: + # 1-fatal message issued + # 2-Error message issued + exit_code = FAIL + output = container_log + logger.info(f"{log_prompt} - Finished errors found") + elif container_exit_code in [4, 8, 16]: + # 4-Warning message issued + # 8-refactor message issued + # 16-convention message issued + logger.info(f"{log_prompt} - Successfully finished - warnings found") + exit_code = SUCCESS + elif container_exit_code == 32: + # 32-usage error + logger.critical(f"{log_prompt} - Finished - Usage error") + exit_code = RERUN + else: + logger.info(f"{log_prompt} - Successfully finished") + # Keeping container if needed or remove it + if keep_container: + print(f"{log_prompt} - container name {container_name}") else: - print("Test container [{}] was left available".format(container_id)) + try: + container_obj.remove(force=True) + except docker.errors.NotFound as e: + logger.critical(f"{log_prompt} - Unable to delete container - {e}") + except (docker.errors.ImageNotFound, docker.errors.APIError) as e: + logger.critical(f"{log_prompt} - Unable to run pylint - {e}") + exit_code = RERUN + output = str(e) + + return exit_code, output + + def _docker_run_pytest(self, test_image: str, keep_container: bool, test_xml: str) -> Tuple[int, str]: + """ Run Pytest in created test image - def _setup_dev_files_py(self, py_num): - # copy demistomock and common 
server + Args: + test_image(str): Test image id/name + keep_container(bool): True if to keep container after excution finished + test_xml(str): Xml saving path + + Returns: + int: 0 on successful, errors 1, neet to retry 2 + str: Unit test json report + """ + log_prompt = f'{self._pack_name} - Pytest - Image {test_image}' + logger.info(f"{log_prompt} - Start") + container_name = f"{self._pack_name}-pytest" + # Check if previous run left container a live if it does, Remove it + container_obj: docker.models.containers.Container try: - shutil.copy(self.configuration.env_dir + '/Tests/demistomock/demistomock.py', self.project_dir) - open(self.project_dir + '/CommonServerUserPython.py', 'a').close() # create empty file - shutil.rmtree(self.project_dir + '/__pycache__', ignore_errors=True) - shutil.copy(self.configuration.env_dir + '/Tests/scripts/dev_envs/pytest/conftest.py', self.project_dir) - self.check_api_module_imports(py_num) - if "/Scripts/CommonServerPython" not in self.project_dir: - # Otherwise we already have the CommonServerPython.py file - common_server_path = get_common_server_path(self.configuration.env_dir) - shutil.copy(common_server_path, self.project_dir) - except Exception as e: - print_v('Could not copy demistomock and CommonServer files: {}'.format(str(e))) - - def _setup_dev_files_pwsh(self): - # copy common server + container_obj = self._docker_client.containers.get(container_name) + container_obj.remove(force=True) + except docker.errors.NotFound: + pass + # Collect tests + exit_code = SUCCESS + test_json = {} try: - shutil.copy(self.configuration.env_dir + '/Tests/demistomock/demistomock.ps1', self.project_dir) - if "/Scripts/CommonServerPowerShell" not in self.project_dir: - # Otherwise we already have the CommonServerPowerShell.py file - shutil.copy(get_common_server_path_pwsh(self.configuration.env_dir), - self.project_dir) - except Exception as e: - print('Could not copy CommonServerPowerShell.ps1: {}'.format(str(e))) - raise - - def 
check_api_module_imports(self, py_num): - """ - Checks if the integration imports an API module and if so pastes the module in the package. - :param py_num: The python version - api modules are in python 3 + # Running pytest container + container_obj = self._docker_client.containers.run(name=container_name, + image=test_image, + command=[build_pytest_command(test_xml=test_xml, json=True)], + user=f"{os.getuid()}:4000", + detach=True, + environment=self._facts["env_vars"]) + stream_docker_container_output(container_obj.logs(stream=True)) + # Waiting for container to be finished + container_status: dict = container_obj.wait(condition="exited") + # Getting container exit code + container_exit_code: int = container_status.get("StatusCode") + # Getting container logs + logger.info(f"{log_prompt} - exit-code: {container_exit_code}") + if container_exit_code in [0, 1, 2, 5]: + # 0-All tests passed + # 1-Tests were collected and run but some of the tests failed + # 2-Test execution was interrupted by the user + # 5-No tests were collected + if test_xml: + test_data_xml: bytes = get_file_from_container(container_obj=container_obj, + container_path="/devwork/report_pytest.xml") + xml_apth = Path(test_xml) / f'{self._pack_name}_pytest.xml' + with open(file=xml_apth, mode='bw') as f: + f.write(test_data_xml) + + test_json: dict = json.loads(get_file_from_container(container_obj=container_obj, + container_path="/devwork/report_pytest.json", + encoding="utf-8")) + for test in test_json.get('report', {}).get("tests"): + if test.get("call", {}).get("longrepr"): + test["call"]["longrepr"] = test["call"]["longrepr"].split('\n') + if container_exit_code in [0, 5]: + logger.info(f"{log_prompt} - Successfully finished") + exit_code = SUCCESS + else: + logger.info(f"{log_prompt} - Finished errors found") + exit_code = FAIL + elif container_exit_code in [3, 4]: + # 3-Internal error happened while executing tests + # 4-pytest command line usage error + logger.critical(f"{log_prompt} - 
Usage error") + exit_code = RERUN + # Remove container if not needed + if keep_container: + print(f"{log_prompt} - Conatiner name {container_name}") + else: + try: + container_obj.remove(force=True) + except docker.errors.NotFound as e: + logger.critical(f"{log_prompt} - Unable to remove container {e}") + except (docker.errors.ImageNotFound, docker.errors.APIError) as e: + logger.critical(f"{log_prompt} - Unable to run pytest container {e}") + exit_code = RERUN + + return exit_code, test_json + + def _docker_run_pwsh_analyze(self, test_image: str, keep_container: bool) -> Tuple[int, str]: + """ Run Powershell code analyze in created test image + + Args: + test_image(str): test image id/name + keep_container(bool): True if to keep container after excution finished + + Returns: + int: 0 on successful, errors 1, neet to retry 2 + str: Container log """ - if py_num > 3: - unifier = Unifier(self.project_dir) - code_file_path = unifier.get_code_file('.py') + log_prompt = f'{self._pack_name} - Powershell analyze - Image {test_image}' + logger.info(f"{log_prompt} - Start") + container_name = f"{self._pack_name}-pwsh-analyze" + # Check if previous run left container a live if it do, we remove it + container_obj: docker.models.containers.Container + try: + container_obj = self._docker_client.containers.get(container_name) + container_obj.remove(force=True) + except docker.errors.NotFound: + pass + + # Run container + exit_code = SUCCESS + output = "" + try: + container_obj = self._docker_client.containers.run(name=container_name, + image=test_image, + command=build_pwsh_analyze_command( + self._facts["lint_files"][0]), + user=f"{os.getuid()}:4000", + detach=True, + environment=self._facts["env_vars"]) + stream_docker_container_output(container_obj.logs(stream=True)) + # wait for container to finish + container_status = container_obj.wait(condition="exited") + # Get container exit code + container_exit_code = container_status.get("StatusCode") + # Getting container logs + 
container_log = container_obj.logs().decode("utf-8") + logger.info(f"{log_prompt} - exit-code: {container_exit_code}") + if container_exit_code: + # 1-fatal message issued + # 2-Error message issued + logger.info(f"{log_prompt} - Finshed errors found") + output = container_log + exit_code = FAIL + else: + logger.info(f"{log_prompt} - Successfully finished") + # Keeping container if needed or remove it + if keep_container: + print(f"{log_prompt} - container name {container_name}") + else: + try: + container_obj.remove(force=True) + except docker.errors.NotFound as e: + logger.critical(f"{log_prompt} - Unable to delete container - {e}") + except (docker.errors.ImageNotFound, docker.errors.APIError) as e: + logger.critical(f"{log_prompt} - Unable to run powershell test - {e}") + exit_code = RERUN - try: - # Look for an import to an API module in the code. If there is such import, we need to copy the correct - # module file to the package directory. - with io.open(code_file_path, mode='r', encoding='utf-8') as script_file: - _, module_name = unifier.check_api_module_imports(script_file.read()) - if module_name: - module_path = os.path.join(self.configuration.env_dir, 'Packs', 'ApiModules', 'Scripts', - module_name, module_name + '.py') - print_v('Copying ' + os.path.join(self.configuration.env_dir, 'Scripts', module_path)) - if not os.path.exists(module_path): - raise ValueError('API Module {} not found, you might be outside of the content repository' - ' or this API module does not exist'.format(module_name)) - shutil.copy(os.path.join(module_path), self.project_dir) - except Exception as e: - print_v('Unable to retrieve the module file {}: {}'.format(module_name, str(e))) - - def _get_lint_files(self): - unifier = Unifier(self.project_dir) - code_file = unifier.get_code_file(TYPE_TO_EXTENSION[self.script_type]) - return os.path.abspath(code_file) + return exit_code, output + + def _docker_run_pwsh_test(self, test_image: str, keep_container: bool) -> Tuple[int, str]: 
+ """ Run Powershell tests in created test image + + Args: + test_image(str): test image id/name + keep_container(bool): True if to keep container after excution finished + + Returns: + int: 0 on successful, errors 1, neet to retry 2 + str: Container log + """ + log_prompt = f'{self._pack_name} - Powershell test - Image {test_image}' + logger.info(f"{log_prompt} - Start") + container_name = f"{self._pack_name}-pwsh-test" + # Check if previous run left container a live if it do, we remove it + container_obj: docker.models.containers.Container + try: + container_obj = self._docker_client.containers.get(container_name) + container_obj.remove(force=True) + except docker.errors.NotFound: + pass + + # Run container + exit_code = SUCCESS + output = "" + try: + container_obj = self._docker_client.containers.run(name=container_name, + image=test_image, + command=build_pwsh_test_command(), + user=f"{os.getuid()}:4000", + detach=True, + environment=self._facts["env_vars"]) + stream_docker_container_output(container_obj.logs(stream=True)) + # wait for container to finish + container_status = container_obj.wait(condition="exited") + # Get container exit code + container_exit_code = container_status.get("StatusCode") + # Getting container logs + container_log = container_obj.logs().decode("utf-8") + logger.info(f"{log_prompt} - exit-code: {container_exit_code}") + if container_exit_code: + # 1-fatal message issued + # 2-Error message issued + logger.info(f"{log_prompt} - Finshed errors found") + output = container_log + exit_code = FAIL + else: + logger.info(f"{log_prompt} - Successfully finished") + # Keeping container if needed or remove it + if keep_container: + print(f"{log_prompt} - container name {container_name}") + else: + try: + container_obj.remove(force=True) + except docker.errors.NotFound as e: + logger.critical(f"{log_prompt} - Unable to delete container - {e}") + except (docker.errors.ImageNotFound, docker.errors.APIError) as e: + logger.critical(f"{log_prompt} - 
Unable to run powershell test - {e}") + exit_code = RERUN + + return exit_code, output diff --git a/demisto_sdk/commands/common/dev_sh_scripts/panw-cert.crt b/demisto_sdk/commands/lint/resources/certificates/panw-cert.crt similarity index 100% rename from demisto_sdk/commands/common/dev_sh_scripts/panw-cert.crt rename to demisto_sdk/commands/lint/resources/certificates/panw-cert.crt diff --git a/demisto_sdk/commands/lint/resources/pipfile_python2/Pipfile b/demisto_sdk/commands/lint/resources/pipfile_python2/Pipfile new file mode 100644 index 0000000000..a1ac925f87 --- /dev/null +++ b/demisto_sdk/commands/lint/resources/pipfile_python2/Pipfile @@ -0,0 +1,17 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] +pylint = "*" +pytest = "*" +pytest-mock = "*" +requests-mock = "*" +pytest-xdist = "*" +pytest-json = "*" + +[packages] + +[requires] +python_version = "2.7" diff --git a/demisto_sdk/commands/lint/resources/pipfile_python2/Pipfile.lock b/demisto_sdk/commands/lint/resources/pipfile_python2/Pipfile.lock new file mode 100644 index 0000000000..9c6aa954ea --- /dev/null +++ b/demisto_sdk/commands/lint/resources/pipfile_python2/Pipfile.lock @@ -0,0 +1,347 @@ +{ + "_meta": { + "hash": { + "sha256": "17cd8f31f8b4bdfa7fedfae87e0eca4d9e1a602f54d991932f84ea3d2084e8f6" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "2.7" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": {}, + "develop": { + "apipkg": { + "hashes": [ + "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", + "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c" + ], + "version": "==1.5" + }, + "astroid": { + "hashes": [ + "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756", + "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7" + ], + "version": "==1.6.6" + }, + "atomicwrites": { + "hashes": 
[ + "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", + "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" + ], + "version": "==1.3.0" + }, + "attrs": { + "hashes": [ + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + ], + "version": "==19.3.0" + }, + "backports.functools-lru-cache": { + "hashes": [ + "sha256:0bada4c2f8a43d533e4ecb7a12214d9420e66eb206d54bf2d682581ca4b80848", + "sha256:8fde5f188da2d593bd5bc0be98d9abc46c95bb8a9dde93429570192ee6cc2d4a" + ], + "markers": "python_version < '3.2'", + "version": "==1.6.1" + }, + "certifi": { + "hashes": [ + "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3", + "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f" + ], + "version": "==2019.11.28" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "configparser": { + "hashes": [ + "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", + "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" + ], + "markers": "python_version < '3'", + "version": "==4.0.2" + }, + "contextlib2": { + "hashes": [ + "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", + "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" + ], + "markers": "python_version < '3'", + "version": "==0.6.0.post1" + }, + "enum34": { + "hashes": [ + "sha256:13ef9a1c478203252107f66c25b99b45b1865693ca1284aab40dafa7e1e7ac17", + "sha256:708aabfb3d5898f99674c390d360d59efdd08547019763622365f19e84a7fef4", + "sha256:98df1f1937840b7d8012fea7f0b36392a3e6fd8a2f429c48a3ff4b1aad907f3f" + ], + "markers": "python_version < '3.4'", + "version": "==1.1.9" + }, + "execnet": { + "hashes": [ + 
"sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50", + "sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547" + ], + "version": "==1.7.1" + }, + "funcsigs": { + "hashes": [ + "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", + "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" + ], + "markers": "python_version < '3.0'", + "version": "==1.0.2" + }, + "futures": { + "hashes": [ + "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16", + "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794" + ], + "markers": "python_version < '3.2'", + "version": "==3.3.0" + }, + "idna": { + "hashes": [ + "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", + "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" + ], + "version": "==2.9" + }, + "importlib-metadata": { + "hashes": [ + "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", + "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" + ], + "markers": "python_version < '3.8'", + "version": "==1.5.0" + }, + "isort": { + "hashes": [ + "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", + "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" + ], + "version": "==4.3.21" + }, + "lazy-object-proxy": { + "hashes": [ + "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d", + "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449", + "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08", + "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a", + "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50", + "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd", + "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239", + 
"sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb", + "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea", + "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e", + "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156", + "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142", + "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442", + "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62", + "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db", + "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531", + "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383", + "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a", + "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357", + "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4", + "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0" + ], + "version": "==1.4.3" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "mock": { + "hashes": [ + "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", + "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" + ], + "markers": "python_version < '3.0'", + "version": "==3.0.5" + }, + "more-itertools": { + "hashes": [ + "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", + "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", + "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" + ], + "markers": "python_version <= '2.7'", + "version": "==5.0.0" + }, + "packaging": { + "hashes": [ + 
"sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", + "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" + ], + "version": "==20.1" + }, + "pathlib2": { + "hashes": [ + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" + ], + "markers": "python_version < '3'", + "version": "==2.3.5" + }, + "pluggy": { + "hashes": [ + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + ], + "version": "==0.13.1" + }, + "py": { + "hashes": [ + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" + ], + "version": "==1.8.1" + }, + "pylint": { + "hashes": [ + "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42", + "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300" + ], + "index": "pypi", + "version": "==1.9.5" + }, + "pyparsing": { + "hashes": [ + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" + ], + "version": "==2.4.6" + }, + "pytest": { + "hashes": [ + "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", + "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" + ], + "index": "pypi", + "version": "==4.6.9" + }, + "pytest-forked": { + "hashes": [ + "sha256:1805699ed9c9e60cb7a8179b8d4fa2b8898098e82d229b0825d8095f0f261100", + "sha256:1ae25dba8ee2e56fb47311c9638f9e58552691da87e82d25b0ce0e4bf52b7d87" + ], + "version": "==1.1.3" + }, + "pytest-json": { + "hashes": [ + "sha256:8bf4e1be1691f4416bc12b14785b5ad9e842887b0b2b2d61b37dcb555b208630" + ], + "index": "pypi", + "version": "==0.4.0" + }, + "pytest-mock": { + "hashes": [ + 
"sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f", + "sha256:cb67402d87d5f53c579263d37971a164743dc33c159dfb4fb4a86f37c5552307" + ], + "index": "pypi", + "version": "==2.0.0" + }, + "pytest-xdist": { + "hashes": [ + "sha256:0f46020d3d9619e6d17a65b5b989c1ebbb58fc7b1da8fb126d70f4bac4dfeed1", + "sha256:7dc0d027d258cd0defc618fb97055fbd1002735ca7a6d17037018cf870e24011" + ], + "index": "pypi", + "version": "==1.31.0" + }, + "requests": { + "hashes": [ + "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", + "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" + ], + "version": "==2.23.0" + }, + "requests-mock": { + "hashes": [ + "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010", + "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646" + ], + "index": "pypi", + "version": "==1.7.0" + }, + "scandir": { + "hashes": [ + "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", + "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", + "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", + "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", + "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", + "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", + "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", + "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", + "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", + "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", + "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" + ], + "markers": "python_version < '3.5'", + "version": "==1.10.0" + }, + "singledispatch": { + "hashes": [ + "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c", + 
"sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8" + ], + "markers": "python_version < '3.4'", + "version": "==3.4.0.3" + }, + "six": { + "hashes": [ + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" + ], + "version": "==1.14.0" + }, + "urllib3": { + "hashes": [ + "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc", + "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc" + ], + "version": "==1.25.8" + }, + "wcwidth": { + "hashes": [ + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" + ], + "version": "==0.1.8" + }, + "wrapt": { + "hashes": [ + "sha256:0ec40d9fd4ec9f9e3ff9bdd12dbd3535f4085949f4db93025089d7a673ea94e8" + ], + "version": "==1.12.0" + }, + "zipp": { + "hashes": [ + "sha256:c70410551488251b0fee67b460fb9a536af8d6f9f008ad10ac51f615b6a521b1", + "sha256:e0d9e63797e483a30d27e09fffd308c59a700d365ec34e93cc100844168bf921" + ], + "version": "==1.2.0" + } + } +} diff --git a/demisto_sdk/commands/lint/dev_envs/default_python3/Pipfile b/demisto_sdk/commands/lint/resources/pipfile_python3/Pipfile similarity index 94% rename from demisto_sdk/commands/lint/dev_envs/default_python3/Pipfile rename to demisto_sdk/commands/lint/resources/pipfile_python3/Pipfile index 87365b917b..4261fd143d 100644 --- a/demisto_sdk/commands/lint/dev_envs/default_python3/Pipfile +++ b/demisto_sdk/commands/lint/resources/pipfile_python3/Pipfile @@ -12,6 +12,7 @@ pytest-asyncio = "*" pytest-xdist = "*" pytest-datadir-ng = "*" freezegun = "*" +pytest-json = "*" [packages] diff --git a/demisto_sdk/commands/lint/dev_envs/default_python3/Pipfile.lock b/demisto_sdk/commands/lint/resources/pipfile_python3/Pipfile.lock similarity index 65% rename from demisto_sdk/commands/lint/dev_envs/default_python3/Pipfile.lock rename to 
demisto_sdk/commands/lint/resources/pipfile_python3/Pipfile.lock index cc37c4fdc6..f67a054dc6 100644 --- a/demisto_sdk/commands/lint/dev_envs/default_python3/Pipfile.lock +++ b/demisto_sdk/commands/lint/resources/pipfile_python3/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "78de325ac8977e10eb7a8c420f45ae4f30ef63f3cc33dd7323061d79eb840415" + "sha256": "ad93cb442695af299ced5f803aa15e40c1407e2da4d34c19b550caee5bdbb98a" }, "pipfile-spec": 6, "requires": { @@ -68,26 +68,26 @@ }, "freezegun": { "hashes": [ - "sha256:2a4d9c8cd3c04a201e20c313caf8b6338f1cfa4cda43f46a94cc4a9fd13ea5e7", - "sha256:edfdf5bc6040969e6ed2e36eafe277963bdc8b7c01daeda96c5c8594576c9390" + "sha256:82c757a05b7c7ca3e176bfebd7d6779fd9139c7cb4ef969c38a28d74deef89b2", + "sha256:e2062f2c7f95cc276a834c22f1a17179467176b624cc6f936e8bc3be5535ad1b" ], "index": "pypi", - "version": "==0.3.12" + "version": "==0.3.15" }, "idna": { "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" + "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", + "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" ], - "version": "==2.8" + "version": "==2.9" }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", + "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": "==1.5.0" }, "isort": { "hashes": [ @@ -131,17 +131,17 @@ }, "more-itertools": { "hashes": [ - "sha256:b84b238cce0d9adad5ed87e745778d20a3f8487d0f0cb8b8a586816c7496458d", - "sha256:c833ef592a0324bcc6a60e48440da07645063c453880c9477ceb22490aec1564" + 
"sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c", + "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507" ], - "version": "==8.0.2" + "version": "==8.2.0" }, "packaging": { "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", + "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" ], - "version": "==19.2" + "version": "==20.1" }, "pluggy": { "hashes": [ @@ -152,10 +152,10 @@ }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" ], - "version": "==1.8.0" + "version": "==1.8.1" }, "pylint": { "hashes": [ @@ -203,13 +203,20 @@ ], "version": "==1.1.3" }, + "pytest-json": { + "hashes": [ + "sha256:8bf4e1be1691f4416bc12b14785b5ad9e842887b0b2b2d61b37dcb555b208630" + ], + "index": "pypi", + "version": "==0.4.0" + }, "pytest-mock": { "hashes": [ - "sha256:67e414b3caef7bff6fc6bd83b22b5bc39147e4493f483c2679bc9d4dc485a94d", - "sha256:e24a911ec96773022ebcc7030059b57cd3480b56d4f5d19b7c370ec635e6aed5" + "sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f", + "sha256:cb67402d87d5f53c579263d37971a164743dc33c159dfb4fb4a86f37c5552307" ], "index": "pypi", - "version": "==1.13.0" + "version": "==2.0.0" }, "pytest-xdist": { "hashes": [ @@ -228,10 +235,10 @@ }, "requests": { "hashes": [ - "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", - "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" + "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", + 
"sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" ], - "version": "==2.22.0" + "version": "==2.23.0" }, "requests-mock": { "hashes": [ @@ -243,50 +250,51 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "typed-ast": { "hashes": [ - "sha256:1170afa46a3799e18b4c977777ce137bb53c7485379d9706af8a59f2ea1aa161", - "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e", - "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e", - "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0", - "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c", - "sha256:48e5b1e71f25cfdef98b013263a88d7145879fbb2d5185f2a0c79fa7ebbeae47", - "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631", - "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4", - "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34", - "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b", - "sha256:7954560051331d003b4e2b3eb822d9dd2e376fa4f6d98fee32f452f52dd6ebb2", - "sha256:838997f4310012cf2e1ad3803bce2f3402e9ffb71ded61b5ee22617b3a7f6b6e", - "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a", - "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233", - "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1", - "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36", - "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d", - "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a", - 
"sha256:fdc1c9bbf79510b76408840e009ed65958feba92a88833cdceecff93ae8fff66", - "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12" + "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355", + "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919", + "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa", + "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652", + "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75", + "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01", + "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d", + "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1", + "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907", + "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c", + "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3", + "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b", + "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614", + "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb", + "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b", + "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41", + "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6", + "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34", + "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe", + "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", + "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" ], "markers": "implementation_name == 'cpython' and python_version < '3.8'", - "version": "==1.4.0" + "version": "==1.4.1" }, "urllib3": { "hashes": [ - "sha256:a8a318824cc77d1fd4b2bec2ded92646630d7fe8619497b142c84a9e6f5a7293", - 
"sha256:f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745" + "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc", + "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc" ], - "version": "==1.25.7" + "version": "==1.25.8" }, "wcwidth": { "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" ], - "version": "==0.1.7" + "version": "==0.1.8" }, "wrapt": { "hashes": [ @@ -296,10 +304,10 @@ }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2", + "sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a" ], - "version": "==0.6.0" + "version": "==3.0.0" } } } diff --git a/demisto_sdk/commands/lint/templates/dockerfile.jinja2 b/demisto_sdk/commands/lint/templates/dockerfile.jinja2 new file mode 100644 index 0000000000..dd660d1060 --- /dev/null +++ b/demisto_sdk/commands/lint/templates/dockerfile.jinja2 @@ -0,0 +1,27 @@ +{# Declare image to build from #} +FROM {{ image }} +{# Creating test main directory #} +RUN mkdir -p /devwork/ +{# Define workir #} +WORKDIR /devwork +{# Build for python based image #} +{% if pack_type == 'python' %} +{# Change group owner and permissions - Due to contaier security issues #} +RUN chown -R :4000 /devwork/ +RUN chmod -R 775 /devwork +{# Install requirments and missing deps #} +RUN printf "{{ pypi_packs | join('\\n') }}" > /devwork/test-requirements.txt +RUN OS_RELEASE=$(cat /etc/os-release); if echo "$OS_RELEASE" | grep -q "alpine"; \ +then apk add --no-cache --virtual .build-deps python3-dev gcc build-base; fi; \ 
+pip install --no-cache-dir -r /devwork/test-requirements.txt; \ +if echo "$OS_RELEASE" | grep -q "alpine"; then apk del .build-deps; fi; +{# Container entry point (Every command will start with /bin/sh) #} +ENTRYPOINT ["/bin/sh", "-c"] +{# Build for python based image #} +{% elif pack_type == "powershell" %} +{# Install powershell requirements for analyze and tests #} +RUN pwsh -Command Set-PSRepository -name PSGallery -installationpolicy trusted +RUN pwsh -Command Install-Module -Name Pester -Scope AllUsers -Force | pwsh -Command Out-Null +RUN pwsh -Command Install-Module -Name PSScriptAnalyzer -Scope AllUsers -Force | pwsh -Command Out-Null +{# Container entry point (Every command will start with /bin/sh) #} +{% endif %} diff --git a/demisto_sdk/commands/lint/tests/__init__.py b/demisto_sdk/commands/lint/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/demisto_sdk/commands/lint/tests/command_builder_test.py b/demisto_sdk/commands/lint/tests/command_builder_test.py new file mode 100644 index 0000000000..cb6b27d132 --- /dev/null +++ b/demisto_sdk/commands/lint/tests/command_builder_test.py @@ -0,0 +1,103 @@ +from pathlib import Path +from unittest.mock import MagicMock + +import pytest + +values = [[Path("file1.py")], [Path("file1.py"), Path("file2.py")]] + + +@pytest.mark.parametrize(argnames="py_num , expected_exec", argvalues=[(3.7, 'python3'), (2.7, 'python')]) +def test_get_python_exec(py_num, expected_exec): + """Get python exec""" + from demisto_sdk.commands.lint.commands_builder import get_python_exec + assert expected_exec == get_python_exec(py_num) + + +@pytest.mark.parametrize(argnames="files", argvalues=values) +def test_build_flak8_command(files): + """Build flake8 command""" + from demisto_sdk.commands.lint.commands_builder import build_flake8_command + output = build_flake8_command(files, 3.8) + files = [str(file) for file in files] + expected = f"python3 -m flake8 {' '.join(files)}" + assert output == expected + 
+ +@pytest.mark.parametrize(argnames="files", argvalues=values) +def test_build_bandit_command(files): + """Build bandit command""" + from demisto_sdk.commands.lint.commands_builder import build_bandit_command + output = build_bandit_command(files) + files = [str(file) for file in files] + expected = f"python3 -m bandit -lll -iii -a file --exclude=CommonServerPython.py,demistomock.py," \ + f"CommonServerUserPython.py," \ + f"conftest.py,venv -q -r {','.join(files)}" + assert expected == output + + +@pytest.mark.parametrize(argnames="files, py_num", argvalues=[(values[0], "2.7"), (values[1], "3.7")]) +def test_build_mypy_command(files, py_num): + """Build Mypy command""" + from demisto_sdk.commands.lint.commands_builder import build_mypy_command + output = build_mypy_command(files, py_num) + files = [str(file) for file in files] + expected = f"python3 -m mypy --python-version {py_num} --check-untyped-defs --ignore-missing-imports " \ + f"--follow-imports=silent --show-column-numbers --show-error-codes --pretty --allow-redefinition " \ + f"--cache-dir=/dev/null {' '.join(files)}" + assert expected == output + + +@pytest.mark.parametrize(argnames="files", argvalues=values) +def test_build_vulture_command(files, mocker): + """Build bandit command""" + from demisto_sdk.commands.lint.commands_builder import build_vulture_command + from demisto_sdk.commands.lint import commands_builder + mocker.patch.object(commands_builder, 'os') + commands_builder.os.environ.get.return_value = 20 + output = build_vulture_command(files, Path('~/dev/content/'), 2.7) + files = [str(item) for item in files] + expected = f"python -m vulture --min-confidence 20 --exclude=CommonServerPython.py,demistomock.py," \ + f"CommonServerUserPython.py,conftest.py,venv {' '.join(files)}" + assert expected == output + + +@pytest.mark.parametrize(argnames="files", argvalues=values) +def test_build_pylint_command(files): + """Build Pylint command""" + from demisto_sdk.commands.lint.commands_builder import 
build_pylint_command
+    output = build_pylint_command(files)
+    files = [str(file) for file in files]
+    expected = "python -m pylint --ignore=CommonServerPython.py,demistomock.py,CommonServerUserPython.py," \
+               "conftest.py,venv -E -d duplicate-string-formatting-argument" \
+               f" --generated-members=requests.packages.urllib3,requests.codes.ok {' '.join(files)}"
+    assert expected == output
+
+
+def test_build_pytest_command_1():
+    """Build Pytest command without json"""
+    from demisto_sdk.commands.lint.commands_builder import build_pytest_command
+    command = "python -m pytest --junitxml=/devwork/report_pytest.xml"
+    assert command == build_pytest_command(test_xml="test")
+
+
+def test_build_pytest_command_2():
+    """Build Pytest command with json"""
+    from demisto_sdk.commands.lint.commands_builder import build_pytest_command
+    command = "python -m pytest --junitxml=/devwork/report_pytest.xml --json=/devwork/report_pytest.json"
+    assert command == build_pytest_command(test_xml="test",
+                                           json=True)
+
+
+def test_build_pwsh_analyze():
+    """Build Pwsh analyze command"""
+    from demisto_sdk.commands.lint.commands_builder import build_pwsh_analyze_command
+    file = MagicMock()
+    command = f"pwsh -Command Invoke-ScriptAnalyzer -EnableExit -Path {file.name}"
+    assert command == build_pwsh_analyze_command(file)
+
+
+def test_build_pwsh_test():
+    """Build Pwsh test command"""
+    from demisto_sdk.commands.lint.commands_builder import build_pwsh_test_command
+    command = "pwsh -Command Invoke-Pester -EnableExit"
+    assert command == build_pwsh_test_command()
diff --git a/demisto_sdk/commands/lint/tests/helper_test.py b/demisto_sdk/commands/lint/tests/helper_test.py
new file mode 100644
index 0000000000..92ef287c63
--- /dev/null
+++ b/demisto_sdk/commands/lint/tests/helper_test.py
@@ -0,0 +1,46 @@
+import pytest
+
+
+def test_validate_env(mocker) -> None:
+    from demisto_sdk.commands.lint import helpers
+    mocker.patch.object(helpers, 'run_command_os')
+    
helpers.run_command_os.side_effect = [('python2', '', ''), ('flake8, mypy, vultue', '', '')] + helpers.validate_env() + + assert helpers.run_command_os.call_count == 2 + + +EXIT_CODES = { + "flake8": 0b1, + "bandit": 0b10, + "mypy": 0b100, + "vulture": 0b1000, + "pytest": 0b10000, + "pylint": 0b100000, + "image": 0b1000000, + "pwsh_analyze": 0b10000000, + "pwsh_test": 0b100000000 +} + + +@pytest.mark.parametrize(argnames="no_flake8, no_bandit, no_mypy, no_pylint, no_vulture, no_test, no_pwsh_analyze, " + "no_pwsh_test, docker_engine, expected_value", + argvalues=[(True, True, True, True, True, True, True, True, True, 0b11111111), + (True, False, True, True, True, True, True, True, True, 0b11111101), + (True, False, True, True, True, True, False, True, True, 0b10111101)]) +def test_build_skipped_exit_code(no_flake8: bool, no_bandit: bool, no_mypy: bool, no_pylint: bool, no_vulture: bool, + no_test: bool, no_pwsh_analyze: bool, no_pwsh_test: bool, docker_engine: bool, + expected_value: int) -> bool: + from demisto_sdk.commands.lint.helpers import build_skipped_exit_code + + assert expected_value == build_skipped_exit_code(no_flake8, no_bandit, no_mypy, no_pylint, no_vulture, no_test, + no_pwsh_analyze, no_pwsh_test, docker_engine) + + +@pytest.mark.parametrize(argnames="image, output, expected", argvalues=[('alpine', b'3.7\n', 3.7), + ('alpine-3', b'2.7\n', 2.7)]) +def test_get_python_version_from_image(image: str, output: bytes, expected: float, mocker): + from demisto_sdk.commands.lint import helpers + mocker.patch.object(helpers, 'docker') + helpers.docker.from_env().containers.run().logs.return_value = output + assert expected == helpers.get_python_version_from_image(image) diff --git a/demisto_sdk/commands/lint/tests/lint_manager_test.py b/demisto_sdk/commands/lint/tests/lint_manager_test.py deleted file mode 100644 index 2706db8d6a..0000000000 --- a/demisto_sdk/commands/lint/tests/lint_manager_test.py +++ /dev/null @@ -1,57 +0,0 @@ -import os -from mock import 
patch - - -class TestCreateFailedUnitTestsFile: - def setup(self): - self.outfile = '' - - def teardown(self): - if self.outfile: - os.remove(self.outfile) - - def test_sanity(self): - from demisto_sdk.commands.lint.lint_manager import LintManager - - self.outfile = 'single_failed_package.txt' - - LintManager.create_failed_unittests_file(['asdf'], self.outfile) - assert os.path.isfile(self.outfile) - with open(self.outfile) as file_: - file_content = file_.read() - - assert file_content == 'asdf' - - def test_several_tests_failures(self): - from demisto_sdk.commands.lint.lint_manager import LintManager - - self.outfile = 'several_failed_packages.txt' - - LintManager.create_failed_unittests_file(['test', 'test2', 'test'], self.outfile) - assert os.path.isfile(self.outfile) - with open(self.outfile) as file_: - file_content = file_.read() - - assert file_content == 'test\ntest2\ntest' - - def test_no_test_failures(self): - from demisto_sdk.commands.lint.lint_manager import LintManager - - self.outfile = 'several_failed_packages.txt' - - LintManager.create_failed_unittests_file([], self.outfile) - assert os.path.isfile(self.outfile) - with open(self.outfile) as file_: - file_content = file_.read() - - assert file_content == '' - - @patch('demisto_sdk.commands.lint.lint_manager.get_dev_requirements') - @patch('demisto_sdk.commands.lint.lint_manager.LintManager.create_failed_unittests_file') - def test_no_outfile_set(self, create_failed_unittests_file, get_dev_requirements): - _ = get_dev_requirements # unused - from demisto_sdk.commands.lint.lint_manager import LintManager - lint_manager = LintManager('../../../../tests') - lint_manager._print_final_results(['test'], ['test2']) - assert create_failed_unittests_file.call_count == 0 - assert not os.path.isfile(self.outfile) diff --git a/demisto_sdk/commands/lint/tests/linter_manager_test.py b/demisto_sdk/commands/lint/tests/linter_manager_test.py new file mode 100644 index 0000000000..66a4ad031d --- /dev/null +++ 
b/demisto_sdk/commands/lint/tests/linter_manager_test.py @@ -0,0 +1,31 @@ +from unittest.mock import MagicMock, patch + +import pytest +from demisto_sdk.commands.common.constants import TYPE_PWSH, TYPE_PYTHON + + +@patch('builtins.print') +@pytest.mark.parametrize(argnames="return_exit_code, skipped_code, pkgs_type", + argvalues=[(0b0, 0b0, [TYPE_PWSH, TYPE_PYTHON])]) +def test_report_pass_lint_checks(mocker, return_exit_code: int, skipped_code: int, pkgs_type: list): + from demisto_sdk.commands.lint import lint_manager + lint_manager.LintManager.report_pass_lint_checks(return_exit_code, skipped_code, pkgs_type) + assert mocker.call_count == 8 + + +def test_report_failed_image_creation(): + from demisto_sdk.commands.lint import lint_manager + from demisto_sdk.commands.lint.helpers import EXIT_CODES + pkgs_status = MagicMock() + lint_status = { + "fail_packs_image": ['pack'] + } + pkgs_status.return_value = { + 'pack': { + "images": [{"image": 'alpine', "image_errors": "some_errors"}] + } + } + lint_manager.LintManager.report_failed_image_creation(lint_status=lint_status, + pkgs_status=pkgs_status, + return_exit_code=EXIT_CODES["image"]) + assert not pkgs_status.called diff --git a/demisto_sdk/commands/lint/tests/linter_test.py b/demisto_sdk/commands/lint/tests/linter_test.py deleted file mode 100644 index 295c3d7c8e..0000000000 --- a/demisto_sdk/commands/lint/tests/linter_test.py +++ /dev/null @@ -1,33 +0,0 @@ -import pytest - -from demisto_sdk.commands.lint.linter import Linter -from demisto_sdk.commands.common.git_tools import git_path - - -class TestLinter: - DIR_LIST = [ - f"{git_path()}/demisto_sdk/tests/test_files/fake_integration" - ] - - @pytest.mark.parametrize("directory", DIR_LIST) - def test_get_common_server_python(self, directory): - linter = Linter(directory) - ans = linter.get_common_server_python() - linter.remove_common_server_python() - assert ans - - @pytest.mark.skip(reason="No mypy") - @pytest.mark.parametrize("directory", DIR_LIST) - def 
test_run_mypy(self, directory): - linter = Linter(directory) - linter.run_mypy("2.7") - - @pytest.mark.parametrize("directory", DIR_LIST) - def test_run_bandit(self, directory): - linter = Linter(directory) - linter.run_bandit(3.7) - - @pytest.mark.parametrize("directory", DIR_LIST) - def test_run_vulture(self, directory): - linter = Linter(directory) - linter.run_vulture(3.7) diff --git a/demisto_sdk/commands/lint/tests/test_linter/conftest.py b/demisto_sdk/commands/lint/tests/test_linter/conftest.py new file mode 100644 index 0000000000..2537e042cf --- /dev/null +++ b/demisto_sdk/commands/lint/tests/test_linter/conftest.py @@ -0,0 +1,123 @@ +from typing import Callable, List, Optional + +import pytest +from demisto_sdk.commands.lint import linter +from demisto_sdk.commands.lint.linter import Linter +from ruamel.yaml import YAML +from wcmatch.pathlib import Path + + +@pytest.fixture +def linter_obj(mocker) -> Linter: + mocker.patch.object(linter, 'docker') + return Linter(pack_dir=Path(__file__).parent / 'data' / 'Integration' / 'intergration_sample', + content_repo=Path(__file__).parent / 'data', + req_3=["pytest==3.0"], + req_2=["pytest==2.0"], + docker_engine=True) + + +@pytest.fixture(scope='session') +def lint_files() -> List[Path]: + return [Path(__file__).parent / 'test_data' / 'Integration' / 'intergration_sample' / 'intergration_sample.py'] + + +@pytest.fixture +def demisto_content() -> Callable: + import shutil + # Init git repo + content_path = Path(__file__).parent / 'content' + + # Create file structure + dirs = ['Integrations', 'Scripts', 'Beta_Integrations'] + for dir_n in dirs: + (content_path / dir_n).mkdir(parents=True) + (content_path / 'Packs' / 'Sample' / dir_n).mkdir(parents=True) + + yield content_path + + shutil.rmtree(content_path) + + +@pytest.fixture +def create_integration(mocker) -> Callable: + def _create_integration(content_path: Path, path: str = 'Integrations', no_lint_file: bool = False, flake8: bool = False, + bandit: bool = 
False, mypy: bool = False, vulture: bool = False, pylint: bool = False,
+                            test: bool = False, no_tests: bool = False, yml: bool = False, js_type: bool = False,
+                            type_script_key: bool = False, image: bool = "", image_py_num: float = 3.7) -> Path:
+        """ Creates tmp content repository for integration test
+
+        Args:
+            content_path(Path): Content path from demisto_content fixture.
+            path(str): Path to create integration.
+            no_lint_file(bool): True for not creating pack.py file.
+            flake8(bool): True for creating flake8 error.
+            bandit(bool): True for creating bandit error.
+            mypy(bool): True for creating mypy error.
+            vulture(bool): True for creating vulture error.
+            pylint(bool): True for creating pylint error.
+            test(bool): True for creating test error.
+            no_tests(bool): True for not creating tests in pack.
+            yml(bool): True for creating yml structure error.
+            js_type(bool): True for defining pack as JavaScript in yml.
+            type_script_key(bool): True for defining type in script key.
+            image(str): Image to define in yml.
+            image_py_num(float): Image python version. 
+ + Returns: + Path: Path to tmp integration + """ + integration_name = 'Sample_integration' + integration_path = Path(content_path / path / integration_name) + integration_path.mkdir() + files_ext = ['.py', '.yml', '_description.md', '_image.png', '_test.py'] + for ext in files_ext: + if (ext == '_test.py' and no_tests) or (ext == '.py' and no_lint_file): + continue + (integration_path / f'{integration_name}{ext}').touch() + if flake8: + (integration_path / f'{integration_name}.py').write_text('\nfrom typing import *') + if bandit: + (integration_path / f'{integration_name}.py').write_text('\nimport os\n os.chmod(\'/etc/hosts\', 0o777)') + if mypy: + (integration_path / f'{integration_name}.py').write_text('\nx: int = "hello"') + if vulture: + (integration_path / f'{integration_name}.py').write_text('\nfrom typing import *') + if pylint: + (integration_path / f'{integration_name}.py').write_text('\ntest()') + if test and not no_tests: + (integration_path / f'{integration_name}_test.py').write_text('\nassert False') + yml_file = integration_path / f'{integration_name}.yml' + if yml: + yml_file.write_text('') + else: + yml_dict = {} + if js_type: + if type_script_key: + yml_dict['script'] = {'type': 'javascript'} + yml_dict['type'] = 'javascript' + else: + if type_script_key: + yml_dict['script'] = {'type': 'python'} + yml_dict['type'] = 'python' + if image: + yml_dict['dockerimage'] = image + from demisto_sdk.commands.lint import linter + mocker.patch.object(linter, 'get_python_version_from_image') + linter.get_python_version_from_image.return_value = image_py_num + + yaml = YAML() + yaml.dump(stream=yml_file.open(mode='w'), data=yml_dict) + + return integration_path + + return _create_integration + + +@pytest.fixture +def docker_mock(mocker): + def _docker_mock(BuildException: Optional[Exception] = None, image_id: str = "image-id"): + from demisto_sdk.commands.lint import linter + import docker + mocker.patch.object(docker, 'from_env') + 
mocker.patch.object(linter, '') diff --git a/demisto_sdk/commands/lint/tests/test_linter/docker_runner_test.py b/demisto_sdk/commands/lint/tests/test_linter/docker_runner_test.py new file mode 100644 index 0000000000..006d4b677d --- /dev/null +++ b/demisto_sdk/commands/lint/tests/test_linter/docker_runner_test.py @@ -0,0 +1,150 @@ +from unittest.mock import DEFAULT + +import pytest +from demisto_sdk.commands.common.constants import TYPE_PWSH, TYPE_PYTHON +from demisto_sdk.commands.lint import linter +from demisto_sdk.commands.lint.linter import Linter + + +class TestCreateImage: + def test_build_image_no_errors(self, linter_obj: Linter, mocker): + # Expected returns + exp_test_image_id = 'test-image' + exp_errors = "" + # Jinja2 mocking + mocker.patch.multiple(linter, Environment=DEFAULT, FileSystemLoader=DEFAULT, exceptions=DEFAULT, hashlib=DEFAULT, + copy_dir_to_container=DEFAULT) + # Facts mocking + mocker.patch.dict(linter_obj._facts, { + "images": [], + "python_version": 0, + "test": False, + "lint_files": [], + "additional_requirements": [], + "docker_engine": True, + "env_vars": { + "CI": True, + "DEMISTO_LINT_UPDATE_CERTS": "yes" + } + }) + mocker.patch.object(linter, 'io') + # Docker client mocking + mocker.patch.object(linter_obj, '_docker_client') + docker_build_response = mocker.MagicMock() + docker_build_response.short_id = exp_test_image_id + linter_obj._docker_client.containers.create().commit().short_id = exp_test_image_id + + act_test_image_id, act_errors = linter_obj._docker_image_create(docker_base_image=[exp_test_image_id, 3.7]) + + assert act_test_image_id == exp_test_image_id + assert act_errors == exp_errors + assert linter_obj._docker_client.images.build.call_count == 0 + + +class TestPylint: + def test_run_pylint_no_errors(self, mocker, linter_obj: Linter): + # Expected values + exp_container_exit_code = 0 + exp_container_log = "" + + # Docker client mocking + mocker.patch.object(linter_obj, '_docker_client') + 
linter_obj._docker_client.containers.run().wait.return_value = {"StatusCode": exp_container_exit_code} + linter_obj._docker_client.containers.run().logs.return_value = exp_container_log.encode('utf-8') + act_container_exit_code, act_container_log = linter_obj._docker_run_pylint(test_image='test-image', + keep_container=False) + + assert exp_container_exit_code == act_container_exit_code + assert exp_container_log == act_container_log + + @pytest.mark.parametrize(argnames="exp_container_exit_code, exp_container_log, exp_exit_code, exp_output", + argvalues=[(1, "test", 1, "test"), + (2, "test", 1, "test"), + (4, "test", 0, ""), + (8, "test", 0, ""), + (16, "test", 0, ""), + (32, "test", 2, "")]) + def test_run_pylint_with_errors(self, mocker, linter_obj: Linter, exp_container_exit_code: int, exp_container_log: str, + exp_exit_code: int, exp_output: str): + # Docker client mocking + mocker.patch.object(linter_obj, '_docker_client') + linter_obj._docker_client.containers.run().wait.return_value = {"StatusCode": exp_container_exit_code} + linter_obj._docker_client.containers.run().logs.return_value = exp_container_log.encode('utf-8') + act_exit_code, act_output = linter_obj._docker_run_pylint(test_image='test-image', + keep_container=False) + + assert act_exit_code == exp_exit_code + assert act_output == exp_output + + +class TestPytest: + @pytest.mark.parametrize(argnames="exp_container_exit_code, exp_exit_code", + argvalues=[(0, 0), + (1, 1), + (2, 1), + (5, 0)]) + def test_run_pytest(self, mocker, linter_obj: Linter, exp_container_exit_code: int, exp_exit_code: int): + exp_test_json = mocker.MagicMock() + + # Docker client mocking + mocker.patch.object(linter_obj, '_docker_client') + linter_obj._docker_client.containers.run().wait.return_value = {"StatusCode": exp_container_exit_code} + + # Docker related mocking + mocker.patch.object(linter, 'json') + linter.json.loads.return_value = exp_test_json + mocker.patch.object(linter, 'get_file_from_container') + + 
act_container_exit_code, act_test_json = linter_obj._docker_run_pytest(test_image='test-image', + keep_container=False, + test_xml="") + + assert exp_exit_code == act_container_exit_code + assert exp_test_json == act_test_json + + +class TestRunLintInContainer: + """Pylint/Pytest""" + + @pytest.mark.parametrize(argnames="no_test, no_pylint, no_pwsh_analyze, no_pwsh_test, pack_type", + argvalues=[(True, True, False, False, TYPE_PYTHON), + (False, True, True, True, TYPE_PYTHON), + (True, False, True, False, TYPE_PYTHON), + (False, False, False, False, TYPE_PYTHON)]) + def test_run_one_lint_check_success(self, mocker, linter_obj, lint_files, no_test: bool, no_pylint: bool, + no_pwsh_analyze: bool, no_pwsh_test: bool, pack_type: str): + mocker.patch.dict(linter_obj._facts, { + "images": [["image", "3.7"]], + "test": True, + "version_two": False, + "lint_files": lint_files, + "additional_requirements": [] + }) + mocker.patch.dict(linter_obj._pkg_lint_status, { + "pack_type": pack_type, + }) + mocker.patch.object(linter_obj, '_docker_image_create') + linter_obj._docker_image_create.return_value = ("test-image", "") + mocker.patch.object(linter_obj, '_docker_run_pytest') + linter_obj._docker_run_pytest.return_value = (0b0, {}) + mocker.patch.object(linter_obj, '_docker_run_pylint') + linter_obj._docker_run_pylint.return_value = (0b0, '') + mocker.patch.object(linter_obj, '_docker_run_pwsh_analyze') + linter_obj._docker_run_pwsh_analyze.return_value = (0b0, {}) + mocker.patch.object(linter_obj, '_docker_run_pwsh_test') + linter_obj._docker_run_pwsh_test.return_value = (0b0, '') + linter_obj._run_lint_on_docker_image(no_pylint=no_pylint, + no_test=no_test, + no_pwsh_analyze=no_pwsh_analyze, + no_pwsh_test=no_pwsh_test, + test_xml="", + keep_container=False) + assert linter_obj._pkg_lint_status.get("exit_code") == 0b0 + if not no_test and pack_type == TYPE_PYTHON: + linter_obj._docker_run_pytest.assert_called_once() + elif not no_pylint and pack_type == TYPE_PYTHON: + 
linter_obj._docker_run_pylint.assert_called_once() + elif not no_pwsh_analyze and pack_type == TYPE_PWSH: + linter_obj._docker_run_pwsh_analyze.assert_called_once() + elif not no_pwsh_test and pack_type == TYPE_PWSH: + linter_obj._docker_run_pwsh_test.assert_called_once() diff --git a/demisto_sdk/commands/lint/tests/test_linter/gather_facts_test.py b/demisto_sdk/commands/lint/tests/test_linter/gather_facts_test.py new file mode 100644 index 0000000000..f7e3365b02 --- /dev/null +++ b/demisto_sdk/commands/lint/tests/test_linter/gather_facts_test.py @@ -0,0 +1,172 @@ +from typing import Callable + + +class TestYamlParse: + def test_valid_yaml_key_script_is_dict(self, demisto_content, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + integration_path: Path = create_integration(content_path=demisto_content, + type_script_key=True) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=False) + assert not runner._gather_facts(modules={}) + + def test_valid_yaml_key_script_is_not_dict(self, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + integration_path: Path = create_integration(content_path=demisto_content, + type_script_key=False) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=False) + assert not runner._gather_facts(modules={}) + + def test_not_valid_yaml(self, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + integration_path: Path = create_integration(content_path=demisto_content, + yml=True) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=False) + assert runner._gather_facts(modules={}) + + +class 
TestPythonPack: + def test_package_is_python_pack(self, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + integration_path: Path = create_integration(content_path=demisto_content, + js_type=False) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=False) + assert not runner._gather_facts(modules={}) + + def test_package_is_not_python_pack(self, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + integration_path: Path = create_integration(content_path=demisto_content, + js_type=True) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=False) + assert runner._gather_facts(modules={}) + + +class TestDockerImagesCollection: + def test_docker_images_exists(self, mocker, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + exp_image = "test-image:12.0" + exp_py_num = 2.7 + mocker.patch.object(linter.Linter, '_docker_login') + linter.Linter._docker_login.return_value = False + integration_path: Path = create_integration(content_path=demisto_content, + image=exp_image, + image_py_num=exp_py_num) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=True) + runner._gather_facts(modules={}) + + assert runner._facts["images"][0][0] == exp_image + assert runner._facts["images"][0][1] == exp_py_num + + def test_docker_images_not_exists(self, mocker, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + exp_image = "demisto/python:1.3-alpine" + exp_py_num = 2.7 + mocker.patch.object(linter.Linter, '_docker_login') 
+ linter.Linter._docker_login.return_value = False + integration_path: Path = create_integration(content_path=demisto_content, + image="", + image_py_num=exp_py_num) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=True) + + runner._gather_facts(modules={}) + + assert runner._facts["images"][0][0] == exp_image + assert runner._facts["images"][0][1] == exp_py_num + + +class TestTestsCollection: + def test_tests_exists(self, mocker, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + mocker.patch.object(linter.Linter, '_docker_login') + linter.Linter._docker_login.return_value = False + integration_path: Path = create_integration(content_path=demisto_content, + no_tests=False) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=True) + runner._gather_facts(modules={}) + assert runner._facts["test"] + + def test_tests_not_exists(self, mocker, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + mocker.patch.object(linter.Linter, '_docker_login') + linter.Linter._docker_login.return_value = False + integration_path: Path = create_integration(content_path=demisto_content, + no_tests=True) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=True) + runner._gather_facts(modules={}) + assert not runner._facts["test"] + + +class TestLintFilesCollection: + def test_lint_files_exists(self, mocker, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + mocker.patch.object(linter.Linter, '_docker_login') + linter.Linter._docker_login.return_value = False + integration_path: Path = 
create_integration(content_path=demisto_content, + no_lint_file=False) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=True) + runner._gather_facts(modules={}) + assert runner._facts["lint_files"][0] == integration_path / f'{integration_path.name}.py' + + def test_lint_files_not_exists(self, mocker, demisto_content: Callable, create_integration: Callable): + from demisto_sdk.commands.lint import linter + from wcmatch.pathlib import Path + mocker.patch.object(linter.Linter, '_docker_login') + linter.Linter._docker_login.return_value = False + integration_path: Path = create_integration(content_path=demisto_content, + no_lint_file=True) + runner = linter.Linter(content_repo=demisto_content, + pack_dir=integration_path, + req_2=[], + req_3=[], + docker_engine=True) + runner._gather_facts(modules={}) + assert not runner._facts["lint_files"] diff --git a/demisto_sdk/commands/lint/tests/test_linter/os_runner_test.py b/demisto_sdk/commands/lint/tests/test_linter/os_runner_test.py new file mode 100644 index 0000000000..4502d027cf --- /dev/null +++ b/demisto_sdk/commands/lint/tests/test_linter/os_runner_test.py @@ -0,0 +1,300 @@ +from pathlib import Path +from typing import List + +import pytest +from demisto_sdk.commands.lint.linter import Linter + + +class TestFlake8: + def test_run_flake8_success(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + linter.run_command_os.return_value = ('', '', 0) + + exit_code, output = linter_obj._run_flake8(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b0, "Exit code should be 0" + assert output == '', "Output should be empty" + + def test_run_flake8_fail_lint(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + expected_output = 'Error code 
found' + linter.run_command_os.return_value = (expected_output, '', 1) + + exit_code, output = linter_obj._run_flake8(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b1, "Exit code should be 1" + assert output == expected_output, "Output should be empty" + + def test_run_flake8_usage_stderr(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + expected_output = 'Error code found' + linter.run_command_os.return_value = ('not good', expected_output, 1) + + exit_code, output = linter_obj._run_flake8(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b1, "Exit code should be 1" + assert output == expected_output, "Output should be empty" + + +class TestBandit: + def test_run_bandit_success(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + linter.run_command_os.return_value = ('', '', 0) + + exit_code, output = linter_obj._run_bandit(lint_files=lint_files) + + assert exit_code == 0b0, "Exit code should be 0" + assert output == '', "Output should be empty" + + def test_run_bandit_fail_lint(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + expected_output = 'Error code found' + linter.run_command_os.return_value = (expected_output, '', 1) + + exit_code, output = linter_obj._run_bandit(lint_files=lint_files) + + assert exit_code == 0b1, "Exit code should be 1" + assert output == expected_output, "Output should be empty" + + def test_run_bandit_usage_stderr(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + expected_output = 'Error code found' + linter.run_command_os.return_value = ('not good', expected_output, 1) + + exit_code, 
output = linter_obj._run_bandit(lint_files=lint_files) + + assert exit_code == 0b1, "Exit code should be 1" + assert output == expected_output, "Output should be empty" + + +class TestMypy: + def test_run_mypy_success(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + linter.run_command_os.return_value = ('Success: no issues found', '', 0) + + exit_code, output = linter_obj._run_mypy(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b0, "Exit code should be 0" + assert output == '', "Output should be empty" + + def test_run_mypy_fail_lint(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + expected_output = 'Error code found' + linter.run_command_os.return_value = (expected_output, '', 1) + + exit_code, output = linter_obj._run_mypy(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b1, "Exit code should be 1" + assert output == expected_output, "Output should be empty" + + def test_run_mypy_usage_stderr(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + expected_output = 'Error code found' + linter.run_command_os.return_value = ('not good', expected_output, 1) + + exit_code, output = linter_obj._run_mypy(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b1, "Exit code should be 1" + assert output == expected_output, "Output should be empty" + + +class TestVulture: + def test_run_vulture_success(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + linter.run_command_os.return_value = ('', '', 0) + + exit_code, output = linter_obj._run_vulture(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b0, "Exit code 
should be 0" + assert output == '', "Output should be empty" + + def test_run_vulture_fail_lint(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + expected_output = 'Error code found' + linter.run_command_os.return_value = (expected_output, '', 1) + + exit_code, output = linter_obj._run_vulture(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b1, "Exit code should be 1" + assert output == expected_output, "Output should be empty" + + def test_run_vulture_usage_stderr(self, linter_obj: Linter, lint_files: List[Path], mocker): + from demisto_sdk.commands.lint import linter + + mocker.patch.object(linter, 'run_command_os') + expected_output = 'Error code found' + linter.run_command_os.return_value = ('not good', expected_output, 1) + + exit_code, output = linter_obj._run_vulture(lint_files=lint_files, py_num=3.7) + + assert exit_code == 0b1, "Exit code should be 1" + assert output == expected_output, "Output should be empty" + + +class TestRunLintInHost: + """Flake8/Bandit/Mypy/Vulture""" + + @pytest.mark.parametrize(argnames="no_flake8, no_bandit, no_mypy, no_vulture", + argvalues=[(True, True, True, False), + (False, True, True, True), + (True, True, False, True), + (True, False, True, True)]) + @pytest.mark.usefixtures("linter_obj", "mocker", "lint_files") + def test_run_one_lint_check_success(self, mocker, linter_obj, lint_files, no_flake8: bool, + no_bandit: bool, no_mypy: bool, no_vulture: bool): + mocker.patch.dict(linter_obj._facts, { + "images": [["image", "3.7"]], + "test": False, + "version_two": False, + "lint_files": lint_files, + "additional_requirements": [] + }) + mocker.patch.object(linter_obj, '_run_flake8') + linter_obj._run_flake8.return_value = (0b0, '') + mocker.patch.object(linter_obj, '_run_bandit') + linter_obj._run_bandit.return_value = (0b0, '') + mocker.patch.object(linter_obj, '_run_mypy') + 
linter_obj._run_mypy.return_value = (0b0, '') + mocker.patch.object(linter_obj, '_run_vulture') + linter_obj._run_vulture.return_value = (0b0, '') + linter_obj._run_lint_in_host(no_flake8=no_flake8, + no_bandit=no_bandit, + no_mypy=no_mypy, + no_vulture=no_vulture) + assert linter_obj._pkg_lint_status.get("exit_code") == 0b0 + if not no_flake8: + linter_obj._run_flake8.assert_called_once() + assert linter_obj._pkg_lint_status.get("flake8_errors") is None + elif not no_bandit: + linter_obj._run_bandit.assert_called_once() + assert linter_obj._pkg_lint_status.get("bandit_errors") is None + elif not no_mypy: + linter_obj._run_mypy.assert_called_once() + assert linter_obj._pkg_lint_status.get("mypy_errors") is None + elif not no_vulture: + linter_obj._run_vulture.assert_called_once() + assert linter_obj._pkg_lint_status.get("vulture_errors") is None + + @pytest.mark.parametrize(argnames="no_flake8, no_bandit, no_mypy, no_vulture", + argvalues=[(True, True, True, False), + (False, True, True, True), + (True, True, False, True), + (True, False, True, True)]) + @pytest.mark.usefixtures("linter_obj", "mocker", "lint_files") + def test_run_one_lint_check_fail(self, mocker, linter_obj, lint_files, no_flake8: bool, no_bandit: bool, + no_mypy: bool, no_vulture: bool): + from demisto_sdk.commands.lint.linter import EXIT_CODES + mocker.patch.dict(linter_obj._facts, { + "images": [["image", "3.7"]], + "test": False, + "version_two": False, + "lint_files": lint_files, + "additional_requirements": [] + }) + mocker.patch.object(linter_obj, '_run_flake8') + linter_obj._run_flake8.return_value = (0b1, 'Error') + mocker.patch.object(linter_obj, '_run_bandit') + linter_obj._run_bandit.return_value = (0b1, 'Error') + mocker.patch.object(linter_obj, '_run_mypy') + linter_obj._run_mypy.return_value = (0b1, 'Error') + mocker.patch.object(linter_obj, '_run_vulture') + linter_obj._run_vulture.return_value = (0b1, 'Error') + linter_obj._run_lint_in_host(no_flake8=no_flake8, + 
no_bandit=no_bandit, + no_mypy=no_mypy, + no_vulture=no_vulture) + if not no_flake8: + linter_obj._run_flake8.assert_called_once() + assert linter_obj._pkg_lint_status.get("flake8_errors") == 'Error' + assert linter_obj._pkg_lint_status.get("exit_code") == EXIT_CODES['flake8'] + elif not no_bandit: + linter_obj._run_bandit.assert_called_once() + assert linter_obj._pkg_lint_status.get("bandit_errors") == 'Error' + assert linter_obj._pkg_lint_status.get("exit_code") == EXIT_CODES['bandit'] + elif not no_mypy: + linter_obj._run_mypy.assert_called_once() + assert linter_obj._pkg_lint_status.get("mypy_errors") == 'Error' + assert linter_obj._pkg_lint_status.get("exit_code") == EXIT_CODES['mypy'] + elif not no_vulture: + linter_obj._run_vulture.assert_called_once() + assert linter_obj._pkg_lint_status.get("vulture_errors") == 'Error' + assert linter_obj._pkg_lint_status.get("exit_code") == EXIT_CODES['vulture'] + + @pytest.mark.usefixtures("linter_obj", "mocker", "lint_files") + def test_run_all_lint_fail_all(self, mocker, linter_obj, lint_files): + from demisto_sdk.commands.lint.linter import EXIT_CODES + mocker.patch.dict(linter_obj._facts, { + "images": [["image", "3.7"]], + "test": False, + "version_two": False, + "lint_files": lint_files, + "additional_requirements": [] + }) + mocker.patch.object(linter_obj, '_run_flake8') + linter_obj._run_flake8.return_value = (0b1, 'Error') + mocker.patch.object(linter_obj, '_run_bandit') + linter_obj._run_bandit.return_value = (0b1, 'Error') + mocker.patch.object(linter_obj, '_run_mypy') + linter_obj._run_mypy.return_value = (0b1, 'Error') + mocker.patch.object(linter_obj, '_run_vulture') + linter_obj._run_vulture.return_value = (0b1, 'Error') + linter_obj._run_lint_in_host(no_flake8=False, + no_bandit=False, + no_mypy=False, + no_vulture=False) + linter_obj._run_flake8.assert_called_once() + assert linter_obj._pkg_lint_status.get("flake8_errors") == 'Error' + linter_obj._run_bandit.assert_called_once() + assert 
linter_obj._pkg_lint_status.get("bandit_errors") == 'Error' + linter_obj._run_mypy.assert_called_once() + assert linter_obj._pkg_lint_status.get("mypy_errors") == 'Error' + linter_obj._run_vulture.assert_called_once() + assert linter_obj._pkg_lint_status.get("vulture_errors") == 'Error' + assert linter_obj._pkg_lint_status.get("exit_code") == EXIT_CODES['flake8'] + EXIT_CODES['bandit'] + \ + EXIT_CODES['mypy'] + EXIT_CODES['vulture'] + + def test_no_lint_files(self, mocker, linter_obj): + """No lint files exsits - not running any lint check""" + mocker.patch.dict(linter_obj._facts, { + "images": [["image", "3.7"]], + "test": False, + "version_two": False, + "lint_files": [], + "additional_requirements": [] + }) + mocker.patch.object(linter_obj, '_run_flake8') + mocker.patch.object(linter_obj, '_run_bandit') + mocker.patch.object(linter_obj, '_run_mypy') + mocker.patch.object(linter_obj, '_run_vulture') + + linter_obj._run_lint_in_host(no_flake8=False, + no_bandit=False, + no_mypy=False, + no_vulture=False) + + linter_obj._run_flake8.assert_not_called() + linter_obj._run_bandit.assert_not_called() + linter_obj._run_mypy.assert_not_called() + linter_obj._run_vulture.assert_not_called() diff --git a/demisto_sdk/commands/run_cmd/runner.py b/demisto_sdk/commands/run_cmd/runner.py index 521fd1be12..456bdda5e3 100644 --- a/demisto_sdk/commands/run_cmd/runner.py +++ b/demisto_sdk/commands/run_cmd/runner.py @@ -1,8 +1,10 @@ -import re -import demisto_client import ast +import re -from demisto_sdk.commands.common.tools import print_error, print_color, LOG_COLORS, print_v, print_warning +import demisto_client +from demisto_sdk.commands.common.tools import (LOG_COLORS, print_color, + print_error, print_v, + print_warning) class DemistoRunTimeError(RuntimeError): diff --git a/demisto_sdk/commands/run_playbook/playbook_runner.py b/demisto_sdk/commands/run_playbook/playbook_runner.py index e3e6cfd36e..e8f965ef4f 100644 --- a/demisto_sdk/commands/run_playbook/playbook_runner.py 
+++ b/demisto_sdk/commands/run_playbook/playbook_runner.py @@ -1,8 +1,10 @@ import os import time + import demisto_client from demisto_client.demisto_api.rest import ApiException -from demisto_sdk.commands.common.tools import print_error, print_color, LOG_COLORS +from demisto_sdk.commands.common.tools import (LOG_COLORS, print_color, + print_error) class PlaybookRunner: diff --git a/demisto_sdk/commands/secrets/secrets.py b/demisto_sdk/commands/secrets/secrets.py index 6ea35fdf0d..6947b1350e 100644 --- a/demisto_sdk/commands/secrets/secrets.py +++ b/demisto_sdk/commands/secrets/secrets.py @@ -1,19 +1,24 @@ import io -import os -import math import json +import math +import os import string -import PyPDF2 +import PyPDF2 from bs4 import BeautifulSoup -from demisto_sdk.commands.common.constants import re, REQUIRED_YML_FILE_TYPES, PACKS_DIR, PACKS_WHITELIST_FILE_NAME, \ - INTEGRATION_README_REGEX, EXTERNAL_PR_REGEX -from demisto_sdk.commands.common.tools import run_command, print_error, print_color, LOG_COLORS, checked_type, \ - is_file_path_in_pack, get_pack_name, print_warning - # secrets settings # Entropy score is determined by shanon's entropy algorithm, most English words will score between 1.5 and 3.5 from demisto_sdk.commands.common.configuration import Configuration +from demisto_sdk.commands.common.constants import (EXTERNAL_PR_REGEX, + INTEGRATION_README_REGEX, + PACKS_DIR, + PACKS_WHITELIST_FILE_NAME, + REQUIRED_YML_FILE_TYPES, re) +from demisto_sdk.commands.common.tools import (LOG_COLORS, checked_type, + get_pack_name, + is_file_path_in_pack, + print_color, print_error, + print_warning, run_command) ENTROPY_THRESHOLD = 4.0 ACCEPTED_FILE_STATUSES = ['m', 'a'] @@ -201,7 +206,7 @@ def search_potential_secrets(self, secrets_file_paths: list, ignore_entropy: boo if high_entropy_strings or secrets_found_with_regex: # uniquify identical matches between lists file_secrets = list(set(high_entropy_strings + secrets_found_with_regex)) - secrets_found[file_name] = 
file_secrets + secrets_found[file_path] = file_secrets return secrets_found diff --git a/demisto_sdk/commands/secrets/tests/secrets_test.py b/demisto_sdk/commands/secrets/tests/secrets_test.py index ed5fc4e32e..8f31f5350b 100644 --- a/demisto_sdk/commands/secrets/tests/secrets_test.py +++ b/demisto_sdk/commands/secrets/tests/secrets_test.py @@ -1,9 +1,10 @@ -import os -from demisto_sdk.commands.secrets.secrets import SecretsValidator import io -import shutil import json +import os +import shutil + from demisto_sdk.commands.common.git_tools import git_path +from demisto_sdk.commands.secrets.secrets import SecretsValidator def create_whitelist_secrets_file(file_path, urls=[], ips=[], files=[], generic_strings=[]): @@ -89,7 +90,7 @@ def test_search_potential_secrets__secrets_found(self): ''') secrets_found = validator.search_potential_secrets([self.TEST_FILE_WITH_SECRETS]) - assert secrets_found['file_with_secrets_in_it.yml'] == ['OIifdsnsjkgnj3254nkdfsjKNJD0345'] + assert secrets_found[self.TEST_FILE_WITH_SECRETS] == ['OIifdsnsjkgnj3254nkdfsjKNJD0345'] def test_ignore_entropy(self): """ @@ -125,7 +126,43 @@ def test_ignore_entropy(self): ''') secrets_found = validator.search_potential_secrets([self.TEST_FILE_WITH_SECRETS], True) - assert secrets_found['file_with_secrets_in_it.yml'] == ['fooo@someorg.com'] + assert secrets_found[self.TEST_FILE_WITH_SECRETS] == ['fooo@someorg.com'] + + def test_two_files_with_same_name(self): + """ + - no items in the whitelist + - file contains 1 secret: + - email + + - run validate secrets with --ignore-entropy=True + + - ensure secret is found in two files from different directories with the same base name + """ + create_empty_whitelist_secrets_file(os.path.join(TestSecrets.TEMP_DIR, TestSecrets.WHITE_LIST_FILE_NAME)) + dir1_path = os.path.join(TestSecrets.TEMP_DIR, "dir1") + dir2_path = os.path.join(TestSecrets.TEMP_DIR, "dir2") + os.mkdir(dir1_path) + os.mkdir(dir2_path) + validator = SecretsValidator(is_circle=True, + 
ignore_entropy=True, + white_list_path=os.path.join(TestSecrets.TEMP_DIR, + TestSecrets.WHITE_LIST_FILE_NAME)) + + file_name = 'README.md' + file1_path = os.path.join(dir1_path, file_name) + file2_path = os.path.join(dir2_path, file_name) + for file_path in [file1_path, file2_path]: + with io.open(file_path, 'w') as f: + f.write(''' +print('This is our dummy code') + +my_email = "fooo@someorg.com" + + +''') + secrets_found = validator.search_potential_secrets([file1_path, file2_path], True) + assert secrets_found[os.path.join(dir1_path, file_name)] == ['fooo@someorg.com'] + assert secrets_found[os.path.join(dir2_path, file_name)] == ['fooo@someorg.com'] def test_remove_white_list_regex(self): white_list = '155.165.45.232' diff --git a/demisto_sdk/commands/split_yml/extractor.py b/demisto_sdk/commands/split_yml/extractor.py index 7499ef8234..5df0d710b7 100644 --- a/demisto_sdk/commands/split_yml/extractor.py +++ b/demisto_sdk/commands/split_yml/extractor.py @@ -6,14 +6,17 @@ from io import open import yaml +from demisto_sdk.commands.common.configuration import Configuration +from demisto_sdk.commands.common.constants import (TYPE_PWSH, TYPE_PYTHON, + TYPE_TO_EXTENSION) +from demisto_sdk.commands.common.tools import (LOG_COLORS, + get_all_docker_images, + get_pipenv_dir, + get_python_version, pascal_case, + print_color, print_error) from ruamel.yaml import YAML from ruamel.yaml.scalarstring import SingleQuotedScalarString -from demisto_sdk.commands.common.configuration import Configuration -from demisto_sdk.commands.common.tools import print_color, LOG_COLORS, get_python_version, \ - get_pipenv_dir, get_all_docker_images, print_error, pascal_case -from demisto_sdk.commands.common.constants import TYPE_TO_EXTENSION, TYPE_PYTHON, TYPE_PWSH - class Extractor: """Extractor is a class that's designed to split a yml file to it's components. 
@@ -85,6 +88,13 @@ def extract_to_package_format(self) -> int: del yaml_obj['image'] if 'detaileddescription' in yaml_obj: del yaml_obj['detaileddescription'] + script_obj['script'] = SingleQuotedScalarString('') + code_type = script_obj['type'] + if code_type == TYPE_PWSH and not yaml_obj.get('fromversion'): + print("Setting fromversion for PowerShell to: 5.5.0") + yaml_obj['fromversion'] = "5.5.0" + with open(yaml_out, 'w') as yf: + ryaml.dump(yaml_obj, yf) # check if there is a README yml_readme = os.path.splitext(self.input)[0] + '_README.md' readme = output_path + '/README.md' @@ -98,12 +108,9 @@ def extract_to_package_format(self) -> int: else: with open(changelog, 'wt', encoding='utf-8') as changelog_file: changelog_file.write("## [Unreleased]\n-\n") - script_obj['script'] = SingleQuotedScalarString('') - with open(yaml_out, 'w') as yf: - ryaml.dump(yaml_obj, yf) # Python code formatting and dev env setup - code_type = script_obj['type'] if code_type == TYPE_PYTHON: + code_file += '.py' print("Running autopep8 on file: {} ...".format(code_file)) try: subprocess.call(["autopep8", "-i", "--max-line-length", "130", code_file]) @@ -112,6 +119,14 @@ def extract_to_package_format(self) -> int: "Make sure to install it with: pip install autopep8.\n" "Then run: autopep8 -i {}".format(code_file), LOG_COLORS.YELLOW) + print("Running isort on file: {} ...".format(code_file)) + try: + subprocess.call(["isort", code_file]) + except FileNotFoundError: + print_color("isort skipped! 
It doesn't seem you have isort installed.\n" + "Make sure to install it with: pip install isort.\n" + "Then run: isort {}".format(code_file), LOG_COLORS.YELLOW) + print("Detecting python version and setting up pipenv files ...") docker = get_all_docker_images(script_obj)[0] py_ver = get_python_version(docker, self.config.log_verbose) diff --git a/demisto_sdk/commands/split_yml/tests/extractor_test.py b/demisto_sdk/commands/split_yml/tests/extractor_test.py index 83fea2f349..d94abc6b73 100644 --- a/demisto_sdk/commands/split_yml/tests/extractor_test.py +++ b/demisto_sdk/commands/split_yml/tests/extractor_test.py @@ -1,9 +1,11 @@ +import base64 +import os + +import yaml from demisto_sdk.commands.common.configuration import Configuration from demisto_sdk.commands.common.constants import DEFAULT_IMAGE_BASE64 from demisto_sdk.commands.common.git_tools import git_path from demisto_sdk.commands.split_yml.extractor import Extractor -import os -import base64 def test_extract_long_description(tmpdir): @@ -107,3 +109,27 @@ def test_extract_to_package_format_pwsh(tmpdir): with open(out.join('PowerShellRemotingOverSSH').join('README.md'), 'r') as f: file_data = f.read() assert 'This is a sample test README' in file_data + with open(out.join('PowerShellRemotingOverSSH').join('PowerShellRemotingOverSSH.yml'), 'r') as f: + yaml_obj = yaml.safe_load(f) + assert yaml_obj['fromversion'] == '5.5.0' + assert not yaml_obj['script']['script'] + + +def test_extract_to_package_format_py(tmpdir, mocker): + mocker.patch( + 'demisto_sdk.commands.split_yml.extractor.get_python_version', + return_value='2.7' + ) + mocker.patch( + 'demisto_sdk.commands.split_yml.extractor.get_pipenv_dir', + return_value=os.path.join(git_path(), 'demisto_sdk/tests/test_files/default_python2') + ) + out = tmpdir.join('Integrations') + extractor = Extractor(input=f'{git_path()}/demisto_sdk/tests/test_files/integration-Zoom.yml', + output=str(out), file_type='integration') + extractor.extract_to_package_format() + 
with open(out.join('Zoom').join('Zoom.py'), 'r', encoding='utf-8') as f: + file_data = f.read() + # check imports are sorted + assert 'import datetime\nimport json\nimport shutil\nfrom zipfile import ZipFile\n\nimport requests\n\n' \ + 'import demistomock as demisto\nimport jwt\nfrom CommonServerPython import *\n' in file_data diff --git a/demisto_sdk/commands/unify/tests/unifier_test.py b/demisto_sdk/commands/unify/tests/unifier_test.py index 3c137f3cc4..f4de10f363 100644 --- a/demisto_sdk/commands/unify/tests/unifier_test.py +++ b/demisto_sdk/commands/unify/tests/unifier_test.py @@ -1,14 +1,14 @@ -import os +import base64 import copy -import pytest -from mock import patch +import os import shutil + +import pytest import yaml -import base64 -from demisto_sdk.commands.common.git_tools import git_path import yamlordereddictloader - +from demisto_sdk.commands.common.git_tools import git_path from demisto_sdk.commands.common.tools import get_yaml +from mock import patch TEST_VALID_CODE = '''import demistomock as demisto from CommonServerPython import * diff --git a/demisto_sdk/commands/unify/unifier.py b/demisto_sdk/commands/unify/unifier.py index 0b99b83fbd..3d27bb7e0a 100644 --- a/demisto_sdk/commands/unify/unifier.py +++ b/demisto_sdk/commands/unify/unifier.py @@ -1,20 +1,24 @@ -import os -import io -import glob import base64 -import re import copy +import glob +import io +import os +import re from typing import Tuple +from demisto_sdk.commands.common.constants import (DEFAULT_IMAGE_PREFIX, + DIR_TO_PREFIX, + INTEGRATIONS_DIR, + SCRIPTS_DIR, + TYPE_TO_EXTENSION, Errors) +from demisto_sdk.commands.common.tools import (LOG_COLORS, get_yaml, + get_yml_paths_in_dir, + print_color, print_error, + print_warning, + server_version_compare) from ruamel.yaml import YAML from ruamel.yaml.scalarstring import FoldedScalarString -from demisto_sdk.commands.common.constants import Errors -from demisto_sdk.commands.common.tools import get_yaml, server_version_compare, 
get_yml_paths_in_dir, print_error,\ - print_warning, print_color, LOG_COLORS -from demisto_sdk.commands.common.constants import TYPE_TO_EXTENSION, INTEGRATIONS_DIR, DIR_TO_PREFIX, \ - DEFAULT_IMAGE_PREFIX, SCRIPTS_DIR - class Unifier: diff --git a/demisto_sdk/commands/upload/uploader.py b/demisto_sdk/commands/upload/uploader.py index 04153bc894..707e372ca4 100644 --- a/demisto_sdk/commands/upload/uploader.py +++ b/demisto_sdk/commands/upload/uploader.py @@ -1,7 +1,8 @@ -import demisto_client import os -from demisto_sdk.commands.common.tools import print_color, LOG_COLORS, print_v, print_error +import demisto_client +from demisto_sdk.commands.common.tools import (LOG_COLORS, print_color, + print_error, print_v) from demisto_sdk.commands.unify.unifier import Unifier diff --git a/demisto_sdk/commands/validate/file_validator.py b/demisto_sdk/commands/validate/file_validator.py index 49bf8d2565..cf405396f9 100644 --- a/demisto_sdk/commands/validate/file_validator.py +++ b/demisto_sdk/commands/validate/file_validator.py @@ -14,35 +14,52 @@ import os import re -from demisto_sdk.commands.common.hook_validations.dashboard import DashboardValidator -from demisto_sdk.commands.common.hook_validations.incident_type import IncidentTypeValidator -from demisto_sdk.commands.common.hook_validations.pack_unique_files import PackUniqueFilesValidator from demisto_sdk.commands.common.configuration import Configuration -from demisto_sdk.commands.common.constants import CODE_FILES_REGEX, OLD_YML_FORMAT_FILE, SCHEMA_REGEX, \ - KNOWN_FILE_STATUSES, IGNORED_TYPES_REGEXES, INTEGRATION_REGEX, BETA_INTEGRATION_REGEX, BETA_INTEGRATION_YML_REGEX, \ - SCRIPT_REGEX, IMAGE_REGEX, TEST_PLAYBOOK_REGEX, DIR_LIST_FOR_REGULAR_ENTETIES, \ - PACKAGE_SUPPORTING_DIRECTORIES, YML_BETA_INTEGRATIONS_REGEXES, PACKAGE_SCRIPTS_REGEXES, YML_INTEGRATION_REGEXES, \ - PACKS_DIR, PACKS_DIRECTORIES, Errors, PLAYBOOKS_REGEXES_LIST, JSON_INDICATOR_AND_INCIDENT_FIELDS, PLAYBOOK_REGEX, \ - JSON_ALL_LAYOUT_REGEXES, 
REPUTATION_REGEX, CHECKED_TYPES_REGEXES, JSON_ALL_DASHBOARDS_REGEXES, \ - JSON_ALL_INCIDENT_TYPES_REGEXES, TESTS_DIRECTORIES -from demisto_sdk.commands.common.hook_validations.conf_json import ConfJsonValidator -from demisto_sdk.commands.common.hook_validations.description import DescriptionValidator +from demisto_sdk.commands.common.constants import ( + BETA_INTEGRATION_REGEX, BETA_INTEGRATION_YML_REGEX, CHECKED_TYPES_REGEXES, + CODE_FILES_REGEX, DIR_LIST_FOR_REGULAR_ENTETIES, IGNORED_TYPES_REGEXES, + IMAGE_REGEX, INTEGRATION_REGEX, JSON_ALL_DASHBOARDS_REGEXES, + JSON_ALL_INCIDENT_TYPES_REGEXES, JSON_ALL_LAYOUT_REGEXES, + JSON_INDICATOR_AND_INCIDENT_FIELDS, KNOWN_FILE_STATUSES, + OLD_YML_FORMAT_FILE, PACKAGE_SCRIPTS_REGEXES, + PACKAGE_SUPPORTING_DIRECTORIES, PACKS_DIR, PACKS_DIRECTORIES, + PLAYBOOK_REGEX, PLAYBOOKS_REGEXES_LIST, REPUTATION_REGEX, SCHEMA_REGEX, + SCRIPT_REGEX, TEST_PLAYBOOK_REGEX, TESTS_DIRECTORIES, + YML_BETA_INTEGRATIONS_REGEXES, YML_INTEGRATION_REGEXES, Errors) +from demisto_sdk.commands.common.hook_validations.conf_json import \ + ConfJsonValidator +from demisto_sdk.commands.common.hook_validations.dashboard import \ + DashboardValidator from demisto_sdk.commands.common.hook_validations.id import IDSetValidator from demisto_sdk.commands.common.hook_validations.image import ImageValidator -from demisto_sdk.commands.common.hook_validations.incident_field import IncidentFieldValidator -from demisto_sdk.commands.common.hook_validations.integration import IntegrationValidator -from demisto_sdk.commands.common.hook_validations.reputation import ReputationValidator -from demisto_sdk.commands.common.hook_validations.script import ScriptValidator -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator -from demisto_sdk.commands.common.hook_validations.playbook import PlaybookValidator +from demisto_sdk.commands.common.hook_validations.incident_field import \ + IncidentFieldValidator +from 
demisto_sdk.commands.common.hook_validations.incident_type import \ + IncidentTypeValidator +from demisto_sdk.commands.common.hook_validations.integration import \ + IntegrationValidator from demisto_sdk.commands.common.hook_validations.layout import LayoutValidator +from demisto_sdk.commands.common.hook_validations.pack_unique_files import \ + PackUniqueFilesValidator +from demisto_sdk.commands.common.hook_validations.playbook import \ + PlaybookValidator from demisto_sdk.commands.common.hook_validations.readme import ReadMeValidator - -from demisto_sdk.commands.common.tools import checked_type, run_command, print_error, print_warning, print_color, \ - LOG_COLORS, get_yaml, filter_packagify_changes, get_pack_name, is_file_path_in_pack, \ - get_yml_paths_in_dir, find_type +from demisto_sdk.commands.common.hook_validations.release_notes import \ + ReleaseNotesValidator +from demisto_sdk.commands.common.hook_validations.reputation import \ + ReputationValidator +from demisto_sdk.commands.common.hook_validations.script import ScriptValidator +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator +from demisto_sdk.commands.common.tools import (LOG_COLORS, checked_type, + filter_packagify_changes, + find_type, get_pack_name, + get_remote_file, get_yaml, + get_yml_paths_in_dir, + is_file_path_in_pack, + print_color, print_error, + print_warning, run_command) from demisto_sdk.commands.unify.unifier import Unifier -from demisto_sdk.commands.common.hook_validations.release_notes import ReleaseNotesValidator class FilesValidator: @@ -61,7 +78,7 @@ class FilesValidator: configuration (Configuration): Configurations for IDSetValidator. 
""" - def __init__(self, is_backward_check=True, prev_ver='origin/master', use_git=False, is_circle=False, + def __init__(self, is_backward_check=True, prev_ver=None, use_git=False, is_circle=False, print_ignored_files=False, validate_conf_json=True, validate_id_set=False, file_path=None, configuration=Configuration()): self.branch_name = '' @@ -72,10 +89,6 @@ def __init__(self, is_backward_check=True, prev_ver='origin/master', use_git=Fal print(f'Running validation on branch {self.branch_name}') self.prev_ver = prev_ver - if not self.prev_ver: - # validate against master if no version was provided - self.prev_ver = 'origin/master' - self._is_valid = True self.configuration = configuration self.is_backward_check = is_backward_check @@ -285,14 +298,6 @@ def validate_modified_files(self, modified_files): # noqa: C901 self._is_valid = False elif checked_type(file_path, YML_INTEGRATION_REGEXES): - image_validator = ImageValidator(file_path) - if not image_validator.is_valid(): - self._is_valid = False - - description_validator = DescriptionValidator(file_path) - if not description_validator.is_valid(): - self._is_valid = False - integration_validator = IntegrationValidator(structure_validator) if self.is_backward_check and not integration_validator.is_backward_compatible(): self._is_valid = False @@ -301,14 +306,6 @@ def validate_modified_files(self, modified_files): # noqa: C901 self._is_valid = False elif checked_type(file_path, YML_BETA_INTEGRATIONS_REGEXES): - image_validator = ImageValidator(file_path) - if not image_validator.is_valid(): - self._is_valid = False - - description_validator = DescriptionValidator(file_path) - if not description_validator.is_valid_beta_description(): - self._is_valid = False - integration_validator = IntegrationValidator(structure_validator) if not integration_validator.is_valid_beta_integration(): self._is_valid = False @@ -418,22 +415,12 @@ def validate_added_files(self, added_files, file_type: str = None): # noqa: C90 elif 
re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE) or file_type == 'playbook': playbook_validator = PlaybookValidator(structure_validator) - if not playbook_validator.is_valid_playbook(): + if not playbook_validator.is_valid_playbook(validate_rn=False): self._is_valid = False elif checked_type(file_path, YML_INTEGRATION_REGEXES) or file_type == 'integration': - image_validator = ImageValidator(file_path) - # if file_type(non git path) the image is not in a separate path - image_validator.file_path = file_path if file_type else image_validator.file_path - if not image_validator.is_valid(): - self._is_valid = False - - description_validator = DescriptionValidator(file_path) - if not description_validator.is_valid(): - self._is_valid = False - integration_validator = IntegrationValidator(structure_validator) - if not integration_validator.is_valid_file(validate_rn=not file_type): + if not integration_validator.is_valid_file(validate_rn=False): self._is_valid = False elif checked_type(file_path, PACKAGE_SCRIPTS_REGEXES) or file_type == 'script': @@ -443,17 +430,13 @@ def validate_added_files(self, added_files, file_type: str = None): # noqa: C90 structure_validator.file_path = yml_path script_validator = ScriptValidator(structure_validator) - if not script_validator.is_valid_file(validate_rn=not file_type): + if not script_validator.is_valid_file(validate_rn=False): self._is_valid = False elif re.match(BETA_INTEGRATION_REGEX, file_path, re.IGNORECASE) or \ - re.match(BETA_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE): - description_validator = DescriptionValidator(file_path) - if not description_validator.is_valid_beta_description(): - self._is_valid = False - + re.match(BETA_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE) or file_type == 'betaintegration': integration_validator = IntegrationValidator(structure_validator) - if not integration_validator.is_valid_beta_integration(): + if not integration_validator.is_valid_beta_integration(validate_rn=False): 
self._is_valid = False elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE): @@ -462,30 +445,32 @@ def validate_added_files(self, added_files, file_type: str = None): # noqa: C90 self._is_valid = False # incident fields and indicator fields are using the same scheme. + # TODO: add validation for classification(21630) and set validate_rn to False after issue #23398 is fixed. elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS) or \ file_type in ('incidentfield', 'indicatorfield'): incident_field_validator = IncidentFieldValidator(structure_validator) - if not incident_field_validator.is_valid_file(validate_rn=not file_type): + if not incident_field_validator.is_valid_file(validate_rn=False): self._is_valid = False elif checked_type(file_path, [REPUTATION_REGEX]) or file_type == 'reputation': reputation_validator = ReputationValidator(structure_validator) - if not reputation_validator.is_valid_file(validate_rn=not file_type): + if not reputation_validator.is_valid_file(validate_rn=False): self._is_valid = False elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES) or file_type == 'layout': layout_validator = LayoutValidator(structure_validator) + # TODO: set validate_rn to False after issue #23398 is fixed. 
if not layout_validator.is_valid_layout(validate_rn=not file_type): self._is_valid = False elif checked_type(file_path, JSON_ALL_DASHBOARDS_REGEXES) or file_type == 'dashboard': dashboard_validator = DashboardValidator(structure_validator) - if not dashboard_validator.is_valid_dashboard(validate_rn=not file_type): + if not dashboard_validator.is_valid_dashboard(validate_rn=False): self._is_valid = False - elif checked_type(file_path, JSON_ALL_INCIDENT_TYPES_REGEXES): + elif checked_type(file_path, JSON_ALL_INCIDENT_TYPES_REGEXES) or file_type == 'incidenttype': incident_type_validator = IncidentTypeValidator(structure_validator) - if not incident_type_validator.is_valid_incident_type(validate_rn=not file_type): + if not incident_type_validator.is_valid_incident_type(validate_rn=False): self._is_valid = False elif 'CHANGELOG' in file_path: @@ -613,7 +598,6 @@ def is_valid_structure(self): not self.branch_name.startswith('20.')): print('Validates only committed files') self.validate_committed_files() - self.validate_against_previous_version(no_error=True) else: self.validate_against_previous_version(no_error=True) print('Validates all of Content repo directories according to their schemas') @@ -635,17 +619,24 @@ def validate_against_previous_version(self, no_error=False): Args: no_error (bool): If set to true will restore self._is_valid after run (will not return new errors) """ - if self.prev_ver and self.prev_ver != 'master': - print_color('Starting validation against {}'.format(self.prev_ver), LOG_COLORS.GREEN) - modified_files, _, _, _ = self.get_modified_and_added_files(self.prev_ver) - prev_self_valid = self._is_valid - self.validate_modified_files(modified_files) - if no_error: - self._is_valid = prev_self_valid + if not self.prev_ver: + content_release_branch_id = self.get_content_release_identifier() + if not content_release_branch_id: + print_warning('could\'t get content\'s release branch ID. 
Skipping validation.') + return + else: + self.prev_ver = content_release_branch_id + + print_color('Starting validation against {}'.format(self.prev_ver), LOG_COLORS.GREEN) + modified_files, _, _, _ = self.get_modified_and_added_files(self.prev_ver) + prev_self_valid = self._is_valid + self.validate_modified_files(modified_files) + if no_error: + self._is_valid = prev_self_valid # parser.add_argument('-t', '--test-filter', type=str2bool, default=False, # help='Check that tests are valid.') - # TODO: after validation there was a step to run the configure_tests script to check that each changed file + # TODO: after validation there was a step to run the configure_tests script to check each changed file # had a relevant test - was used as part of the hooks. @staticmethod @@ -663,3 +654,11 @@ def _is_py_script_or_integration(file_path): return True return False + + def get_content_release_identifier(self): + try: + file_content = get_remote_file('.circleci/config.yml', tag=self.branch_name) + except Exception: + return + else: + return file_content.get('jobs').get('build').get('environment').get('GIT_SHA1') diff --git a/demisto_sdk/commands/validate/tests/validators_test.py b/demisto_sdk/commands/validate/tests/validators_test.py index 4963ddbfb7..d84191783a 100644 --- a/demisto_sdk/commands/validate/tests/validators_test.py +++ b/demisto_sdk/commands/validate/tests/validators_test.py @@ -1,35 +1,55 @@ +import json import os from shutil import copyfile from typing import Any, Type import pytest - from demisto_sdk.commands.common.constants import DIR_LIST -from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator -from demisto_sdk.commands.common.hook_validations.dashboard import DashboardValidator -from demisto_sdk.commands.common.hook_validations.incident_field import IncidentFieldValidator +from demisto_sdk.commands.common.hook_validations.base_validator import \ + BaseValidator +from demisto_sdk.commands.common.hook_validations.dashboard 
import \ + DashboardValidator +from demisto_sdk.commands.common.hook_validations.image import ImageValidator +from demisto_sdk.commands.common.hook_validations.incident_field import \ + IncidentFieldValidator +from demisto_sdk.commands.common.hook_validations.integration import \ + IntegrationValidator from demisto_sdk.commands.common.hook_validations.layout import LayoutValidator -from demisto_sdk.commands.common.hook_validations.release_notes import ReleaseNotesValidator -from demisto_sdk.commands.common.hook_validations.reputation import ReputationValidator +from demisto_sdk.commands.common.hook_validations.playbook import \ + PlaybookValidator +from demisto_sdk.commands.common.hook_validations.release_notes import \ + ReleaseNotesValidator +from demisto_sdk.commands.common.hook_validations.reputation import \ + ReputationValidator from demisto_sdk.commands.common.hook_validations.script import ScriptValidator -from demisto_sdk.commands.common.hook_validations.structure import StructureValidator -from demisto_sdk.commands.common.hook_validations.playbook import PlaybookValidator -from demisto_sdk.commands.common.hook_validations.integration import IntegrationValidator - -from demisto_sdk.tests.constants_test import VALID_LAYOUT_PATH, INVALID_LAYOUT_PATH, \ - VALID_REPUTATION_PATH, INVALID_REPUTATION_PATH, VALID_WIDGET_PATH, INVALID_WIDGET_PATH, VALID_DASHBOARD_PATH, \ - VALID_SCRIPT_PATH, INVALID_SCRIPT_PATH, INVALID_DASHBOARD_PATH, VALID_INCIDENT_FIELD_PATH, \ - INVALID_INCIDENT_FIELD_PATH, VALID_INTEGRATION_TEST_PATH, VALID_ONE_LINE_CHANGELOG_PATH, \ - VALID_ONE_LINE_LIST_CHANGELOG_PATH, VALID_MULTI_LINE_CHANGELOG_PATH, VALID_MULTI_LINE_LIST_CHANGELOG_PATH, \ - INVALID_ONE_LINE_1_CHANGELOG_PATH, INVALID_ONE_LINE_2_CHANGELOG_PATH, INVALID_ONE_LINE_LIST_1_CHANGELOG_PATH, \ - INVALID_ONE_LINE_LIST_2_CHANGELOG_PATH, INVALID_MULTI_LINE_1_CHANGELOG_PATH, INVALID_MULTI_LINE_2_CHANGELOG_PATH, \ - LAYOUT_TARGET, WIDGET_TARGET, DASHBOARD_TARGET, INTEGRATION_TARGET, \ - 
INCIDENT_FIELD_TARGET, SCRIPT_TARGET, SCRIPT_RELEASE_NOTES_TARGET, INTEGRATION_RELEASE_NOTES_TARGET, \ - VALID_TEST_PLAYBOOK_PATH, PLAYBOOK_TARGET, INVALID_PLAYBOOK_PATH, INVALID_PLAYBOOK_ID_PATH, \ - INVALID_PLAYBOOK_CONDITION_1, INVALID_PLAYBOOK_CONDITION_2, VALID_PLAYBOOK_CONDITION, VALID_INTEGRATION_ID_PATH, \ - INVALID_INTEGRATION_ID_PATH, INVALID_PLAYBOOK_PATH_FROM_ROOT, VALID_NO_HIDDEN_PARAMS, INVALID_NO_HIDDEN_PARAMS - +from demisto_sdk.commands.common.hook_validations.structure import \ + StructureValidator from demisto_sdk.commands.common.hook_validations.widget import WidgetValidator +from demisto_sdk.commands.unify.unifier import Unifier +from demisto_sdk.commands.validate.file_validator import FilesValidator +from demisto_sdk.tests.constants_test import ( + BETA_INTEGRATION_TARGET, DASHBOARD_TARGET, GIT_HAVE_MODIFIED_AND_NEW_FILES, + INCIDENT_FIELD_TARGET, INCIDENT_TYPE_TARGET, + INTEGRATION_RELEASE_NOTES_TARGET, INTEGRATION_TARGET, + INVALID_DASHBOARD_PATH, INVALID_INCIDENT_FIELD_PATH, + INVALID_INTEGRATION_ID_PATH, INVALID_LAYOUT_PATH, + INVALID_MULTI_LINE_1_CHANGELOG_PATH, INVALID_MULTI_LINE_2_CHANGELOG_PATH, + INVALID_NO_HIDDEN_PARAMS, INVALID_ONE_LINE_1_CHANGELOG_PATH, + INVALID_ONE_LINE_2_CHANGELOG_PATH, INVALID_ONE_LINE_LIST_1_CHANGELOG_PATH, + INVALID_ONE_LINE_LIST_2_CHANGELOG_PATH, INVALID_PLAYBOOK_CONDITION_1, + INVALID_PLAYBOOK_CONDITION_2, INVALID_PLAYBOOK_ID_PATH, + INVALID_PLAYBOOK_PATH, INVALID_PLAYBOOK_PATH_FROM_ROOT, + INVALID_REPUTATION_PATH, INVALID_SCRIPT_PATH, INVALID_WIDGET_PATH, + LAYOUT_TARGET, PLAYBOOK_TARGET, REPUTATION_TARGET, + SCRIPT_RELEASE_NOTES_TARGET, SCRIPT_TARGET, VALID_DASHBOARD_PATH, + VALID_INCIDENT_FIELD_PATH, VALID_INCIDENT_TYPE_PATH, + VALID_INTEGRATION_ID_PATH, VALID_INTEGRATION_TEST_PATH, VALID_LAYOUT_PATH, + VALID_MULTI_LINE_CHANGELOG_PATH, VALID_MULTI_LINE_LIST_CHANGELOG_PATH, + VALID_NO_HIDDEN_PARAMS, VALID_ONE_LINE_CHANGELOG_PATH, + VALID_ONE_LINE_LIST_CHANGELOG_PATH, VALID_PLAYBOOK_CONDITION, + 
VALID_REPUTATION_PATH, VALID_SCRIPT_PATH, VALID_TEST_PLAYBOOK_PATH, + VALID_WIDGET_PATH, WIDGET_TARGET) +from mock import patch class TestValidators: @@ -245,3 +265,92 @@ def test_is_all_params_not_hidden(self, source, answer): structure = StructureValidator(source) validator = IntegrationValidator(structure) assert validator.is_all_params_not_hidden() is answer + + with open(GIT_HAVE_MODIFIED_AND_NEW_FILES, "r") as test_params_file: + tests_params = json.load(test_params_file) + params = [ + (None, tuple(set(i) for i in tests_params['data']['params_with_data']), '123456', True, True), + ('origin/master', tuple(set(i) for i in tests_params['data']['params_with_data']), '123456', True, True), + (None, tuple(set(i) for i in tests_params['data']['params_with_data']), '', True, True), + (None, tuple(set(i) for i in tests_params['data']['params_without_data']), '123456', True, True), + (None, tuple(set(i) for i in tests_params['data']['params_with_data']), '123456', False, False), + ] + + @pytest.mark.parametrize("prev_var, get_modified_and_added_files, release_iden, answer, is_valid", params) + def test_validate_against_previous_version(self, prev_var, get_modified_and_added_files, release_iden, answer, + is_valid, mocker): + file_validator = FilesValidator(validate_conf_json=False, prev_ver=prev_var) + file_validator._is_valid = is_valid + mocker.patch.object(FilesValidator, 'get_modified_and_added_files', return_value=get_modified_and_added_files) + mocker.patch.object(FilesValidator, 'get_content_release_identifier', return_value=release_iden) + mocker.patch.object(FilesValidator, 'validate_modified_files', return_value=None) + + assert file_validator.validate_against_previous_version() is None + assert file_validator._is_valid is answer + + INPUTS_STRUCTURE_VALIDATION = [ + (VALID_INTEGRATION_TEST_PATH, INTEGRATION_TARGET), + (VALID_SCRIPT_PATH, SCRIPT_TARGET), + (VALID_DASHBOARD_PATH, DASHBOARD_TARGET), + (VALID_INCIDENT_FIELD_PATH, INCIDENT_FIELD_TARGET), + 
(VALID_TEST_PLAYBOOK_PATH, PLAYBOOK_TARGET), +        (VALID_REPUTATION_PATH, REPUTATION_TARGET), +        (VALID_INCIDENT_TYPE_PATH, INCIDENT_TYPE_TARGET), +        (VALID_INTEGRATION_TEST_PATH, BETA_INTEGRATION_TARGET), +        (VALID_INTEGRATION_TEST_PATH, INTEGRATION_RELEASE_NOTES_TARGET) +    ] + +    @pytest.mark.parametrize('source, target', INPUTS_STRUCTURE_VALIDATION) +    def test_is_file_structure(self, source, target): +        # type: (str, str) -> None +        try: +            copyfile(source, target) +            assert FilesValidator(validate_conf_json=False).is_valid_structure() +        finally: +            os.remove(target) + +    FILE_PATHS = [ +        ([VALID_INTEGRATION_TEST_PATH], 'integration'), +        ([VALID_TEST_PLAYBOOK_PATH], 'playbook'), +        ([VALID_DASHBOARD_PATH], 'dashboard'), +        ([VALID_INCIDENT_FIELD_PATH], 'incidentfield'), +        ([VALID_REPUTATION_PATH], 'reputation'), +        ([VALID_INCIDENT_TYPE_PATH], 'incidenttype'), +        ([VALID_INTEGRATION_TEST_PATH], 'betaintegration') +    ] + +    @pytest.mark.parametrize('file_path, file_type', FILE_PATHS) +    def test_is_valid_rn(self, mocker, file_path, file_type): +        mocker.patch.object(ReleaseNotesValidator, 'get_master_diff', return_value=None) +        mocker.patch.object(StructureValidator, 'is_valid_file', return_value=True) +        mocker.patch.object(IntegrationValidator, 'is_valid_subtype', return_value=True) +        mocker.patch.object(IntegrationValidator, 'is_valid_feed', return_value=True) +        mocker.patch.object(IntegrationValidator, 'is_valid_description', return_value=True) +        mocker.patch.object(IntegrationValidator, 'is_valid_version', return_value=True) +        mocker.patch.object(ImageValidator, 'is_valid', return_value=True) +        mocker.patch.object(DashboardValidator, 'is_id_equals_name', return_value=True) +        mocker.patch.object(ReputationValidator, 'is_id_equals_details', return_value=True) +        mocker.patch.object(IntegrationValidator, 'is_valid_beta', return_value=True) +        file_validator = FilesValidator(validate_conf_json=False) +        file_validator.validate_added_files(file_path, file_type) +        assert 
file_validator._is_valid + + FILE_PATH = [ + ([VALID_SCRIPT_PATH], 'script') + ] + + @staticmethod + def mock_unifier(): + def get_script_package_data_mock(*args, **kwargs): + return VALID_SCRIPT_PATH, '' + with patch.object(Unifier, '__init__', lambda a, b: None): + Unifier.get_script_package_data = get_script_package_data_mock + return Unifier('') + + @pytest.mark.parametrize('file_path, file_type', FILE_PATH) + def test_script_valid_rn(self, mocker, file_path, file_type): + mocker.patch.object(ScriptValidator, 'is_valid_name', return_value=True) + self.mock_unifier() + file_validator = FilesValidator(validate_conf_json=False) + file_validator.validate_added_files(file_path, file_type) + assert file_validator._is_valid diff --git a/demisto_sdk/tests/constants_test.py b/demisto_sdk/tests/constants_test.py index 6a6ce09588..59081ab7df 100644 --- a/demisto_sdk/tests/constants_test.py +++ b/demisto_sdk/tests/constants_test.py @@ -47,8 +47,10 @@ PLAYBOOK_TARGET = "Playbooks/playbook-test.yml" INTEGRATION_TARGET = "./Integrations/integration-test.yml" INCIDENT_FIELD_TARGET = "IncidentFields/incidentfield-test.json" +INCIDENT_TYPE_TARGET = "IncidentTypes/incidenttype-valid.json" PLAYBOOK_PACK_TARGET = "Packs/Int/Playbooks/playbook-test.yml" SCRIPT_TARGET = "./Scripts/script-test.yml" +BETA_INTEGRATION_TARGET = "./Beta_Integrations/integration-test.yml" SCRIPT_RELEASE_NOTES_TARGET = "./Scripts/script-test_CHANGELOG.md" INTEGRATION_RELEASE_NOTES_TARGET = "./Integrations/integration-test_CHANGELOG.md" SOURCE_FORMAT_INTEGRATION_COPY = f"{GIT_ROOT}/demisto_sdk/tests/test_files/format_New_Integration_copy.yml" @@ -68,10 +70,12 @@ INVALID_INTEGRATION_YML_4 = f"{GIT_ROOT}/demisto_sdk/tests/test_files/integration-invalid-yml4.yml" VALID_REPUTATION_FILE = f"{GIT_ROOT}/demisto_sdk/tests/test_files/reputation-cidr-valid.json" INVALID_REPUTATION_FILE = f"{GIT_ROOT}/demisto_sdk/tests/test_files/reputation-cidr-invalid.json" +EQUAL_VAL_FORMAT_PLAYBOOK_SOURCE = 
f"{GIT_ROOT}/demisto_sdk/tests/test_files/playbook-invalid-equal.yml" +EQUAL_VAL_FORMAT_PLAYBOOK_DESTINATION = f"Playbooks/playbook-invalid-equal.yml" +EQUAL_VAL_PATH = f'Playbooks' INVALID_NO_HIDDEN_PARAMS = f"{GIT_ROOT}/demisto_sdk/tests/test_files/invalid-no-hidden-params.yml" VALID_NO_HIDDEN_PARAMS = f"{GIT_ROOT}/demisto_sdk/tests/test_files/valid-no-hidden-params.yml" - - +GIT_HAVE_MODIFIED_AND_NEW_FILES = f"{GIT_ROOT}/demisto_sdk/tests/test_files/git_have_modified_and_new_files.json" SOURCE_FORMAT_INCIDENTFIELD_COPY = f"{GIT_ROOT}/demisto_sdk/tests/test_files/format_incidentfield-copy.json" DESTINATION_FORMAT_INCIDENTFIELD_COPY = f"IncidentFields/incidentfield-copy.json" INCIDENTFIELD_PATH = f"IncidentFields" @@ -94,3 +98,5 @@ SOURCE_FORMAT_DASHBOARD_COPY = f"{GIT_ROOT}/demisto_sdk/tests/test_files/format_dashboard-copy.json" DESTINATION_FORMAT_DASHBOARD_COPY = f"Dashboards/dashboard-copy.json" DASHBOARD_PATH = f"Dashboards" +VALID_MD = f'{git_path()}/demisto_sdk/tests/test_files/README-valid.md' +INVALID_MD = f'{git_path()}/demisto_sdk/tests/test_files/README-invalid.md' diff --git a/demisto_sdk/tests/test_files/CalculateGeoDistance/CalculateGeoDistance.py b/demisto_sdk/tests/test_files/CalculateGeoDistance/CalculateGeoDistance.py index 138653785a..6f493d6e36 100644 --- a/demisto_sdk/tests/test_files/CalculateGeoDistance/CalculateGeoDistance.py +++ b/demisto_sdk/tests/test_files/CalculateGeoDistance/CalculateGeoDistance.py @@ -1,7 +1,7 @@ import demistomock as demisto +import geopy.distance from CommonServerPython import * from CommonServerUserPython import * -import geopy.distance requests.packages.urllib3.disable_warnings() diff --git a/demisto_sdk/tests/test_files/VulnDB/VulnDB.py b/demisto_sdk/tests/test_files/VulnDB/VulnDB.py index 75ca0c7d0c..1a1d778be2 100644 --- a/demisto_sdk/tests/test_files/VulnDB/VulnDB.py +++ b/demisto_sdk/tests/test_files/VulnDB/VulnDB.py @@ -1,11 +1,12 @@ +import urllib.parse + import demistomock as demisto +import requests 
from CommonServerPython import * from CommonServerUserPython import * ''' IMPORTS ''' -import requests -import urllib.parse # Disable insecure warnings requests.packages.urllib3.disable_warnings() diff --git a/demisto_sdk/tests/test_files/content_repo_example/Integrations/Securonix/Securonix.py b/demisto_sdk/tests/test_files/content_repo_example/Integrations/Securonix/Securonix.py index 66a2d78014..9e7cd2683d 100644 --- a/demisto_sdk/tests/test_files/content_repo_example/Integrations/Securonix/Securonix.py +++ b/demisto_sdk/tests/test_files/content_repo_example/Integrations/Securonix/Securonix.py @@ -1,8 +1,7 @@ from datetime import datetime -from typing import Dict, Tuple, Optional, List, Callable, Any +from typing import Any, Callable, Dict, List, Optional, Tuple import urllib3 - from CommonServerPython import * # Disable insecure warnings diff --git a/demisto_sdk/tests/test_files/content_repo_example/Packs/FeedAzure/Integrations/FeedAzure/FeedAzure.py b/demisto_sdk/tests/test_files/content_repo_example/Packs/FeedAzure/Integrations/FeedAzure/FeedAzure.py index deeef757ba..caeac7cb11 100644 --- a/demisto_sdk/tests/test_files/content_repo_example/Packs/FeedAzure/Integrations/FeedAzure/FeedAzure.py +++ b/demisto_sdk/tests/test_files/content_repo_example/Packs/FeedAzure/Integrations/FeedAzure/FeedAzure.py @@ -1,7 +1,7 @@ import re -import urllib3 from typing import Dict, List, Tuple +import urllib3 from CommonServerPython import * # disable insecure warnings diff --git a/demisto_sdk/commands/lint/dev_envs/default_python2/Pipfile b/demisto_sdk/tests/test_files/default_python2/Pipfile similarity index 100% rename from demisto_sdk/commands/lint/dev_envs/default_python2/Pipfile rename to demisto_sdk/tests/test_files/default_python2/Pipfile diff --git a/demisto_sdk/commands/lint/dev_envs/default_python2/Pipfile.lock b/demisto_sdk/tests/test_files/default_python2/Pipfile.lock similarity index 100% rename from demisto_sdk/commands/lint/dev_envs/default_python2/Pipfile.lock 
rename to demisto_sdk/tests/test_files/default_python2/Pipfile.lock diff --git a/demisto_sdk/tests/test_files/docs_test/positive_docs_section_end_with_eof.md b/demisto_sdk/tests/test_files/docs_test/positive_docs_section_end_with_eof.md new file mode 100644 index 0000000000..6feda109b3 --- /dev/null +++ b/demisto_sdk/tests/test_files/docs_test/positive_docs_section_end_with_eof.md @@ -0,0 +1,24 @@ +### dxl-send-event +*** +Sends the specified event to the DXL fabric. + + +##### Base Command + +`dxl-send-event` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| topic | The topic for which to publish the message. | Required | +| payload | The event payload. | Required | + + +##### Context Output + +There is no context output for this command. + +##### Command Example +``` ``` + +##### Human Readable Output diff --git a/demisto_sdk/tests/test_files/fake_integration/fake_README.md b/demisto_sdk/tests/test_files/fake_integration/fake_README.md new file mode 100644 index 0000000000..94783c8f7d --- /dev/null +++ b/demisto_sdk/tests/test_files/fake_integration/fake_README.md @@ -0,0 +1,196 @@ +Use the Zoom integration manage your Zoom users and meetings +This integration was integrated and tested with version xx of Zoom +## Configure Zoom on Demisto + +1. Navigate to **Settings** > **Integrations** > **Servers & Services**. +2. Search for Zoom. +3. Click **Add instance** to create and configure a new integration instance. + +| **Parameter** | **Description** | **Required** | +| --- | --- | --- | +| apiKey | | True | +| apiSecret | | True | +| proxy | Use system proxy settings | False | + +4. Click **Test** to validate the URLs, token, and connection. +## Commands +You can execute these commands from the Demisto CLI, as part of an automation, or in a playbook. +After you successfully execute a command, a DBot message appears in the War Room with the command details. 
+### zoom-create-user +*** +Create a new user in zoom account + + +##### Base Command + +`zoom-create-user` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| first_name | First name of the new user | Required | +| last_name | Last name of the new user | Required | +| email | The email of the new user | Required | +| user_type | The type of the newly created user | Optional | + + +##### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| Zoom.User.id | string | The ID of the created user | +| Zoom.User.first_name | string | First name of the created user | +| Zoom.User.last_name | string | Last name for the created user | +| Zoom.User.email | string | Email of the created user | +| Zoom.User.created_at | date | Created date of the user | +| Zoom.User.type | number | The type of the user | + + +##### Command Example +``` ``` + +##### Human Readable Output + + +### zoom-create-meeting +*** +Create a new zoom meeting (scheduled or instant) + + +##### Base Command + +`zoom-create-meeting` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| type | The type of the meeting | Required | +| user | email address or id of user for meeting | Required | +| topic | The topic of the meeting | Required | +| auto-record-meeting | Record zoom meeting? | Optional | +| start-time | Meeting start time. When using a format like “yyyy-MM-dd’T'HH:mm:ss'Z’”, always use GMT time. When using a format like “yyyy-MM-dd’T'HH:mm:ss”, you should use local time and you will need to specify the time zone. Only used for scheduled meetings and recurring meetings with fixed time. | Optional | +| timezone | Timezone to format start_time. For example, “America/Los_Angeles”. For scheduled meetings only. 
| Optional | + + +##### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| Zoom.Meeting.join_url | string | Join url for the meeting | +| Zoom.Meeting.id | string | Meeting id of the new meeting that is created | +| Zoom.Meeting.start_url | string | The URL to start the meeting | + + +##### Command Example +``` ``` + +##### Human Readable Output + + +### zoom-fetch-recording +*** +Get meeting record and save as file in the warroom + + +##### Base Command + +`zoom-fetch-recording` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| meeting_id | Meeting id to get the recording | Required | + + +##### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| File.SHA256 | unknown | Attachment&\#x27;s SHA256 | +| File.SHA1 | unknown | Attachment&\#x27;s SHA1 | +| File.MD5 | unknown | Attachment&\#x27;s MD5 | +| File.Name | unknown | Attachment&\#x27;s Name | +| File.Info | unknown | Attachment&\#x27;s Info | +| File.Size | unknown | Attachment&\#x27;s Size \(In Bytes\) | +| File.Extension | unknown | Attachment&\#x27;s Extension | +| File.Type | unknown | Attachment&\#x27;s Type | +| File.EntryID | unknown | Attachment&\#x27;s EntryID | +| File.SSDeep | unknown | Attachment&\#x27;s SSDeep hash | + + +##### Command Example +``` ``` + +##### Human Readable Output + + +### zoom-list-users +*** +List the existing users + + +##### Base Command + +`zoom-list-users` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| status | Which status of users to list | Optional | +| page-size | Number of users to return. Max 300. 
| Optional | +| page-number | Which page of results to return | Optional | + + +##### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| Zoom.Metadata.Count | number | Total page count available | +| Zoom.Metadata.Number | number | Current page number | +| Zoom.Metadata.Size | number | Number of results in current page | +| Zoom.Metadata.Total | number | Total number of records | +| Zoom.User.id | string | ID of the user | +| Zoom.User.first_name | string | First name of user | +| Zoom.User.last_name | string | Last name of user | +| Zoom.User.email | string | Email of user | +| Zoom.User.type | number | Type of user | +| Zoom.User.created_at | date | Date when user was created | +| Zoom.User.dept | string | Department for user | +| Zoom.User.verified | number | Is the user verified | +| Zoom.User.last_login_time | date | Last login time of the user | +| Zoom.User.timezone | string | Default timezone for the user | +| Zoom.User.pmi | string | PMI of user | +| Zoom.User.group_ids | string | Groups user belongs to | + + +##### Command Example +``` ``` + +##### Human Readable Output + + +### zoom-delete-user +*** +Delete a user from Zoom + + +##### Base Command + +`zoom-delete-user` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| user | The user ID or email to delete | Required | +| action | The action to take | Optional | + + +##### Context Output + +There is no context output for this command. 
+ +##### Command Example +``` ``` + +##### Human Readable Output + diff --git a/demisto_sdk/tests/test_files/format_pwsh_integration.yml b/demisto_sdk/tests/test_files/format_pwsh_integration.yml new file mode 100644 index 0000000000..385830f304 --- /dev/null +++ b/demisto_sdk/tests/test_files/format_pwsh_integration.yml @@ -0,0 +1,50 @@ +commonfields: + id: PowerShell Remoting + version: 11 +name: PowerShell Remoting +display: PowerShell Remoting +category: Utilities +description: |- + Use this integration in order to remote into another machine (windows/linux) using SSH, requires PowerShell core on both ends. + Installation steps needed on target machine can be found in Microsoft's official documentation: + https://docs.microsoft.com/en-us/powershell/scripting/learn/remoting/ssh-remoting-in-powershell-core?view=powershell-6 +configuration: +- display: Domain/IP + name: hostname + defaultvalue: subdomain.example.com + type: 0 + required: true + additionalinfo: Domain or IP of target machine +- display: Username + name: credentials + defaultvalue: "" + type: 9 + required: true + additionalinfo: User name in target machine +script: + script: '' + type: powershell + commands: + - name: pwsh-remoting-query + arguments: + - name: command + required: true + description: PowerShell command (can be single or multiple in order of execution) + to run on the target machine + isArray: true + outputs: + - contextPath: PowerShellSSH.Query + description: Object that contains data about the response of a command run in + target machine + type: unknown + - contextPath: PowerShellSSH.Query.Result + description: The result of the command run from target machine + type: string + - contextPath: PowerShellSSH.Query.Command + description: The command sent to target machine, used as ID of that query + type: string + description: Remote to target machine which has powershell core installed on it + and is set up for remoting + execution: true + dockerimage: demisto/powershell:6.2.3.5563 + 
runonce: false diff --git a/demisto_sdk/tests/test_files/format_pwsh_script.yml b/demisto_sdk/tests/test_files/format_pwsh_script.yml new file mode 100644 index 0000000000..448826aaae --- /dev/null +++ b/demisto_sdk/tests/test_files/format_pwsh_script.yml @@ -0,0 +1,31 @@ +args: +- default: false + description: JSON string to verfiy. + isArray: false + name: json + required: true + secret: false +- default: false + description: Optional schema against which to validate the JSON input. + isArray: false + name: schema + required: false + secret: false +comment: Verifies if the supplied JSON string is valid and optionally verifies against + a provided schema. The script utilizes Powershell's Test-JSON cmdlet. +commonfields: + id: VerifyJSON + version: 10 +enabled: false +name: VerifyJSON +script: '-' +system: false +outputs: +- contextPath: VerifyJSON.Result + description: Whether the passed JSON was verified. + type: boolean +tags: +- JSON +- Utility +type: powershell +dockerimage: demisto/powershell:6.2.4.6166 diff --git a/demisto_sdk/tests/test_files/git_have_modified_and_new_files.json b/demisto_sdk/tests/test_files/git_have_modified_and_new_files.json new file mode 100644 index 0000000000..06e84c78e0 --- /dev/null +++ b/demisto_sdk/tests/test_files/git_have_modified_and_new_files.json @@ -0,0 +1,18 @@ +{ + "data" : { + "params_without_data" : { + "modified_files1" : [""], + "added_files1" : [""], + "old_format_files1" : [""], + "packs1" : [""] + }, + + "params_with_data" : [ + ["/path/mod_file1", "/path/mod_file2", "/path/mod_file3"], + ["/path/add_file1", "/path/add_file2", "/path/add_file3"], + ["/path/old_file1", "/path/old_file2", "/path/old_file3"], + ["/path/pack_file1", "/path/pack_file2", "/path/pack_file3"] + + ] + } +} diff --git a/demisto_sdk/tests/test_files/playbook-invalid-equal.yml b/demisto_sdk/tests/test_files/playbook-invalid-equal.yml new file mode 100644 index 0000000000..6a119a6657 --- /dev/null +++ 
b/demisto_sdk/tests/test_files/playbook-invalid-equal.yml @@ -0,0 +1,982 @@ +id: access_investigation_-_generic +version: -1 +name: Access Investigation - Generic +fromversion: 3.6.0 +description: |- + This playbook investigates an access incident by gathering user and IP information. + + The playbook then interacts with the user that triggered the incident to confirm whether or not they initiated the access action. +starttaskid: "0" +tasks: + "0": + id: "0" + taskid: 9191bc70-1a40-4150-83d9-66731d89243f + type: start + task: + id: 9191bc70-1a40-4150-83d9-66731d89243f + version: -1 + name: "" + description: "" + iscommand: false + brand: "" + nexttasks: + '#none#': + - "5" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 592.5, + "y": 50 + } + } + "3": + id: "3" + taskid: c4b25dd8-ef84-429f-8edc-a162b3dbf000 + type: playbook + task: + id: c4b25dd8-ef84-429f-8edc-a162b3dbf000 + version: -1 + name: Account Enrichment - Generic + description: Enrich Accounts using one or more integrations + playbookName: Account Enrichment - Generic + type: playbook + iscommand: false + brand: "" + nexttasks: + '#none#': + - "17" + scriptarguments: + Username: + complex: + root: inputs.Username + transformers: + - operator: general.uniq + reputationcalc: 0 + separatecontext: true + loop: + iscommand: false + exitCondition: "" + wait: 1 + view: |- + { + "position": { + "x": 50, + "y": 485 + } + } + "4": + id: "4" + taskid: e0ffa563-92e4-428b-81ed-c0fe3eeeb2f7 + type: title + task: + id: e0ffa563-92e4-428b-81ed-c0fe3eeeb2f7 + version: -1 + name: Interact with the user + description: "" + type: title + iscommand: false + brand: "" + nexttasks: + '#none#': + - "11" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 377.5, + "y": 1010 + } + } + "5": + id: "5" + taskid: 343205d4-9062-4954-8d57-ac3ab134d5f6 + type: title + task: + id: 343205d4-9062-4954-8d57-ac3ab134d5f6 + version: -1 + name: Enrich indicators + description: 
"" + type: title + iscommand: false + brand: "" + nexttasks: + '#none#': + - "8" + - "9" + - "10" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 592.5, + "y": 195 + } + } + "8": + id: "8" + taskid: faf6fad5-f040-47b2-8802-73c06b1ef6d2 + type: title + task: + id: faf6fad5-f040-47b2-8802-73c06b1ef6d2 + version: -1 + name: Enrich source IP + description: "" + type: title + iscommand: false + brand: "" + nexttasks: + '#none#': + - "21" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 1022.5, + "y": 675 + } + } + "9": + id: "9" + taskid: f4690b1b-98cd-4309-82be-53e760d502ee + type: title + task: + id: f4690b1b-98cd-4309-82be-53e760d502ee + version: -1 + name: Enrich destination IP + description: "" + type: title + iscommand: false + brand: "" + nexttasks: + '#none#': + - "20" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 592.5, + "y": 675 + } + } + "10": + id: "10" + taskid: f68c6b42-4b24-426c-81e0-417984626f2a + type: title + task: + id: f68c6b42-4b24-426c-81e0-417984626f2a + version: -1 + name: Enrich source user + description: "" + type: title + iscommand: false + brand: "" + nexttasks: + '#none#': + - "3" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 50, + "y": 340 + } + } + "11": + id: "11" + taskid: 7b6dfef6-28ec-4daf-8b44-e0d813674886 + type: condition + task: + id: 7b6dfef6-28ec-4daf-8b44-e0d813674886 + version: -1 + name: Does the source user account have an email address? + description: Verify that the source user account has an associated email address. 
+ type: condition + iscommand: false + brand: "" + nexttasks: + '#default#': + - "12" + "yes": + - "13" + reputationcalc: 0 + separatecontext: false + conditions: + - label: "yes" + condition: + - - operator: general.isExists + left: + value: + complex: + root: Account + filters: + - - operator: string.isEqual + left: + value: + simple: = + iscontext: true + right: + value: + simple: = + iscontext: true + ignorecase: true + accessor: Email.Address + transformers: + - operator: general.uniq + iscontext: true + view: |- + { + "position": { + "x": 377.5, + "y": 1155 + } + } + "12": + id: "12" + taskid: 7a5f3223-0418-418a-8872-ca221ab74c5a + type: title + task: + id: 7a5f3223-0418-418a-8872-ca221ab74c5a + version: -1 + name: Done + description: "" + type: title + iscommand: false + brand: "" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 50, + "y": 2505 + } + } + "13": + id: "13" + taskid: 0e9d5518-3755-4d4b-8254-631b021ad18a + type: regular + task: + id: 0e9d5518-3755-4d4b-8254-631b021ad18a + version: -1 + name: Request user to confirm account activity + description: Send an email to the source user email address to confirm whether + they recognize the suspicious activity. + scriptName: EmailAskUser + type: regular + iscommand: false + brand: "" + nexttasks: + '#none#': + - "14" + scriptarguments: + additionalOptions: {} + attachIds: {} + bcc: {} + bodyType: {} + cc: + complex: + root: ManagerEmailAddress + email: + complex: + root: Account + filters: + - - operator: string.isEqual + left: + value: + simple: = + iscontext: true + right: + value: + simple: = + iscontext: true + ignorecase: true + accessor: Email.Address + transformers: + - operator: general.uniq + message: + simple: "Hi ${incident.srcuser},\n\nWe identified unexpected activity on your + account. 
\n\nStarting on ${incident.occurred}, there were suspicious log-in + attempts from the ${incident.src} IP address.\n\nPlease confirm whether + or not you recognize this activity.\nReply \"Yes\" to confirm this activity. + \nReply \"No\" otherwise.\n\nRegards,\nYour friendly security team." + option1: + simple: "yes" + option2: + simple: "no" + persistent: {} + replyAddress: {} + replyEntriesTag: {} + retries: {} + roles: {} + subject: + simple: Unexpected account activity + task: + simple: AccessQ1 + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 490, + "y": 1330 + } + } + "14": + id: "14" + taskid: cf2e6c57-c2cb-41cb-822d-eb13f2658fee + type: condition + task: + id: cf2e6c57-c2cb-41cb-822d-eb13f2658fee + version: -1 + name: Get user response + description: Use the user response (yes or no) to direct the playbook. + tags: + - AccessQ1 + type: condition + iscommand: false + brand: "" + nexttasks: + '#default#': + - "23" + "yes": + - "22" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 490, + "y": 1505 + } + } + "17": + id: "17" + taskid: 10c28bdf-caae-47d7-8c1b-c5a7198b2349 + type: condition + task: + id: 10c28bdf-caae-47d7-8c1b-c5a7198b2349 + version: -1 + name: Was the manager's ID returned? + description: Verify that the manager ID (DN) of the source user account was + returned in context. 
+ type: condition + iscommand: false + brand: "" + nexttasks: + '#default#': + - "4" + "yes": + - "19" + reputationcalc: 0 + separatecontext: false + conditions: + - label: "yes" + condition: + - - operator: general.isExists + left: + value: + complex: + root: Account + accessor: Manager + iscontext: true + view: |- + { + "position": { + "x": 50, + "y": 660 + } + } + "19": + id: "19" + taskid: 6ba49d12-a2e7-40bb-85d2-2a7849adc774 + type: regular + task: + id: 6ba49d12-a2e7-40bb-85d2-2a7849adc774 + version: -1 + name: Get manager's info + description: Retrieve the AD account information for the manager of the source + user account. + scriptName: ADGetUser + type: regular + iscommand: false + brand: "" + nexttasks: + '#none#': + - "4" + scriptarguments: + attributes: {} + customFieldData: {} + customFieldType: {} + dn: + complex: + root: Account + accessor: Manager + transformers: + - operator: general.uniq + email: {} + extend-context: + simple: ManagerEmailAddress=mail + headers: {} + limit: {} + name: {} + nestedSearch: {} + userAccountControlOut: {} + username: {} + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 162.5, + "y": 835 + } + } + "20": + id: "20" + taskid: 160c3d44-9b6f-4207-8df9-69f7c6ac3eb7 + type: playbook + task: + id: 160c3d44-9b6f-4207-8df9-69f7c6ac3eb7 + version: -1 + name: IP Enrichment - Generic + playbookName: IP Enrichment - Generic + description: "" + type: playbook + iscommand: false + brand: "" + nexttasks: + '#none#': + - "4" + scriptarguments: + IP: + complex: + root: inputs.DstIP + InternalRange: {} + ResolveIP: + simple: "True" + reputationcalc: 0 + separatecontext: true + loop: + iscommand: false + exitCondition: "" + wait: 1 + view: |- + { + "position": { + "x": 592.5, + "y": 835 + } + } + "21": + id: "21" + taskid: 74b00019-68dc-4e52-8b6e-8eb97721b8cb + type: playbook + task: + id: 74b00019-68dc-4e52-8b6e-8eb97721b8cb + version: -1 + name: IP Enrichment - Generic + description: "" + playbookName: IP 
Enrichment - Generic + type: playbook + iscommand: false + brand: "" + nexttasks: + '#none#': + - "4" + scriptarguments: + IP: + complex: + root: inputs.SrcIP + InternalRange: {} + ResolveIP: + simple: "True" + reputationcalc: 0 + separatecontext: true + loop: + iscommand: false + exitCondition: "" + wait: 1 + view: |- + { + "position": { + "x": 1022.5, + "y": 835 + } + } + "22": + id: "22" + taskid: e0dc594d-6a63-47e7-85d1-cadb733f3544 + type: title + task: + id: e0dc594d-6a63-47e7-85d1-cadb733f3544 + version: -1 + name: User confirmed account activity + description: "" + type: title + iscommand: false + brand: "" + nexttasks: + '#none#': + - "24" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 275, + "y": 2015 + } + } + "23": + id: "23" + taskid: d78f9aeb-aaab-47c1-8359-4518d529c00a + type: title + task: + id: d78f9aeb-aaab-47c1-8359-4518d529c00a + version: -1 + name: User denied account activity + description: "" + type: title + iscommand: false + brand: "" + nexttasks: + '#none#': + - "26" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 705, + "y": 1680 + } + } + "24": + id: "24" + taskid: 7201e19f-4202-41ec-8b24-246679d7621f + type: regular + task: + id: 7201e19f-4202-41ec-8b24-246679d7621f + version: -1 + name: Set severity to low + description: Set the incident severity to low. 
+ script: Builtin|||setIncident + type: regular + iscommand: true + brand: Builtin + nexttasks: + '#none#': + - "25" + scriptarguments: + addLabels: {} + app: {} + assetid: {} + attachmentcount: {} + attachmentextension: {} + attachmenthash: {} + attachmentid: {} + attachmentitem: {} + attachmentname: {} + attachmentsize: {} + attachmenttype: {} + backupowner: {} + bugtraq: {} + customFields: {} + cve: {} + cvss: {} + daysbetweenreportcreation: {} + dest: {} + destntdomain: {} + details: {} + duration: {} + emailbcc: {} + emailbody: {} + emailbodyformat: {} + emailbodyhtml: {} + emailcc: {} + emailclientname: {} + emailfrom: {} + emailkeywords: {} + emailmessageid: {} + emailreceived: {} + emailreplyto: {} + emailreturnpath: {} + emailsenderip: {} + emailsize: {} + emailsource: {} + emailsubject: {} + emailto: {} + emailtocount: {} + emailurlclicked: {} + eventid: {} + falses: {} + fetchid: {} + fetchtype: {} + filehash: {} + filename: {} + filepath: {} + id: {} + important: {} + importantfield: {} + labels: {} + malwarefamily: {} + mdtest: {} + myfield: {} + name: {} + occurred: {} + owner: {} + phase: {} + replacePlaybook: {} + reporteduser: {} + roles: {} + screenshot: {} + screenshot2: {} + selector: {} + severity: + simple: low + signature: {} + single: {} + single2: {} + sla: {} + source: {} + src: {} + srcntdomain: {} + srcuser: {} + systems: {} + test: {} + test2: {} + testfield: {} + timeassignedtolevel2: {} + timefield1: {} + timelevel1: {} + type: {} + user: {} + username: {} + vendorid: {} + vendorproduct: {} + vulnerabilitycategory: {} + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 275, + "y": 2175 + } + } + "25": + id: "25" + taskid: eb5afea3-4cbc-4ec6-82f5-d9b80ac4ed1b + type: regular + task: + id: eb5afea3-4cbc-4ec6-82f5-d9b80ac4ed1b + version: -1 + name: Close Investigation + description: Close the investigation. 
+ script: Builtin|||closeInvestigation + type: regular + iscommand: true + brand: Builtin + nexttasks: + '#none#': + - "12" + scriptarguments: + assetid: {} + closeNotes: {} + closeReason: + simple: 'User is the source of the suspicious activity ' + id: {} + importantfield: {} + test2: {} + timefield1: {} + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 490, + "y": 2350 + } + } + "26": + id: "26" + taskid: 38e9a862-18da-401c-88e7-9ed6e2b0e9a4 + type: regular + task: + id: 38e9a862-18da-401c-88e7-9ed6e2b0e9a4 + version: -1 + name: Set severity to high + description: Set the incident severity to high. + script: Builtin|||setIncident + type: regular + iscommand: true + brand: Builtin + nexttasks: + '#none#': + - "27" + scriptarguments: + addLabels: {} + app: {} + assetid: {} + attachmentcount: {} + attachmentextension: {} + attachmenthash: {} + attachmentid: {} + attachmentitem: {} + attachmentname: {} + attachmentsize: {} + attachmenttype: {} + backupowner: {} + bugtraq: {} + customFields: {} + cve: {} + cvss: {} + daysbetweenreportcreation: {} + dest: {} + destntdomain: {} + details: {} + duration: {} + emailbcc: {} + emailbody: {} + emailbodyformat: {} + emailbodyhtml: {} + emailcc: {} + emailclientname: {} + emailfrom: {} + emailkeywords: {} + emailmessageid: {} + emailreceived: {} + emailreplyto: {} + emailreturnpath: {} + emailsenderip: {} + emailsize: {} + emailsource: {} + emailsubject: {} + emailto: {} + emailtocount: {} + emailurlclicked: {} + eventid: {} + falses: {} + fetchid: {} + fetchtype: {} + filehash: {} + filename: {} + filepath: {} + id: {} + important: {} + importantfield: {} + labels: {} + malwarefamily: {} + mdtest: {} + myfield: {} + name: {} + occurred: {} + owner: {} + phase: {} + replacePlaybook: {} + reporteduser: {} + roles: {} + screenshot: {} + screenshot2: {} + selector: {} + severity: + simple: high + signature: {} + single: {} + single2: {} + sla: {} + source: {} + src: {} + srcntdomain: {} + srcuser: 
{} + systems: {} + test: {} + test2: {} + testfield: {} + timeassignedtolevel2: {} + timefield1: {} + timelevel1: {} + type: {} + user: {} + username: {} + vendorid: {} + vendorproduct: {} + vulnerabilitycategory: {} + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 705, + "y": 1825 + } + } + "27": + id: "27" + taskid: b914ceed-2615-4c1b-8d64-74b2a8da5cd0 + type: regular + task: + id: b914ceed-2615-4c1b-8d64-74b2a8da5cd0 + version: -1 + name: Assign to analyst + description: | + Assign the incident to an analyst based on the analyst’s organizational role. + scriptName: AssignAnalystToIncident + type: regular + iscommand: false + brand: "" + nexttasks: + '#none#': + - "28" + scriptarguments: + assignBy: {} + email: {} + roles: + complex: + root: inputs.Role + username: {} + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 705, + "y": 2000 + } + } + "28": + id: "28" + taskid: 8ae1ca42-1f1d-4ae7-883e-814705b8aae2 + type: regular + task: + id: 8ae1ca42-1f1d-4ae7-883e-814705b8aae2 + version: -1 + name: Manually remediate the incident + description: "Review the incident to determine if the account activity is malicious.\n\nIf + malicious, consider the following:\n* Quarantine the account/ endpoint \n* + Revoke the account password\n* Query the account/ IPs logs in the SIEM\n* + Block the external IPs in the firewall/ proxy\n* Check the account's privileges + and change if needed" + type: regular + iscommand: false + brand: "" + nexttasks: + '#none#': + - "25" + reputationcalc: 0 + separatecontext: false + view: |- + { + "position": { + "x": 705, + "y": 2175 + } + } +view: |- + { + "linkLabelsPosition": {}, + "paper": { + "dimensions": { + "height": 2520, + "width": 1352.5, + "x": 50, + "y": 50 + } + } + } +inputs: +- key: SrcIP + value: + complex: + root: incident + accessor: src + required: false + description: The source IP address from which the incident originated. 
+- key: DstIP + value: + complex: + root: incident + accessor: dest + required: false + description: The target IP address that was accessed. +- key: Username + value: + complex: + root: incident + accessor: srcuser + required: false + description: The username of the account that was used to access the DstIP. +- key: Role + value: + simple: Administrator + required: true + description: The default role to assign the incident to. +outputs: +- contextPath: Account.Email.Address + description: The email address object associated with the Account + type: string +- contextPath: DBotScore + description: Indicator, Score, Type, Vendor + type: unknown +- contextPath: Account.ID + description: The unique Account DN (Distinguished Name) + type: string +- contextPath: Account.Username + description: The Account username + type: string +- contextPath: Account.Email + description: The email address associated with the Account +- contextPath: Account.Type + description: Type of the Account entity + type: string +- contextPath: Account.Groups + description: The groups the Account is part of +- contextPath: Account + description: Account object + type: unknown +- contextPath: Account.DisplayName + description: The Account display name + type: string +- contextPath: Account.Manager + description: The Account's manager + type: string +- contextPath: DBotScore.Indicator + description: The indicator value + type: string +- contextPath: DBotScore.Type + description: The indicator's type + type: string +- contextPath: DBotScore.Vendor + description: The indicator's vendor + type: string +- contextPath: DBotScore.Score + description: The indicator's score + type: number +- contextPath: IP + description: The IP objects + type: unknown +- contextPath: Endpoint + description: The Endpoint's object + type: unknown +- contextPath: Endpoint.Hostname + description: The hostname to enrich + type: string +- contextPath: Endpoint.OS + description: Endpoint OS + type: string +- contextPath: 
Endpoint.IP + description: List of endpoint IP addresses +- contextPath: Endpoint.MAC + description: List of endpoint MAC addresses +- contextPath: Endpoint.Domain + description: Endpoint domain name + type: string +tests: + - No test diff --git a/requirements-dev.txt b/requirements-dev.txt index d3fcfdc46a..0a6aeb2052 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,4 +5,3 @@ pytest-cov pytest-mock requests_mock tox-pyenv -urllib3 diff --git a/requirements.txt b/requirements.txt index 88551070a0..a070da6776 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,8 +3,11 @@ bs4>=0.0.1 click>=7.0 dateparser>=0.7.2 demisto-py>=2.0.7 +docker>=4.2.0 flake8>=3.7.9 gitpython>=3.1.0 +isort>=4.3.21 +jinja2>=2.11.1 mypy>=0.761 networkx>=2.4 pipenv==2018.11.26 @@ -13,5 +16,7 @@ PyPDF2>=1.26.0 PyYAML>=5.3.1 requests>=2.22.0 ruamel.yaml>=0.16.5 -vulture>=1.4 +urllib3>=1.25.7 +vulture>=1.3 +wcmatch>=5.1 yamlordereddictloader>=0.4.0 diff --git a/setup.py b/setup.py index 9aa122241d..04835f34d1 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ Demisto SDK """ -from setuptools import setup, find_packages # noqa: H301 +from setuptools import find_packages, setup # noqa: H301 NAME = "demisto-sdk"