diff --git a/.changelog/3912.yml b/.changelog/3912.yml deleted file mode 100644 index 2976dc74d0b..00000000000 --- a/.changelog/3912.yml +++ /dev/null @@ -1,4 +0,0 @@ -changes: -- description: Log files will now be saved by default to `$HOME/.demisto-sdk/logs`. This behavior can be overridden by the `--log-file-path` flag, or the `DEMISTO_SDK_LOG_FILE_PATH` environment variable. - type: feature -pr_number: 3912 diff --git a/.changelog/3961.yml b/.changelog/3961.yml new file mode 100644 index 00000000000..1f4f6c11ccb --- /dev/null +++ b/.changelog/3961.yml @@ -0,0 +1,6 @@ +changes: +- description: Log files will now be saved by default to `$HOME/.demisto-sdk/logs`. This behavior can be overridden by the `--log-file-path` flag, or the `DEMISTO_SDK_LOG_FILE_PATH` environment variable. + type: feature +- description: Log file path (can be set by the `--log-file-path` flag or the `DEMISTO_SDK_LOG_FILE_PATH` environment variable) can now only accept directory values. Setting it to a file path is no longer supported (file name is now constantly `demisto_sdk_debug.log` and cannot be changed). The path will now be automatically generated if it doesn't exist. + type: breaking +pr_number: 3912 diff --git a/.changelog/3970.yml b/.changelog/3970.yml new file mode 100644 index 00000000000..d6e5b4c1efa --- /dev/null +++ b/.changelog/3970.yml @@ -0,0 +1,4 @@ +changes: +- description: Fixed an issue in **upload** where customFields with explicitly defined values (e.g., ${}) caused the command to fail. + type: fix +pr_number: 3970 diff --git a/.changelog/3971.yml b/.changelog/3971.yml new file mode 100644 index 00000000000..73a97ad1323 --- /dev/null +++ b/.changelog/3971.yml @@ -0,0 +1,4 @@ +changes: +- description: Fixed an issue where validate command failed with Lists folder containing a data json file. 
+ type: fix +pr_number: 3971 diff --git a/.github/actions/setup_poetry_python_environment/action.yml b/.github/actions/setup_environment/action.yml similarity index 77% rename from .github/actions/setup_poetry_python_environment/action.yml rename to .github/actions/setup_environment/action.yml index fc9f6eeb074..5d45d23311b 100644 --- a/.github/actions/setup_poetry_python_environment/action.yml +++ b/.github/actions/setup_environment/action.yml @@ -1,5 +1,5 @@ -name: 'Setup python environment for demisto-sdk jobs' -description: 'Setup python environment for demisto-sdk jobs' +name: 'Setup testing environment for demisto-sdk jobs' +description: 'Setup testing environment for demisto-sdk jobs - that includes adding python/create-artifacts/install dependencies' author: 'Demisto-SDK' inputs: @@ -17,6 +17,11 @@ inputs: type: string default: "-E generate-unit-tests" description: "the arguments for the poetry install command" + artifacts-dir: + required: false + type: string + default: $GITHUB_JOB + description: "The name of the artifacts dir" runs: using: 'composite' diff --git a/.github/actions/setup_test_environment/action.yml b/.github/actions/setup_test_environment/action.yml index 880d58d05af..03e342a9ade 100644 --- a/.github/actions/setup_test_environment/action.yml +++ b/.github/actions/setup_test_environment/action.yml @@ -12,16 +12,27 @@ inputs: type: string default: "16" description: "The node version to install" + artifacts-dir: + required: false + type: string + default: "artifacts-dir" + description: "The name of the artifacts dir" runs: using: 'composite' steps: + - name: Create Artifacts Dir + run: | + mkdir -p ${{ inputs.artifacts-dir }} + echo "Created directory ${{ inputs.artifacts-dir }} successfully" + shell: bash - name: Python ${{ matrix.python-version }} - Setup Environment - uses: ./.github/actions/setup_poetry_python_environment + uses: ./.github/actions/setup_environment with: python-version: ${{ inputs.python-version }} + artifacts-dir: ${{ 
inputs.artifacts-dir }} - name: Set up Node.js uses: actions/setup-node@v3 @@ -31,5 +42,5 @@ runs: - name: Install npm run: | npm install - echo $(echo '{"node_version": "'$(node --version)'","npm_list":'$(npm list --json)'}') > node_versions_info.json + echo $(echo '{"node_version": "'$(node --version)'","npm_list":'$(npm list --json)'}') > ${{ inputs.artifacts-dir }}/node_versions_info.json shell: bash diff --git a/.github/actions/test_summary/action.yml b/.github/actions/test_summary/action.yml index d82bd2c4a03..dcf387039d4 100644 --- a/.github/actions/test_summary/action.yml +++ b/.github/actions/test_summary/action.yml @@ -3,14 +3,6 @@ description: 'Uploads artifacts, prints out a summary and check if there are any author: 'Demisto-SDK' inputs: - python-version: - required: true - type: string - description: "The python version" - artifacts-folder-name: - required: false - type: string - description: "The folder name to save artifacts" summary-display-options: required: false type: string @@ -21,21 +13,34 @@ inputs: type: "string" description: "The junit-path file to post its summary in github workflow summary" default: "" - + artifacts-path-dir: + required: false + type: string + default: $GITHUB_JOB + description: "The path to the artifacts dir" + artifact-name: + required: false + type: string + default: $GITHUB_JOB + description: "The name of of the artifact to upload" runs: using: 'composite' steps: + - name: Combine Artifacts + if: always() + run: | + # move logs files created and .coverage to artifacts folder + mv .coverage ${{ inputs.artifacts-path-dir }}/.coverage || true + mv demisto_sdk_debug*.log ${{ inputs.artifacts-path-dir }}/logs || true + shell: bash - name: Upload artifacts if: always() - uses: actions/upload-artifact@v4 + uses: ./.github/actions/upload_artifacts with: - name: ${{ inputs.artifacts-folder-name }}-artifacts-python-${{ inputs.python-version }} - path: | - ${{ inputs.artifacts-folder-name }} - node_versions_info.json - .coverage 
+ artifacts-path-dir: ${{ inputs.artifacts-path-dir }} + artifact-name: ${{ inputs.artifact-name }} - name: Print Summary of pytest results in workflow summary if: ${{ inputs.junit-path != '' }} uses: pmeier/pytest-results-action@main @@ -44,13 +49,13 @@ runs: summary: true display-options: ${{ inputs.summary-display-options }} fail-on-empty: false - - name: Check if ${{ inputs.artifacts-folder-name }} have passed + - name: Check if tests have have passed shell: bash if: always() run: | if [[ "$PYTEST_EXIT_CODE" -ne 0 ]]; then - echo "There are ${{ inputs.artifacts-folder-name }} that failed, pytest finished with exit code $PYTEST_EXIT_CODE, to see the ${{ inputs.artifacts-folder-name }} summary refer to https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}?pr=${{ github.event.pull_request.number }}" + echo "There are tests that failed, pytest finished with exit code $PYTEST_EXIT_CODE, to see the tests summary refer to https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}?pr=${{ github.event.pull_request.number }}" else - echo "All ${{ inputs.artifacts-folder-name }} have passed, congratulations!" + echo "All tests have passed, congratulations!" 
fi exit $PYTEST_EXIT_CODE diff --git a/.github/actions/upload_artifacts/action.yml b/.github/actions/upload_artifacts/action.yml new file mode 100644 index 00000000000..b3bfb845653 --- /dev/null +++ b/.github/actions/upload_artifacts/action.yml @@ -0,0 +1,27 @@ +name: 'Upload artifacts to github summary' +description: 'Uploads artifacts to the github summary of a workflow' +author: 'Demisto-SDK' + +inputs: + artifacts-path-dir: + required: true + type: string + description: "The path to the artifacts dir" + + artifact-name: + required: false + type: string + default: $GITHUB_JOB + description: "The name of of the artifact to upload" + + +runs: + using: 'composite' + steps: + - name: Upload artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.artifact-name }} + path: | + ${{ inputs.artifacts-path-dir }} diff --git a/.github/workflows/close_jira_issue_by_pr_merge.yml b/.github/workflows/close_jira_issue_by_pr_merge.yml index 3037d3def9c..5b64ab45c11 100644 --- a/.github/workflows/close_jira_issue_by_pr_merge.yml +++ b/.github/workflows/close_jira_issue_by_pr_merge.yml @@ -19,7 +19,7 @@ jobs: python-version: '3.9' - name: Setup Poetry uses: Gr1N/setup-poetry@v8 - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: .venv key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} diff --git a/.github/workflows/link_edited_pr_to_jira_issue.yml b/.github/workflows/link_edited_pr_to_jira_issue.yml index b968950319f..cd117665b5b 100644 --- a/.github/workflows/link_edited_pr_to_jira_issue.yml +++ b/.github/workflows/link_edited_pr_to_jira_issue.yml @@ -19,7 +19,7 @@ jobs: python-version: '3.10' - name: Setup Poetry uses: Gr1N/setup-poetry@v8 - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: .venv key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} diff --git a/.github/workflows/on-push.yml b/.github/workflows/on-push.yml index ad12dbece2e..d07ab6bf2eb 100644 --- a/.github/workflows/on-push.yml +++ 
b/.github/workflows/on-push.yml @@ -16,6 +16,7 @@ concurrency: jobs: validate-changelog: runs-on: ubuntu-latest + if: github.head_ref != 'master' steps: - name: Checkout uses: actions/checkout@v4 @@ -23,7 +24,7 @@ jobs: fetch-depth: 0 - name: Python ${{ matrix.python-version }} - Setup Environment - uses: ./.github/actions/setup_poetry_python_environment + uses: ./.github/actions/setup_environment with: python-version: '3.10' @@ -35,6 +36,27 @@ jobs: echo "Validating changelog for PR: $PR_NUMBER with Name: $PR_TITLE" poetry run python ./Utils/github_workflow_scripts/changelog_validation_scripts/validate_changelog.py -n "$PR_NUMBER" -t "$PR_TITLE" + pre-commit-checks: + runs-on: ubuntu-latest + name: Pre Commit Checks + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Setup Python Environment + uses: ./.github/actions/setup_environment + with: + python-version: '3.10' + + - name: Run Pre Commit + uses: pre-commit/action@v3.0.0 + with: + extra_args: --all-files + + - name: Run Pytest collection + run: | + poetry run pytest --collect-only . + unit-tests: name: Unit Tests / Python ${{ matrix.python-version }} runs-on: ubuntu-latest @@ -54,6 +76,7 @@ jobs: uses: ./.github/actions/setup_test_environment with: python-version: ${{ matrix.python-version }} + artifacts-dir: unit-tests/logs - name: Run Unit Tests run: | @@ -64,17 +87,18 @@ jobs: node demisto_sdk/commands/common/markdown_server/mdx-parse-server.js & node_pid=$! - mkdir unit-tests poetry run pytest -v . --ignore={demisto_sdk/commands/init/templates,demisto_sdk/tests/integration_tests,demisto_sdk/commands/content_graph,tests_end_to_end} --cov=demisto_sdk --cov-report=html:unit-tests/coverage --junitxml=unit-tests/junit.xml || pytest_exit_code=$? 
echo "PYTEST_EXIT_CODE=$pytest_exit_code" >> $GITHUB_ENV kill $node_pid + exit $pytest_exit_code - name: Python ${{ matrix.python-version }} - Test Summary Upload uses: ./.github/actions/test_summary + if: always() with: - python-version: ${{ matrix.python-version }} - artifacts-folder-name: unit-tests + artifact-name: unit-tests-python-${{ matrix.python-version }}-artifacts + artifacts-path-dir: unit-tests junit-path: unit-tests/junit.xml integration-tests: @@ -96,12 +120,12 @@ jobs: uses: ./.github/actions/setup_test_environment with: python-version: ${{ matrix.python-version }} + artifacts-dir: integration-tests/logs - name: Run Integration Tests run: | source "$(poetry env info --path)/bin/activate" - mkdir integration-tests poetry run pytest -v demisto_sdk/tests/integration_tests --cov=demisto_sdk --cov-report=html:integration-tests/coverage --junitxml=integration-tests/junit.xml || pytest_exit_code=$? echo "PYTEST_EXIT_CODE=$pytest_exit_code" >> $GITHUB_ENV @@ -109,9 +133,10 @@ jobs: - name: Python ${{ matrix.python-version }} - Test Summary Upload uses: ./.github/actions/test_summary + if: always() with: - python-version: ${{ matrix.python-version }} - artifacts-folder-name: integration-tests + artifact-name: integration-tests-python-${{ matrix.python-version }}-artifacts + artifacts-path-dir: integration-tests junit-path: integration-tests/junit.xml graph-tests: @@ -133,20 +158,23 @@ jobs: uses: ./.github/actions/setup_test_environment with: python-version: ${{ matrix.python-version }} + artifacts-dir: graph-tests/logs - name: Run Graph Tests run: | source "$(poetry env info --path)/bin/activate" - mkdir graph-tests poetry run pytest -v demisto_sdk/commands/content_graph --cov=demisto_sdk --cov-report=html:graph-tests/coverage --junitxml=graph-tests/junit.xml || pytest_exit_code=$? 
echo "PYTEST_EXIT_CODE=$pytest_exit_code" >> $GITHUB_ENV + exit $pytest_exit_code + - name: Python ${{ matrix.python-version }} - Test Summary Upload uses: ./.github/actions/test_summary + if: always() with: - python-version: ${{ matrix.python-version }} - artifacts-folder-name: graph-tests + artifact-name: graph-tests-python-${{ matrix.python-version }}-artifacts + artifacts-path-dir: graph-tests junit-path: graph-tests/junit.xml coverage: @@ -158,7 +186,7 @@ jobs: uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Download all artifacts + - name: Download all artifacts # TODO - download only the relevant artifacts uses: actions/download-artifact@v4 - name: Run coverage run: | @@ -195,7 +223,7 @@ jobs: run: echo commit=$(git rev-parse HEAD) >> $GITHUB_OUTPUT - name: Python ${{ matrix.python-version }} - Setup Environment - uses: ./.github/actions/setup_poetry_python_environment + uses: ./.github/actions/setup_environment with: python-version: '3.10' working-dir: content @@ -220,7 +248,7 @@ jobs: poetry run pytest ./Tests/private_build/tests -v poetry run pytest Utils -v - test-demisto-sdk-pre-commit-command: + test-pre-commit-command: runs-on: ubuntu-latest name: Test Demisto-SDK Pre-Commit Command steps: @@ -228,7 +256,7 @@ jobs: uses: actions/checkout@v3 - name: Setup Python Environment - uses: ./.github/actions/setup_poetry_python_environment + uses: ./.github/actions/setup_environment with: python-version: '3.10' @@ -241,12 +269,12 @@ jobs: - name: Cache Pre commit id: cache-pre-commit - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.cache/pre-commit key: ${{ runner.os }}-pre-commit - - name: Create SDK Config File - Ignore Docker and Release-Notes Validations + - name: Create SDK Config File - Ignore BA101 run: | # Run only a specific validation to make sure validate is triggered successfully in demisto-sdk pre-commit cd content @@ -256,6 +284,8 @@ jobs: run: | source $(poetry env info --path)/bin/activate cd content + mkdir 
-p test-pre-commit-command/logs + export DEMISTO_SDK_LOG_FILE_PATH=./test-pre-commit-command/logs/demisto_sdk_debug.log echo "# test" >> Packs/HelloWorld/Integrations/HelloWorld/HelloWorld.yml echo "# test" >> Packs/CortexXDR/Integrations/CortexXDRIR/CortexXDRIR.yml echo "# test" >> Packs/QRadar/Integrations/QRadar_v3/QRadar_v3.yml @@ -272,23 +302,51 @@ jobs: cd content demisto-sdk pre-commit --validate --show-diff-on-failure --verbose -i Packs/HelloWorld -i Packs/QRadar/Integrations/QRadar_v3 --mode=nightly - pre-commit-checks: + - name: Upload artifacts + if: always() + uses: ./.github/actions/upload_artifacts + with: + artifacts-path-dir: content/test-pre-commit-command + artifact-name: test-demisto-sdk-pre-commit-command-artifacts + + test-graph-commands: runs-on: ubuntu-latest - name: Pre Commit Checks + name: Test Graph Commands steps: - name: Checkout uses: actions/checkout@v3 - - name: Setup Python Environment From Content Repository - uses: ./.github/actions/setup_poetry_python_environment + - name: Setup Python Environment + uses: ./.github/actions/setup_environment with: python-version: '3.10' + artifacts-dir: test-graph-commands - - name: Run Pre Commit - uses: pre-commit/action@v3.0.0 + - name: Checkout content + uses: actions/checkout@v3 with: - extra_args: --all-files + repository: demisto/content + path: content - - name: Run Pytest collection + - name: Run Graph run: | - poetry run pytest --collect-only . 
+ source $(poetry env info --path)/bin/activate + cd content + mkdir -p test-graph-commands/logs + export DEMISTO_SDK_LOG_FILE_PATH=./test-graph-commands/logs/demisto_sdk_debug.log + # create content graph from scratch + demisto-sdk create-content-graph + # clean import folder + sudo rm -rf /var/lib/neo4j/import + # Update content graph from the bucket + demisto-sdk update-content-graph -g -o ./test-graph-commands/content_graph + + # Update content graph from the the previous content graph that was created/built + demisto-sdk update-content-graph -i ./test-graph-commands/content_graph/xsoar.zip -o ./test-graph-commands/content_graph + + - name: Upload artifacts + if: always() + uses: ./.github/actions/upload_artifacts + with: + artifacts-path-dir: content/test-graph-commands + artifact-name: test-graph-commands-artifacts diff --git a/demisto_sdk/__main__.py b/demisto_sdk/__main__.py index 96af6325f59..99035b2b2bf 100644 --- a/demisto_sdk/__main__.py +++ b/demisto_sdk/__main__.py @@ -236,9 +236,9 @@ def main(ctx, config, version, release_notes, **kwargs): last_release = get_last_remote_release_version() logger.info(f"[yellow]You are using demisto-sdk {__version__}.[/yellow]") if last_release and __version__ != last_release: - logger.info( - f"[yellow]however version {last_release} is available.\n" - f"To update, run pip3 install --upgrade demisto-sdk[/yellow]" + logger.warning( + f"A newer version ({last_release}) is available. 
" + f"To update, run 'pip3 install --upgrade demisto-sdk'" ) if release_notes: rn_entries = get_release_note_entries(__version__) diff --git a/demisto_sdk/commands/common/constants.py b/demisto_sdk/commands/common/constants.py index 1b8f677ebc7..efc75aa1fc0 100644 --- a/demisto_sdk/commands/common/constants.py +++ b/demisto_sdk/commands/common/constants.py @@ -1373,6 +1373,11 @@ class PB_Status: ".lock", ] ENDPOINT_COMMAND_NAME = "endpoint" +GET_MAPPING_FIELDS_COMMAND_NAME = "get-mapping-fields" +GET_MAPPING_FIELDS_COMMAND = { + "description": "Retrieves a User Profile schema which holds all of the user fields in the application. Used for outgoing mapping through the Get Schema option.", + "name": GET_MAPPING_FIELDS_COMMAND_NAME, +} RELIABILITY_PARAMETER_NAMES = [ "integration_reliability", # First item in the list will be used in errors @@ -1381,6 +1386,12 @@ class PB_Status: "reliability", ] +COMMON_PARAMS_DISPLAY_NAME = { + "insecure": "Trust any certificate (not secure)", + "unsecure": "Trust any certificate (not secure)", + "proxy": "Use system proxy settings", +} + REPUTATION_COMMAND_NAMES = {"file", "email", "domain", "url", "ip", "cve"} BANG_COMMAND_NAMES = {"file", "email", "domain", "url", "ip", "cve", "endpoint"} @@ -1677,6 +1688,7 @@ class PB_Status: FIRST_FETCH = "first_fetch" MAX_FETCH = "max_fetch" +DEFAULT_MAX_FETCH = 10 SKIP_RELEASE_NOTES_FOR_TYPES = ( FileType.RELEASE_NOTES, @@ -1734,6 +1746,10 @@ class PB_Status: ] +class Auto(str, Enum): + PREDEFINED = "PREDEFINED" + + class ContentItems(Enum): # the format is defined in issue #19786, may change in the future SCRIPTS = "automation" @@ -1927,6 +1943,7 @@ class ParameterType(Enum): TEXT_AREA_ENCRYPTED = 14 SINGLE_SELECT = 15 MULTI_SELECT = 16 + EXPIRATION_FIELD = 17 NO_TESTS_DEPRECATED = "No tests (deprecated)" diff --git a/demisto_sdk/commands/common/hook_validations/integration.py b/demisto_sdk/commands/common/hook_validations/integration.py index c34b6fbd40c..84fe001c4d5 100644 --- 
a/demisto_sdk/commands/common/hook_validations/integration.py +++ b/demisto_sdk/commands/common/hook_validations/integration.py @@ -33,6 +33,7 @@ XSOAR_CONTEXT_STANDARD_URL, XSOAR_SUPPORT, MarketplaceVersions, + ParameterType, ) from demisto_sdk.commands.common.default_additional_info_loader import ( load_default_additional_info_dict, @@ -78,7 +79,6 @@ class IntegrationValidator(ContentEntityValidator): also try to catch possible Backward compatibility breaks due to the preformed changes. """ - EXPIRATION_FIELD_TYPE = 17 ALLOWED_HIDDEN_PARAMS = {"longRunning", "feedIncremental", "feedReputation"} def __init__( @@ -1194,7 +1194,7 @@ def is_valid_display_configuration(self): configuration_display = configuration_param.get("display") # This parameter type will not use the display value. - if field_type == self.EXPIRATION_FIELD_TYPE: + if field_type == ParameterType.EXPIRATION_FIELD.value: if configuration_display: error_message, error_code = Errors.not_used_display_name( configuration_param["name"] diff --git a/demisto_sdk/commands/common/logger.py b/demisto_sdk/commands/common/logger.py index 0152cccbf86..379d30fd189 100644 --- a/demisto_sdk/commands/common/logger.py +++ b/demisto_sdk/commands/common/logger.py @@ -378,13 +378,11 @@ def logging_setup( Returns: logging.Logger: logger object """ + global LOG_FILE_PATH + if not hasattr(logging.getLoggerClass(), "success"): _add_logging_level("SUCCESS", SUCCESS_LEVEL) - global logger - global LOG_FILE_PATH - global LOG_FILE_PATH_PRINT - console_handler = logging.StreamHandler() console_handler.set_name(CONSOLE_HANDLER) console_handler.setLevel(console_log_threshold or logging.INFO) @@ -396,30 +394,47 @@ def logging_setup( log_handlers: List[logging.Handler] = [console_handler] + # We set up the console handler separately before the file logger is ready, so that we can display log messages + root_logger: logging.Logger = logging.getLogger("") + set_demisto_handlers_to_logger(_logger=root_logger, handlers=log_handlers) + 
set_demisto_handlers_to_logger(_logger=logger, handlers=log_handlers) + logger.propagate = False + if not skip_log_file_creation: if log_file_directory_path_str := ( log_file_path or os.getenv(DEMISTO_SDK_LOG_FILE_PATH) ): - log_file_directory_path = Path(log_file_directory_path_str) + current_log_file_path = Path(log_file_directory_path_str).resolve() - if not log_file_directory_path.is_dir(): - # Can't use 'logger.error' here, as the logger is not yet initialized. - exit( - f"Error: Configured logs path '{log_file_directory_path}' does not exist." + if current_log_file_path.is_dir(): + final_log_file_path = current_log_file_path / LOG_FILE_NAME + + elif current_log_file_path.is_file(): + logger.warning( + f"Log file path '{current_log_file_path}' is a file and not a directory. " + f"Log file will be created in parent directory '{current_log_file_path.parent}'." ) + final_log_file_path = current_log_file_path.parent / LOG_FILE_NAME + + else: # Path is neither a file nor a directory + logger.warning( + f"Log file path '{current_log_file_path}' does not exist and will be created." 
+ ) + current_log_file_path.mkdir(parents=True, exist_ok=True) + final_log_file_path = current_log_file_path / LOG_FILE_NAME else: # Use default log files path log_file_directory_path = LOGS_DIR log_file_directory_path.mkdir( parents=True, exist_ok=True ) # Generate directory if it doesn't exist + final_log_file_path = log_file_directory_path / LOG_FILE_NAME - # Add log file handler - log_file_path = log_file_directory_path / LOG_FILE_NAME - LOG_FILE_PATH = log_file_path + # Update global variable + LOG_FILE_PATH = final_log_file_path file_handler = RotatingFileHandler( - filename=log_file_path, + filename=LOG_FILE_PATH, mode="a", maxBytes=LOG_FILE_SIZE, backupCount=LOG_FILE_COUNT, @@ -439,18 +454,16 @@ def logging_setup( level=log_level, ) - root_logger: logging.Logger = logging.getLogger("") + # Set up handlers again, this time with the file handler set_demisto_handlers_to_logger(_logger=root_logger, handlers=log_handlers) set_demisto_handlers_to_logger(_logger=logger, handlers=log_handlers) - logger.propagate = False logger.debug(f"Python version: {sys.version}") logger.debug(f"Working dir: {Path.cwd()}") logger.debug(f"Platform: {platform.system()}") - if LOG_FILE_PATH and LOG_FILE_PATH_PRINT: - logger.info(f"[yellow]Log file location: {log_file_path}[/yellow]") - LOG_FILE_PATH_PRINT = False # Avoid printing the log file path more than once. 
+ if LOG_FILE_PATH_PRINT and not skip_log_file_creation: + logger.info(f"[yellow]Log file location: {LOG_FILE_PATH}[/yellow]") return logger diff --git a/demisto_sdk/commands/common/tests/update_id_set_test.py b/demisto_sdk/commands/common/tests/update_id_set_test.py index e3bb5de920b..030bebee532 100644 --- a/demisto_sdk/commands/common/tests/update_id_set_test.py +++ b/demisto_sdk/commands/common/tests/update_id_set_test.py @@ -2872,6 +2872,41 @@ def test_get_fields_by_script_argument(task): result = get_fields_by_script_argument(task) assert "field_name" in result + @staticmethod + def test_get_fields_by_script_argument__explicit_method(): + """ + Given + - A playbook task with custom fields that retrieves its values in an explicit way (using the ${} method instead of "get" method) + When + - Searching for dependent incident fields in the task script arguments + Then + - The function should return an empty set with no errors, since a the custom field - ${} sould be ignored + """ + task = {"id": "ID", "scriptarguments": {"customFields": {"simple": "${keys}"}}} + result = get_fields_by_script_argument(task) + assert result == set() + + @staticmethod + def test_get_fields_by_script_argument__json(): + """ + Given + - A playbook task with custom fields value is a json + When + - Searching for dependent incident fields in the task script arguments + Then + - The function should return all dependent incident custom fields in the task + """ + task = { + "id": "ID", + "scriptarguments": { + "customFields": { + "complex": {"root": "keys", "test_key": "tets_value", "id": "ID"} + } + }, + } + result = get_fields_by_script_argument(task) + assert result == {"root", "test_key"} + class TestFlow(unittest.TestCase): WIDGET_DATA = { diff --git a/demisto_sdk/commands/common/update_id_set.py b/demisto_sdk/commands/common/update_id_set.py index 8662379c065..028708d767a 100755 --- a/demisto_sdk/commands/common/update_id_set.py +++ b/demisto_sdk/commands/common/update_id_set.py @@ 
-679,13 +679,21 @@ def get_fields_by_script_argument(task): # the value should be a list of dicts in str format custom_field_value = list(field_value.values())[0] if isinstance(custom_field_value, str): - custom_fields_list = json.loads(custom_field_value) - if not isinstance(custom_fields_list, list): - custom_fields_list = [custom_fields_list] - for custom_field in custom_fields_list: - for field_name in custom_field.keys(): - if field_name not in BUILT_IN_FIELDS: - dependent_incident_fields.add(field_name) + if custom_field_value.startswith("$"): + logger.warning( + "You're using an unrecommended method - ${} - to retrieve values from the context data." + ) + continue + else: + custom_fields_list = json.loads(custom_field_value) + else: + custom_fields_list = custom_field_value + if not isinstance(custom_fields_list, list): + custom_fields_list = [custom_fields_list] + for custom_field in custom_fields_list: + for field_name in custom_field.keys(): + if field_name not in BUILT_IN_FIELDS: + dependent_incident_fields.add(field_name) return dependent_incident_fields diff --git a/demisto_sdk/commands/content_graph/objects/integration.py b/demisto_sdk/commands/content_graph/objects/integration.py index e3fc4330038..2378a99b2e3 100644 --- a/demisto_sdk/commands/content_graph/objects/integration.py +++ b/demisto_sdk/commands/content_graph/objects/integration.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import TYPE_CHECKING, Callable, List, Optional +from typing import TYPE_CHECKING, Any, Callable, List, Optional import demisto_client @@ -11,9 +11,9 @@ # avoid circular imports from demisto_sdk.commands.content_graph.objects.script import Script -from pydantic import Field +from pydantic import BaseModel, Field -from demisto_sdk.commands.common.constants import MarketplaceVersions +from demisto_sdk.commands.common.constants import Auto, MarketplaceVersions from demisto_sdk.commands.common.logger import logger from demisto_sdk.commands.content_graph.common 
import ContentType, RelationshipType from demisto_sdk.commands.content_graph.objects.integration_script import ( @@ -21,10 +21,54 @@ ) +class Parameter(BaseModel): + name: str + type: int + additionalinfo: Optional[str] = None + defaultvalue: Optional[Any] = None + required: Optional[bool] = False + display: Optional[str] = None + section: Optional[str] = None + advanced: Optional[bool] = False + hidden: Optional[Any] = False + options: Optional[List[str]] = None + displaypassword: Optional[str] = None + hiddenusername: Optional[bool] = False + hiddenpassword: Optional[bool] = False + fromlicense: Optional[str] = None + + +class Argument(BaseModel): + name: str + description: str + required: Optional[bool] = False + default: Optional[bool] = False + predefined: Optional[List[str]] = None + isArray: Optional[bool] = False + defaultvalue: Optional[Any] = None + secret: Optional[bool] = False + deprecated: Optional[bool] = False + type: Optional[str] = None + hidden: Optional[bool] = False + auto: Optional[Auto] = None + + +class Output(BaseModel): + description: str + contentPath: Optional[str] = None + contextPath: Optional[str] = None + important: Optional[bool] = False + importantDescription: Optional[str] = None + type: Optional[str] = None + + class Command(BaseNode, content_type=ContentType.COMMAND): # type: ignore[call-arg] name: str # From HAS_COMMAND relationship + args: List[Argument] = Field([], exclude=True) + outputs: List[Output] = Field([], exclude=True) + deprecated: bool = Field(False) description: Optional[str] = Field("") @@ -50,9 +94,12 @@ class Integration(IntegrationScript, content_type=ContentType.INTEGRATION): # t is_fetch_events: bool = Field(False, alias="isfetchevents") is_fetch_assets: bool = False is_feed: bool = False + is_beta: bool = False + is_mappable: bool = False long_running: bool = False category: str commands: List[Command] = [] + params: List[Parameter] = Field([], exclude=True) @property def imports(self) -> List["Script"]: @@ 
-127,3 +174,19 @@ def match(_dict: dict, path: Path) -> bool: if "category" in _dict and path.suffix == ".yml": return True return False + + def save(self): + super().save() + data = self.data + data["script"]["commands"] = [] + yml_commands = [] + for command in self.commands: + yml_commands.append( + { + "name": command.name, + "deprecated": command.deprecated, + "description": command.description, + } + ) + + data["script"]["commands"] = yml_commands diff --git a/demisto_sdk/commands/content_graph/parsers/integration.py b/demisto_sdk/commands/content_graph/parsers/integration.py index 29fefd72784..fc3c7e511c6 100644 --- a/demisto_sdk/commands/content_graph/parsers/integration.py +++ b/demisto_sdk/commands/content_graph/parsers/integration.py @@ -19,6 +19,8 @@ class CommandParser: name: str deprecated: bool description: str + args: List[dict] + outputs: List[dict] class IntegrationParser(IntegrationScriptParser, content_type=ContentType.INTEGRATION): @@ -34,7 +36,9 @@ def __init__( self.is_fetch = self.script_info.get("isfetch", False) self.is_fetch_assets = self.script_info.get("isfetchassets", False) self.is_fetch_events = self.script_info.get("isfetchevents", False) + self.is_mappable = self.script_info.get("ismappable", False) self.is_feed = self.script_info.get("feed", False) + self.is_beta = self.script_info.get("beta", False) self.long_running = self.script_info.get("longRunning", False) self.is_long_running = self.script_info.get("longRunning", False) self.commands: List[CommandParser] = [] @@ -51,6 +55,7 @@ def field_mapping(self): "type": "script.type", "subtype": "script.subtype", "alt_docker_images": "script.alt_dockerimages", + "configuration": "configuration", } ) return super().field_mapping @@ -59,6 +64,10 @@ def field_mapping(self): def display_name(self) -> Optional[str]: return get_value(self.yml_data, self.field_mapping.get("display_name", "")) + @property + def params(self) -> Optional[List]: + return get_value(self.yml_data, 
self.field_mapping.get("configuration", ""), []) + def connect_to_commands(self) -> None: """Creates HAS_COMMAND relationships with the integration commands. Command's properties are stored in the relationship's data, @@ -68,6 +77,8 @@ def connect_to_commands(self) -> None: name = command_data.get("name") deprecated = command_data.get("deprecated", False) or self.deprecated description = command_data.get("description") + args = command_data.get("arguments") or [] + outputs = command_data.get("outputs") or [] self.add_relationship( RelationshipType.HAS_COMMAND, target=name, @@ -77,7 +88,13 @@ def connect_to_commands(self) -> None: description=description, ) self.commands.append( - CommandParser(name=name, description=description, deprecated=deprecated) + CommandParser( + name=name, + description=description, + deprecated=deprecated, + args=args, + outputs=outputs, + ) ) def connect_to_dependencies(self) -> None: diff --git a/demisto_sdk/commands/generate_outputs/generate_outputs.py b/demisto_sdk/commands/generate_outputs/generate_outputs.py index e16b3f4b9f2..e2894f4ea27 100644 --- a/demisto_sdk/commands/generate_outputs/generate_outputs.py +++ b/demisto_sdk/commands/generate_outputs/generate_outputs.py @@ -15,14 +15,14 @@ def json_to_outputs_flow(kwargs): if not kwargs.get("command"): - logger.info( - "[red]To use the json-to-outputs version of this command please include a `command` argument.[/red]" + logger.error( + "To use the json-to-outputs version of this command please include a `command` argument." ) return 1 if not kwargs.get("prefix"): - logger.info( - "[red]To use the json-to-outputs version of this command please include a `prefix` argument.[/red]" + logger.error( + "To use the json-to-outputs version of this command please include a `prefix` argument." 
) return 1 diff --git a/demisto_sdk/commands/generate_outputs/generate_outputs_test.py b/demisto_sdk/commands/generate_outputs/generate_outputs_test.py index 529e9cafbe9..54dfd5b607e 100644 --- a/demisto_sdk/commands/generate_outputs/generate_outputs_test.py +++ b/demisto_sdk/commands/generate_outputs/generate_outputs_test.py @@ -8,7 +8,7 @@ @pytest.mark.parametrize( - "args, excpected_stdout", + "args, expected_stdout", [ (["-j", "123"], "please include a `command` argument."), (["-j", "123", "-c", "ttt"], "please include a `prefix` argument."), @@ -16,7 +16,7 @@ ], ) def test_generate_outputs_json_to_outputs_flow( - mocker, monkeypatch, args, excpected_stdout + mocker, monkeypatch, args, expected_stdout ): """ Given @@ -28,7 +28,7 @@ def test_generate_outputs_json_to_outputs_flow( Then - Ensure that the outputs are valid """ - logger_info = mocker.patch.object(logging.getLogger("demisto-sdk"), "info") + logger_error = mocker.patch.object(logging.getLogger("demisto-sdk"), "error") monkeypatch.setenv("COLUMNS", "1000") import demisto_sdk.commands.generate_outputs.generate_outputs as go @@ -37,14 +37,14 @@ def test_generate_outputs_json_to_outputs_flow( runner = CliRunner() runner.invoke(main.generate_outputs, args=args, catch_exceptions=False) - if excpected_stdout: - assert str_in_call_args_list(logger_info.call_args_list, excpected_stdout) + if expected_stdout: + assert str_in_call_args_list(logger_error.call_args_list, expected_stdout) else: - assert len(logger_info.call_args_list) == 0 + assert len(logger_error.call_args_list) == 0 @pytest.mark.parametrize( - "args, excpected_stdout, expected_exit_code", + "args, expected_stdout, expected_exit_code", [ ("-e", "requires an argument", 2), (["-e", ""], "command please include an `input` argument", 0), @@ -52,7 +52,7 @@ def test_generate_outputs_json_to_outputs_flow( ], ) def test_generate_outputs_generate_integration_context_flow( - mocker, monkeypatch, args, excpected_stdout, expected_exit_code + mocker, 
monkeypatch, args, expected_stdout, expected_exit_code ): """ Given @@ -75,4 +75,4 @@ def test_generate_outputs_generate_integration_context_flow( result = runner.invoke(main.generate_outputs, args=args, catch_exceptions=False) assert result.exit_code == expected_exit_code if expected_exit_code == 0: - assert str_in_call_args_list(logger_info.call_args_list, excpected_stdout) + assert str_in_call_args_list(logger_info.call_args_list, expected_stdout) diff --git a/demisto_sdk/commands/validate/old_validate_manager.py b/demisto_sdk/commands/validate/old_validate_manager.py index dad4809d241..a4e73139897 100644 --- a/demisto_sdk/commands/validate/old_validate_manager.py +++ b/demisto_sdk/commands/validate/old_validate_manager.py @@ -20,6 +20,7 @@ GENERIC_FIELDS_DIR, GENERIC_TYPES_DIR, IGNORED_PACK_NAMES, + LISTS_DIR, OLDEST_SUPPORTED_VERSION, PACKS_DIR, PACKS_PACK_META_FILE_NAME, @@ -740,8 +741,13 @@ def is_skipped_file(self, file_path: str) -> bool: path = Path(file_path) if LOG_FILE_PATH and LOG_FILE_PATH == path: return True - return path.name in SKIPPED_FILES or ( - path.name == "CommonServerPython.py" and path.parent.parent.name != "Base" + return ( + path.name in SKIPPED_FILES + or ( + path.name == "CommonServerPython.py" + and path.parent.parent.name != "Base" + ) + or (path.parent.name == LISTS_DIR and path.name.endswith("_data.json")) ) # flake8: noqa: C901 diff --git a/demisto_sdk/commands/validate/tests/IN_validators_test.py b/demisto_sdk/commands/validate/tests/IN_validators_test.py index 71ccc8b83ed..bc967a9ad47 100644 --- a/demisto_sdk/commands/validate/tests/IN_validators_test.py +++ b/demisto_sdk/commands/validate/tests/IN_validators_test.py @@ -1,15 +1,73 @@ +from typing import List + import pytest +from demisto_sdk.commands.common.constants import ( + COMMON_PARAMS_DISPLAY_NAME, + DEFAULT_MAX_FETCH, + FIRST_FETCH, + FIRST_FETCH_PARAM, + GET_MAPPING_FIELDS_COMMAND, + GET_MAPPING_FIELDS_COMMAND_NAME, + MAX_FETCH, + MAX_FETCH_PARAM, +) +from 
demisto_sdk.commands.content_graph.objects.integration import Integration from demisto_sdk.commands.validate.tests.test_tools import ( create_integration_object, create_script_object, ) +from demisto_sdk.commands.validate.validators.IN_validators.IN100_is_valid_proxy_and_insecure import ( + IsValidProxyAndInsecureValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN102_is_valid_checkbox_param import ( + IsValidCheckboxParamValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN104_is_valid_category import ( + IsValidCategoryValidator, +) from demisto_sdk.commands.validate.validators.IN_validators.IN108_is_valid_subtype import ( ValidSubtypeValidator, ) +from demisto_sdk.commands.validate.validators.IN_validators.IN109_is_id_contain_beta import ( + IsIdContainBetaValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN110_is_name_contain_beta import ( + IsNameContainBetaValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN112_is_display_contain_beta import ( + IsDisplayContainBetaValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN113_is_command_args_contain_duplications import ( + IsCommandArgsContainDuplicationsValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN114_is_params_contain_duplications import ( + IsParamsContainDuplicationsValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN115_is_valid_context_path import ( + IsValidContextPathValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN117_should_have_display_field import ( + ShouldHaveDisplayFieldValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN118_is_missing_display_field import ( + IsMissingDisplayFieldValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN125_is_valid_max_fetch_param import ( + IsValidMaxFetchParamValidator, +) +from 
demisto_sdk.commands.validate.validators.IN_validators.IN126_is_valid_fetch_integration import ( + IsValidFetchIntegrationValidator, +) from demisto_sdk.commands.validate.validators.IN_validators.IN130_is_integration_runable import ( IsIntegrationRunnableValidator, ) +from demisto_sdk.commands.validate.validators.IN_validators.IN131_is_valid_as_mappable_integration import ( + IsValidAsMappableIntegrationValidator, +) +from demisto_sdk.commands.validate.validators.IN_validators.IN134_is_containing_multiple_default_args import ( + IsContainingMultipleDefaultArgsValidator, +) @pytest.mark.parametrize( @@ -53,7 +111,7 @@ ], ) def test_ValidSubtypeValidator_is_valid( - content_items, expected_number_of_failures, expected_msgs + content_items, expected_number_of_failures: int, expected_msgs: List[str] ): """ Given @@ -157,7 +215,7 @@ def test_ValidSubtypeValidator_is_valid( ], ) def test_IsIntegrationRunnableValidator_is_valid( - content_items, expected_number_of_failures + content_items: List[Integration], expected_number_of_failures: int ): """ Given @@ -184,3 +242,1670 @@ def test_IsIntegrationRunnableValidator_is_valid( or results[0].message == "Could not find any runnable command in the integration.\nMust have at least one of: a command under the `commands` section, `isFetch: true`, `feed: true`, or `longRunning: true`." 
) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object(), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "insecure", + "type": 8, + "required": False, + "display": "Trust any certificate (not secure)", + } + ] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "unscure", + "type": 8, + "required": False, + "display": "Trust any certificate (not secure)", + } + ] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "proxy", + "type": 8, + "required": False, + "display": "Use system proxy settings", + } + ] + ], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["configuration"], + values=[[{"name": "proxy", "display": "a", "type": 1}]], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "proxy", + "display": "Use system proxy settingss", + "type": 1, + } + ] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "proxy", + "display": "Use system proxy settings", + "type": 1, + "required": False, + } + ] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "proxy", + "display": "Use system proxy settings", + "type": 8, + "required": True, + } + ] + ], + ), + ], + 4, + [ + "The following params are invalid:\nThe proxy param display name should be 'Use system proxy settings', the 'defaultvalue' field should be 'False', the 'required' field should be 'False', and the 'required' field should be 8.", + "The following params are invalid:\nThe proxy param display name should be 'Use system proxy settings', the 'defaultvalue' field should be 'False', the 'required' field should be 'False', and the 'required' field should be 8.", + "The following params are invalid:\nThe proxy param display name should be 'Use system proxy settings', 
the 'defaultvalue' field should be 'False', the 'required' field should be 'False', and the 'required' field should be 8.", + "The following params are invalid:\nThe proxy param display name should be 'Use system proxy settings', the 'defaultvalue' field should be 'False', the 'required' field should be 'False', and the 'required' field should be 8.", + ], + ), + ], +) +def test_IsValidProxyAndInsecureValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Four valid integrations: + - One integration without proxy / insecure params. + - One integration with valid insecure param. + - One integration with valid unsecure param. + - One integration with valid proxy param. + - Case 2: Four invalid integrations: + - One integration with proxy param with a wrong display name. + - One integration with proxy param without required field and a wrong type field. + - One integration with proxy param with a wrong type. + - One integration with proxy param with a required field set to true. + When + - Calling the IsValidProxyAndInsecureValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should pass all. + - Case 2: Should fail all. + """ + results = IsValidProxyAndInsecureValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +def test_IsValidProxyAndInsecureValidator_fix(): + """ + Given + An integration with invalid proxy & insecure params. + When + - Calling the IsValidProxyAndInsecureValidator fix function. + Then + - Make sure that all the relevant fields were added/fixed and that the right msg was returned. 
+ """ + content_item = create_integration_object( + paths=["configuration"], + values=[ + [ + {"name": "proxy", "display": "a", "type": 1}, + { + "name": "insecure", + "type": 1, + "required": True, + "display": "Trust any certificate (not secure)", + }, + ] + ], + ) + validator = IsValidProxyAndInsecureValidator() + validator.fixed_params[content_item.name] = { + "insecure": { + "name": "insecure", + "type": 8, + "required": False, + "display": "Trust any certificate (not secure)", + }, + "proxy": { + "name": "proxy", + "type": 8, + "required": False, + "display": "Use system proxy settings", + }, + } + assert ( + validator.fix(content_item).message + == "Corrected the following params: insecure, proxy." + ) + for param in content_item.params: + assert param.type == 8 + assert not param.required + assert param.display == COMMON_PARAMS_DISPLAY_NAME[param.name] + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object( + paths=["configuration"], + values=[ + [{"name": "test_param_1", "type": 8, "display": "test param 1"}] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [{"name": "test_param_2", "type": 0, "display": "test param 2"}] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_param_3", + "type": 8, + "display": "test param 3", + "required": False, + } + ] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_param_4", + "type": 8, + "display": "test param 4", + "required": True, + }, + { + "name": "test_param_5", + "type": 8, + "display": "test param 5", + "required": False, + }, + { + "name": "test_param_6", + "type": 8, + "display": "test param 6", + "required": False, + }, + ] + ], + ), + ], + 3, + [ + "The following checkbox params required field is not set to True: test_param_1.\nMake sure to set it to True.", + "The following checkbox params required 
field is not set to True: test_param_3.\nMake sure to set it to True.", + "The following checkbox params required field is not set to True: test_param_5, test_param_6.\nMake sure to set it to True.", + ], + ), + ], +) +def test_IsValidCheckboxParamValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Four integrations: + - One integration with a param of type 8 but no required field. + - One integration with a param of type 0 and no required field. + - One integration with a param of type 8 but required field set to False. + - One integration with 3 params of type 8, one's required field is set to True and the other two are set to False. + When + - Calling the IsValidCheckboxParamValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should fail all except test_param 2 & 4. + """ + results = IsValidCheckboxParamValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +def test_IsValidCheckboxParamValidator_fix(): + """ + Given + An integration with invalid checkbox params. + When + - Calling the IsValidCheckboxParamValidator fix function. + Then + - Make sure that all the relevant fields were added/fixed and that the right msg was returned. 
+ """ + content_item = create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_param_4", + "type": 8, + "display": "test param 4", + "required": True, + }, + { + "name": "test_param_5", + "type": 8, + "display": "test param 5", + "required": False, + }, + { + "name": "test_param_6", + "type": 8, + "display": "test param 6", + "required": False, + }, + ] + ], + ) + validator = IsValidCheckboxParamValidator() + validator.misconfigured_checkbox_params_by_integration[content_item.name] = [ + "test_param_5", + "test_param_6", + ] + assert ( + validator.fix(content_item).message + == "Set required field of the following params was set to True: test_param_5, test_param_6." + ) + assert all([param.required for param in content_item.params]) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ([create_integration_object()], 0, []), + ( + [create_integration_object(["category"], [""])], + 1, + [ + "The Integration's category (empty category section) doesn't match the standard,\nplease make sure that the field is a category from the following options: Network Security, Utilities, Forensics & Malware Analysis." + ], + ), + ( + [ + create_integration_object(["category"], ["Utilities"]), + create_integration_object(["category"], ["Random Category..."]), + create_integration_object(), + ], + 1, + [ + "The Integration's category (Random Category...) doesn't match the standard,\nplease make sure that the field is a category from the following options: Network Security, Utilities, Forensics & Malware Analysis." + ], + ), + ], +) +def test_IsValidCategoryValidator_is_valid( + mocker, + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items. + - Case 1: One integration with n valid category. + - Case 2: One integration with an empty category field. + - Case 3: Three integrations: + - Two integrations with a valid category. 
+ - One integration with an invalid category. + When + - Calling the IsValidCategoryValidator is_valid function. + Then + - Make sure the right amount of pack metadatas failed, and that the right error message is returned. + - Case 1: Shouldn't fail. + - Case 2: Should fail. + - Case 3: Should fail only the pack_metadata with the "Random Category..." + """ + + mocker.patch( + "demisto_sdk.commands.validate.validators.IN_validators.IN104_is_valid_category.get_current_categories", + return_value=["Network Security", "Utilities", "Forensics & Malware Analysis"], + ) + results = IsValidCategoryValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object( + paths=["commonfields.id", "script.beta"], + values=["contain_beta", False], + ), + create_integration_object( + paths=["commonfields.id", "script.beta"], values=["test", True] + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["commonfields.id", "script.beta"], values=["beta_test", True] + ), + create_integration_object( + paths=["commonfields.id", "script.beta"], values=["test beta", True] + ), + ], + 2, + [ + "The ID field (beta_test) contains the word 'beta', make sure to remove it.", + "The ID field (test beta) contains the word 'beta', make sure to remove it.", + ], + ), + ], +) +def test_IsIdContainBetaValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Two integration: + - One non-beta integration with beta in id. + - One beta integration without beta in id. + - Case 2: Two integration: + - One beta integration with id starting with beta. + - One beta integration with beta in id. 
+ When + - Calling the IsIdContainBetaValidator is valid function. + Then + - Make sure the right amount of failures return. + - Case 1: Shouldn't fail any. + - Case 2: Should fail both. + """ + results = IsIdContainBetaValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +def test_IsIdContainBetaValidator_fix(): + """ + Given + - Case 1: A beta integration with an ID containing the word beta in it. + When + - Calling the IsIdContainBetaValidator fix function. + Then + - Make sure the right ID was fixed correctly and that the right ID was returned. + """ + content_item = create_integration_object( + paths=["commonfields.id", "script.beta"], values=["test beta", True] + ) + assert content_item.object_id == "test beta" + assert ( + IsIdContainBetaValidator().fix(content_item).message + == "Removed the word 'beta' from the ID, the new ID is: test." + ) + assert content_item.object_id == "test" + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object( + paths=["name", "script.beta"], values=["contain_beta", False] + ), + create_integration_object( + paths=["name", "script.beta"], values=["test", True] + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["name", "script.beta"], values=["beta_test", True] + ), + create_integration_object( + paths=["name", "script.beta"], values=["test beta", True] + ), + ], + 2, + [ + "The name field (beta_test) contains the word 'beta', make sure to remove it.", + "The name field (test beta) contains the word 'beta', make sure to remove it.", + ], + ), + ], +) +def test_IsNameContainBetaValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. 
+ - Case 1: Two integration: + - One non-beta integration with beta in the name. + - One beta integration without beta in the name. + - Case 2: Two integration: + - One beta integration with name starting with beta. + - One beta integration with beta in the name. + When + - Calling the IsNameContainBetaValidator is valid function. + Then + - Make sure the right amount of failures return. + - Case 1: Shouldn't fail any. + - Case 2: Should fail both. + """ + results = IsNameContainBetaValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +def test_IsNameContainBetaValidator_fix(): + """ + Given + - Case 1: A beta integration with a name containing the word beta in it. + When + - Calling the IsNameContainBetaValidator fix function. + Then + - Make sure the right ID was fixed correctly and that the right name was returned. + """ + content_item = create_integration_object( + paths=["name", "script.beta"], values=["test beta", True] + ) + assert content_item.name == "test beta" + assert ( + IsNameContainBetaValidator().fix(content_item).message + == "Removed the word 'beta' from the name field, the new name is: test." 
+ ) + assert content_item.name == "test" + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object( + paths=["display", "script.beta"], values=["contain beta", True] + ), + create_integration_object( + paths=["display", "script.beta"], values=["test", False] + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["display", "script.beta"], values=["should fail", True] + ), + ], + 1, + [ + "The display name (should fail) doesn't contain the word 'beta', make sure to add it.", + ], + ), + ], +) +def test_IsDisplayContainBetaValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Two integration: + - One beta integration with beta in the display name. + - One non-beta integration without beta in the display name. + - Case 2: Two integration: + - One beta integration without beta in the display name. + When + - Calling the IsDisplayContainBetaValidator is valid function. + Then + - Make sure the right amount of failures return. + - Case 1: Shouldn't fail any. + - Case 2: Should fail. 
+ """ + results = IsDisplayContainBetaValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object( + paths=["script.commands"], + values=[[]], + ), + create_integration_object( + paths=["script.commands"], + values=[ + [ + { + "name": "ip", + "description": "ip command", + "deprecated": False, + "arguments": [ + { + "name": "ip_1", + "default": True, + "isArray": True, + "required": True, + "description": "ip_1_description", + }, + { + "name": "ip_2", + "default": True, + "isArray": True, + "required": True, + "description": "ip_2_description", + }, + ], + "outputs": [], + }, + ] + ], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["script.commands"], + values=[ + [ + { + "name": "ip_1", + "description": "ip command 1", + "deprecated": False, + "arguments": [ + { + "name": "ip_1", + "default": True, + "isArray": True, + "required": True, + "description": "ip_1_description", + }, + { + "name": "ip_2", + "default": True, + "isArray": True, + "required": True, + "description": "ip_2_description", + }, + { + "name": "ip_1", + "default": True, + "isArray": True, + "required": True, + "description": "ip_1_description", + }, + ], + "outputs": [], + } + ] + ], + ) + ], + 1, + [ + "The following commands contain duplicated arguments:\nCommand ip_1, contains multiple appearances of the following arguments ip_1.\nPlease make sure to remove the duplications." + ], + ), + ], +) +def test_IsCommandArgsContainDuplicationsValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Two valid integrations: + - One integration without commands. 
+ - One integration with one command without duplicated args. + - Case 2: One invalid integration with a command with 3 arguments Two of the same name and one different.. + When + - Calling the IsCommandArgsContainDuplicationsValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Shouldn't fail any. + - Case 2: Should fail. + """ + results = IsCommandArgsContainDuplicationsValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object(), + create_integration_object( + paths=["configuration"], + values=[[]], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["configuration"], + values=[ + [ + {"name": "test_1", "display": "a", "type": 1}, + {"name": "test_1", "display": "a", "type": 1}, + ] + ], + ), + ], + 1, + [ + "The following params are duplicated: test_1.\nPlease make sure your file doesn't contain duplications.", + ], + ), + ], +) +def test_IsParamsContainDuplicationsValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Two valid integrations: + - One integration with params but without duplications. + - One integration with empty params list. + - Case 2: One invalid integration with a param with a name that return multiple name. + When + - Calling the IsParamsContainDuplicationsValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should pass all. + - Case 2: Should fail. 
+ """ + results = IsParamsContainDuplicationsValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object( + paths=["script.commands"], + values=[[]], + ), + create_integration_object( + paths=["script.commands"], + values=[ + [ + { + "name": "ip", + "description": "ip command", + "deprecated": False, + "arguments": [ + { + "name": "ip_1", + "default": True, + "isArray": True, + "required": True, + "description": "ip_1_description", + }, + { + "name": "ip_2", + "default": True, + "isArray": True, + "required": True, + "description": "ip_2_description", + }, + ], + "outputs": [], + }, + ] + ], + ), + create_integration_object( + paths=["script.commands"], + values=[ + [ + { + "name": "ip", + "description": "ip command", + "deprecated": False, + "arguments": [], + "outputs": [ + { + "name": "output_1", + "contextPath": "path_1", + "description": "description_1", + }, + { + "name": "output_2", + "contextPath": "path_2", + "description": "description_2", + }, + ], + }, + ] + ], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["script.commands"], + values=[ + [ + { + "name": "ip", + "description": "ip command", + "deprecated": False, + "arguments": [], + "outputs": [ + { + "name": "output_1", + "contextPath": "path_1", + "description": "description_1", + }, + { + "name": "output_2", + "contextpath": "path_2", + "description": "description_2", + }, + { + "name": "output_3", + "description": "description_3", + }, + { + "name": "output_4", + "contextPath": "path_4", + "description": "description_4", + }, + ], + }, + ] + ], + ), + create_integration_object( + paths=["script.commands"], + values=[ + [ + { + "name": "ip_1", + "description": "ip command", + "deprecated": False, + 
"arguments": [], + "outputs": [ + { + "name": "output_1", + "contextpath": "path_1", + "description": "description_1", + } + ], + }, + { + "name": "ip_2", + "description": "ip command", + "deprecated": False, + "arguments": [], + "outputs": [ + {"name": "output_1", "description": "description_1"} + ], + }, + ] + ], + ), + ], + 2, + [ + "The following commands include outputs with context path different from missing contextPath, please make sure to add: ip.", + "The following commands include outputs with context path different from missing contextPath, please make sure to add: ip_1, ip_2.", + ], + ), + ], +) +def test_IsValidContextPathValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Three valid integrations: + - One integration without commands. + - One integration with a command with empty outputs. + - One integration with one command with valid outputs. + - Case 2: Two invalid integrations: + - One integration with one command with multiple malformed outputs. + - One integration with two commands with malformed outputs. + When + - Calling the IsValidContextPathValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Shouldn't fail any. + - Case 2: Should fail all. 
+ """ + results = IsValidContextPathValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object(), + create_integration_object( + paths=["configuration"], + values=[[]], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_1", + "type": 17, + "required": False, + }, + { + "name": "test_2", + "type": 8, + "required": False, + "display": "Trust any certificate (not secure)", + }, + ] + ], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_1", + "type": 17, + "required": False, + "display": "test", + }, + { + "name": "test_2", + "type": 8, + "required": False, + "display": "Trust any certificate (not secure)", + }, + ] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_1", + "type": 17, + "required": False, + "display": "test", + }, + { + "name": "test_2", + "type": 17, + "required": False, + "display": "display", + }, + { + "name": "test_3", + "type": 17, + "required": False, + }, + ] + ], + ), + ], + 2, + [ + "The following params are expiration fields and therefore can't have a 'display' field. Make sure to remove the field for the following: test_1.", + "The following params are expiration fields and therefore can't have a 'display' field. Make sure to remove the field for the following: test_1, test_2.", + ], + ), + ], +) +def test_ShouldHaveDisplayFieldValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Three valid integrations: + - One integration without type 17 param. 
+ - One integration without params. + - One integration with two params: one type 17 without display name and one type 8. + - Case 2: Two invalid integrations: + - One integration with two params: one type 17 with display name and one type 8. + - One integration with three params: one type 17 without display name, and two type 17 with display name. + When + - Calling the ShouldHaveDisplayFieldValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should pass all. + - Case 2: Should fail all the type 17 with display names. + """ + results = ShouldHaveDisplayFieldValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +def test_ShouldHaveDisplayFieldValidator_fix(): + """ + Given + - An integration with three params: one type 17 without display name, and two type 17 with display name. + When + - Calling the ShouldHaveDisplayFieldValidator fix function. + Then + - Make sure the display name was removed for all params and that the right msg was returned. + """ + content_item = create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_1", + "type": 17, + "required": False, + "display": "test", + }, + { + "name": "test_2", + "type": 17, + "required": False, + "display": "display", + }, + { + "name": "test_3", + "type": 17, + "required": False, + }, + ] + ], + ) + validator = ShouldHaveDisplayFieldValidator() + validator.invalid_params[content_item.name] = ["test_1", "test_2"] + assert ( + validator.fix(content_item).message + == "Removed display field for the following params: test_1, test_2." 
+ ) + assert not any( + [(param.type == 17 and param.display) for param in content_item.params] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object(), + create_integration_object( + paths=["configuration"], + values=[[]], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_1", + "type": 17, + "required": False, + }, + { + "name": "test_2", + "type": 8, + "required": False, + "display": "test 2", + }, + ] + ], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_1", + "type": 17, + "required": False, + }, + { + "name": "test_2", + "type": 8, + "required": False, + }, + ] + ], + ), + create_integration_object( + paths=["configuration"], + values=[ + [ + { + "name": "test_1", + "type": 10, + "required": False, + }, + { + "name": "test_2", + "type": 10, + "required": False, + "display": "display", + }, + { + "name": "test_3", + "type": 10, + "required": False, + }, + ] + ], + ), + ], + 2, + [ + "The following params doesn't have a display field, please make sure to add one: test_2.", + "The following params doesn't have a display field, please make sure to add one: test_1, test_3.", + ], + ), + ], +) +def test_IsMissingDisplayFieldValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Three valid integrations: + - One integration without type 17 param, all other params with display name. + - One integration without params. + - One integration with two params: one type 17 without display name and one type 8 with display name. + - Case 2: Two invalid integrations: + - One integration with two params: one type 17 and type params both without display name. 
+ - One integration with three params: Two type 10 without display name, and one type 10 with display name. + When + - Calling the IsMissingDisplayFieldValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should pass all. + - Case 2: Should fail all the type 8 / 10 without display names. + """ + results = IsMissingDisplayFieldValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object(), + create_integration_object( + paths=["configuration"], + values=[[]], + ), + create_integration_object( + paths=["configuration", "script.isfetch"], + values=[ + [ + { + "name": "max_fetch", + "type": 0, + "required": False, + "defaultvalue": 200, + "display": "Maximum incidents to fetch.", + "additionalinfo": "Maximum number of incidents per fetch. The default value is 200.", + }, + { + "name": "test_2", + "type": 8, + "required": False, + "display": "test 2", + }, + ], + True, + ], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["configuration", "script.isfetch"], + values=[ + [ + { + "name": "max_fetch", + "type": 0, + "required": False, + "display": "Maximum incidents to fetch.", + "additionalinfo": "Maximum number of incidents per fetch. 
The default value is 200.", + }, + { + "name": "test_2", + "type": 8, + "required": False, + }, + ], + True, + ], + ), + ], + 1, + [ + "The integration is a fetch integration with max_fetch param, please make sure the max_fetch param has a default value.", + ], + ), + ], +) +def test_IsValidMaxFetchParamValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Three valid integrations: + - One integration without max_fetch param. + - One integration without params. + - One fetch integration with max_fetch param with a default value. + - Case 2: One invalid integration with max_fetch param without default value and another param that isn't max_fetch. + When + - Calling the IsValidMaxFetchParamValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should pass all. + - Case 2: Should fail all the type 8 / 10 without display names. + """ + results = IsValidMaxFetchParamValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +def test_IsValidMaxFetchParamValidator_fix(): + """ + Given + - A fetching integration with two params: one is a max_fetch without default value and one isn't a max_fetch + When + - Calling the IsValidMaxFetchParamValidator fix function. + Then + - Make sure the defaultvalue was updated to the max_fetch param and that the right msg was returned. + """ + content_item = create_integration_object( + paths=["configuration", "script.isfetch"], + values=[ + [ + { + "name": "max_fetch", + "type": 0, + "required": False, + "display": "Maximum incidents to fetch.", + "additionalinfo": "Maximum number of incidents per fetch. 
The default value is 200.", + }, + { + "name": "test_2", + "type": 8, + "required": False, + }, + ], + True, + ], + ) + assert ( + IsValidMaxFetchParamValidator().fix(content_item).message + == f"Added a 'defaultvalue = {DEFAULT_MAX_FETCH}' to the max_fetch param." + ) + assert any( + [ + (param.name == "max_fetch" and param.defaultvalue is not None) + for param in content_item.params + ] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object(), + create_integration_object( + paths=["configuration"], + values=[[]], + ), + create_integration_object( + paths=["configuration", "script.isfetch"], + values=[ + [MAX_FETCH_PARAM, FIRST_FETCH_PARAM], + True, + ], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["script.isfetch"], + values=[True], + ), + create_integration_object( + paths=["configuration", "script.isfetch"], + values=[ + [MAX_FETCH_PARAM], + True, + ], + ), + create_integration_object( + paths=["configuration", "script.isfetch"], + values=[ + [FIRST_FETCH_PARAM], + True, + ], + ), + ], + 3, + [ + "The integration is a fetch integration and missing the following params: max_fetch, first_fetch.", + "The integration is a fetch integration and missing the following params: first_fetch.", + "The integration is a fetch integration and missing the following params: max_fetch.", + ], + ), + ], +) +def test_IsValidFetchIntegrationValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Three valid integrations: + - One none fetching integration without max_fetch or first_fetch params. + - One none fetching integration without params. + - One fetch integration with both first_fetch and max_fetch params. + - Case 2: Three invalid integrations: + - One integration without max_fetch & first_fetch params. 
+ - One integration without first_fetch param. + - One integration without max_fetch param. + + When + - Calling the IsValidFetchIntegrationValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should pass all. + - Case 2: Should fail all: + - First integration should fail due to both first_fetch & max_fetch missing. + - Second integration should fail due to missing first_fetch. + - Third integration should fail due to missing max_fetch. + """ + results = IsValidFetchIntegrationValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +def test_IsValidFetchIntegrationValidator_fix(): + """ + Given + - A fetching integration without max_fetch & first_fetch params. + When + - Calling the IsValidFetchIntegrationValidator fix function. + Then + - Make sure that the params were added to the params list and that the right msg was returned. + """ + content_item = create_integration_object( + paths=["script.isfetch"], + values=[True], + ) + validator = IsValidFetchIntegrationValidator() + validator.missing_fetch_params[content_item.name] = { + MAX_FETCH: MAX_FETCH_PARAM, + FIRST_FETCH: FIRST_FETCH_PARAM, + } + assert ( + validator.fix(content_item).message + == f"Added the following params to the integration: {MAX_FETCH}, {FIRST_FETCH}." 
+ ) + assert all( + [param in content_item.params for param in [MAX_FETCH_PARAM, FIRST_FETCH_PARAM]] + ) + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object(), + create_integration_object( + paths=["script.commands"], + values=[[]], + ), + create_integration_object( + paths=["script.commands", "script.ismappable"], + values=[ + [GET_MAPPING_FIELDS_COMMAND], + True, + ], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["script.ismappable"], + values=[ + True, + ], + ), + ], + 1, + [ + f"The integration is a mappable integration and is missing the {GET_MAPPING_FIELDS_COMMAND_NAME} command. Please add the command." + ], + ), + ], +) +def test_IsValidAsMappableIntegrationValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Three valid integrations: + - One none mappable integration with commands but no get-mapping-fields command. + - One none mappable integration without commands. + - One mappable integration with get-mapping-fields command. + - Case 2: One invalid mappable integration without get-mapping-fields command. + + When + - Calling the IsValidAsMappableIntegrationValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should pass all. + - Case 2: Should fail. + """ + results = IsValidAsMappableIntegrationValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) + + +def test_IsValidAsMappableIntegrationValidator_fix(): + """ + Given + - A mappable integration without get-mapping-fields command. + When + - Calling the IsValidAsMappableIntegrationValidator fix function. 
+ Then + - Make sure that the command was added to the integration and that the right msg was returned. + """ + content_item = create_integration_object( + paths=["script.ismappable"], + values=[ + True, + ], + ) + assert ( + IsValidAsMappableIntegrationValidator().fix(content_item).message + == f"Added the {GET_MAPPING_FIELDS_COMMAND_NAME} command to the integration." + ) + assert [ + GET_MAPPING_FIELDS_COMMAND_NAME in command.name + for command in content_item.commands + ] + + +@pytest.mark.parametrize( + "content_items, expected_number_of_failures, expected_msgs", + [ + ( + [ + create_integration_object(), + create_integration_object( + paths=["script.commands"], + values=[[]], + ), + ], + 0, + [], + ), + ( + [ + create_integration_object( + paths=["script.commands"], + values=[ + [ + { + "name": "test_1", + "arguments": [ + { + "name": "test_1_arg_1", + "default": True, + "description": "test_1_arg_1_description", + }, + { + "name": "test_1_arg_2", + "default": True, + "description": "test_1_arg_2_description", + }, + { + "name": "test_1_arg_3", + "description": "test_1_arg_3_description", + }, + ], + } + ] + ], + ), + ], + 1, + [ + "The following commands have more than 1 default arg, please make sure they have at most one: test_1." + ], + ), + ], +) +def test_IsContainingMultipleDefaultArgsValidator_is_valid( + content_items: List[Integration], + expected_number_of_failures: int, + expected_msgs: List[str], +): + """ + Given + content_items iterables. + - Case 1: Two valid integrations: + - One integration with commands with multiple default args. + - One integration without commands. + - Case 2: One invalid integration with a command with multiple default args. + + When + - Calling the IsContainingMultipleDefaultArgsValidator is valid function. + Then + - Make sure the validation fail when it needs to and the right error message is returned. + - Case 1: Should pass all. + - Case 2: Should fail. 
+ """ + results = IsContainingMultipleDefaultArgsValidator().is_valid(content_items) + assert len(results) == expected_number_of_failures + assert all( + [ + result.message == expected_msg + for result, expected_msg in zip(results, expected_msgs) + ] + ) diff --git a/demisto_sdk/commands/validate/tools.py b/demisto_sdk/commands/validate/tools.py index 8c124e0ae48..35ebcd3530b 100644 --- a/demisto_sdk/commands/validate/tools.py +++ b/demisto_sdk/commands/validate/tools.py @@ -1,8 +1,9 @@ import re -from typing import Dict, List, Set +from typing import Dict, List, Optional, Set from demisto_sdk.commands.common.logger import logger from demisto_sdk.commands.common.tools import get_approved_tags_from_branch +from demisto_sdk.commands.content_graph.objects.integration import Parameter from demisto_sdk.commands.content_graph.objects.playbook import Playbook @@ -101,3 +102,19 @@ def validate_categories_approved(categories: list, approved_list: list): if category not in approved_list: return False return True + + +def find_param(params: List[Parameter], param_to_find: str) -> Optional[Parameter]: + """_summary_ + + Args: + params (List[Parameter]): The integration's params list. + param_to_find (str): The name of the param we wish to find. + + Returns: + dict: The param with the given name or an empty string. 
+ """ + for param in params: + if param.name == param_to_find: + return param + return None diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN100_is_valid_proxy_and_insecure.py b/demisto_sdk/commands/validate/validators/IN_validators/IN100_is_valid_proxy_and_insecure.py new file mode 100644 index 00000000000..99ce18facd0 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN100_is_valid_proxy_and_insecure.py @@ -0,0 +1,104 @@ +from __future__ import annotations + +from typing import ClassVar, Iterable, List, Tuple, Union + +from demisto_sdk.commands.common.constants import COMMON_PARAMS_DISPLAY_NAME +from demisto_sdk.commands.content_graph.objects.integration import ( + Integration, + Parameter, +) +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Integration + + +class IsValidProxyAndInsecureValidator(BaseValidator[ContentTypes]): + error_code = "IN100" + description = "Validate that the proxy & insecure params are configured correctly." + error_message = "The following params are invalid:\n{0}" + fix_message = "Corrected the following params: {0}." + related_field = "configuration" + is_auto_fixable = True + fixed_params: ClassVar[dict] = {} + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format( + "\n".join( + [ + f"The {key} param display name should be '{val['display']}', the 'defaultvalue' field should be 'False', the 'required' field should be 'False', and the 'required' field should be 8." 
+ for key, val in self.fixed_params[content_item.name].items() + ] + ) + ), + content_object=content_item, + ) + for content_item in content_items + if not all( + [ + self.is_valid_param( + content_item.name, content_item.params, ("insecure", "unsecure") + ), + self.is_valid_param( + content_item.name, content_item.params, ("proxy") + ), + ] + ) + ] + + def is_valid_param( + self, + integration_name: str, + params: List[Parameter], + expected_names: Union[str, Tuple], + ) -> bool: + current_param = None + for param in params: + if param.name in expected_names: + current_param = param + break + if current_param and any( + [ + current_param.display != COMMON_PARAMS_DISPLAY_NAME[current_param.name], + current_param.defaultvalue not in (False, "false", None), + current_param.required, + current_param.type != 8, + ] + ): + self.fixed_params[integration_name] = self.fixed_params.get( + integration_name, {} + ) + self.fixed_params[integration_name][current_param.name] = { + "display": COMMON_PARAMS_DISPLAY_NAME[current_param.name], + "defaultvalue": False, + "required": False, + "type": 8, + } + return False + return True + + def fix( + self, + content_item: ContentTypes, + ) -> FixResult: + for key, val in self.fixed_params[content_item.name].items(): + for param in content_item.params: + if param.name == key: + param.display = val["display"] + param.defaultvalue = False + param.required = False + param.type = 8 + break + return FixResult( + validator=self, + message=self.fix_message.format( + ", ".join(list(self.fixed_params[content_item.name].keys())) + ), + content_object=content_item, + ) diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN102_is_valid_checkbox_param.py b/demisto_sdk/commands/validate/validators/IN_validators/IN102_is_valid_checkbox_param.py new file mode 100644 index 00000000000..38932a911ac --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN102_is_valid_checkbox_param.py @@ -0,0 +1,71 @@ +from __future__ 
import annotations + +from typing import ClassVar, Iterable, List + +from demisto_sdk.commands.content_graph.objects.integration import ( + Integration, + Parameter, +) +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Integration + + +class IsValidCheckboxParamValidator(BaseValidator[ContentTypes]): + error_code = "IN102" + description = "Validate that th a checkbox param is configured correctly with required argument set to true." + error_message = "The following checkbox params required field is not set to True: {0}.\nMake sure to set it to True." + fix_message = "Set required field of the following params was set to True: {0}." + related_field = "configuration" + is_auto_fixable = True + misconfigured_checkbox_params_by_integration: ClassVar[dict] = {} + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format( + ", ".join(misconfigured_checkbox_params) + ), + content_object=content_item, + ) + for content_item in content_items + if ( + misconfigured_checkbox_params := self.get_misconfigured_checkbox_params( + content_item.params, content_item.name + ) + ) + ] + + def get_misconfigured_checkbox_params( + self, params: List[Parameter], integration_name: str + ): + self.misconfigured_checkbox_params_by_integration[integration_name] = [ + param.name + for param in params + if param.type == 8 + and param.name not in ("insecure", "unsecure", "proxy", "isFetch") + and not param.required + ] + return self.misconfigured_checkbox_params_by_integration[integration_name] + + def fix(self, content_item: ContentTypes) -> FixResult: + for param in content_item.params: + if ( + param.name + in self.misconfigured_checkbox_params_by_integration[content_item.name] + ): + param.required = True + return FixResult( + validator=self, + message=self.fix_message.format( + ", 
".join( + self.misconfigured_checkbox_params_by_integration[content_item.name] + ) + ), + content_object=content_item, + ) diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN104_is_valid_category.py b/demisto_sdk/commands/validate/validators/IN_validators/IN104_is_valid_category.py new file mode 100644 index 00000000000..45c852be5b7 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN104_is_valid_category.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from typing import Iterable, List + +from demisto_sdk.commands.common.tools import get_current_categories +from demisto_sdk.commands.content_graph.objects.integration import Integration +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + ValidationResult, +) + +ContentTypes = Integration + + +class IsValidCategoryValidator(BaseValidator[ContentTypes]): + error_code = "IN104" + description = "Validate that the Integrations category is valid." + error_message = "The Integration's category ({0}) doesn't match the standard,\nplease make sure that the field is a category from the following options: {1}." 
+ related_field = "category" + is_auto_fixable = False + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + approved_list = get_current_categories() + return [ + ValidationResult( + validator=self, + message=self.error_message.format( + content_item.category or "empty category section", + ", ".join(approved_list), + ), + content_object=content_item, + ) + for content_item in content_items + if content_item.category not in approved_list + ] diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN109_is_id_contain_beta.py b/demisto_sdk/commands/validate/validators/IN_validators/IN109_is_id_contain_beta.py new file mode 100644 index 00000000000..672e79c1c16 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN109_is_id_contain_beta.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import re +from typing import Iterable, List + +from demisto_sdk.commands.content_graph.objects.integration import Integration +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Integration + + +class IsIdContainBetaValidator(BaseValidator[ContentTypes]): + error_code = "IN109" + description = "Validate that the ID field doesn't include the substring 'beta'." + error_message = ( + "The ID field ({0}) contains the word 'beta', make sure to remove it." + ) + fix_message = "Removed the word 'beta' from the ID, the new ID is: {0}." 
+ related_field = "commonfields.id" + is_auto_fixable = True + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format(content_item.object_id), + content_object=content_item, + ) + for content_item in content_items + if content_item.is_beta and "beta" in content_item.object_id.lower() + ] + + def fix(self, content_item: ContentTypes) -> FixResult: + content_item.object_id = re.sub( + "[ \t]+", " ", content_item.object_id.replace("beta", "") + ).strip() + return FixResult( + validator=self, + message=self.fix_message.format(content_item.object_id), + content_object=content_item, + ) diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN110_is_name_contain_beta.py b/demisto_sdk/commands/validate/validators/IN_validators/IN110_is_name_contain_beta.py new file mode 100644 index 00000000000..c7866a70778 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN110_is_name_contain_beta.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import re +from typing import Iterable, List + +from demisto_sdk.commands.content_graph.objects.integration import Integration +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Integration + + +class IsNameContainBetaValidator(BaseValidator[ContentTypes]): + error_code = "IN109" + description = "Validate that the name field doesn't include the substring 'beta'." + error_message = ( + "The name field ({0}) contains the word 'beta', make sure to remove it." + ) + fix_message = "Removed the word 'beta' from the name field, the new name is: {0}." 
+ related_field = "name" + is_auto_fixable = True + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format(content_item.name), + content_object=content_item, + ) + for content_item in content_items + if content_item.is_beta and "beta" in content_item.name.lower() + ] + + def fix(self, content_item: ContentTypes) -> FixResult: + content_item.name = re.sub( + "[ \t]+", " ", content_item.name.replace("beta", "") + ).strip() + return FixResult( + validator=self, + message=self.fix_message.format(content_item.name), + content_object=content_item, + ) diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN112_is_display_contain_beta.py b/demisto_sdk/commands/validate/validators/IN_validators/IN112_is_display_contain_beta.py new file mode 100644 index 00000000000..0b1fe52b8de --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN112_is_display_contain_beta.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from typing import Iterable, List + +from demisto_sdk.commands.content_graph.objects.integration import Integration +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + ValidationResult, +) + +ContentTypes = Integration + + +class IsDisplayContainBetaValidator(BaseValidator[ContentTypes]): + error_code = "IN112" + description = "Validate that the display name contain the substring 'beta'." + error_message = ( + "The display name ({0}) doesn't contain the word 'beta', make sure to add it." 
+    )
+    related_field = "display"
+    is_auto_fixable = False
+
+    def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]:
+        return [
+            ValidationResult(
+                validator=self,
+                message=self.error_message.format(content_item.display_name),
+                content_object=content_item,
+            )
+            for content_item in content_items
+            if content_item.is_beta and "beta" not in content_item.display_name.lower()
+        ]
diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN113_is_command_args_contain_duplications.py b/demisto_sdk/commands/validate/validators/IN_validators/IN113_is_command_args_contain_duplications.py
new file mode 100644
index 00000000000..95a1701716a
--- /dev/null
+++ b/demisto_sdk/commands/validate/validators/IN_validators/IN113_is_command_args_contain_duplications.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+from typing import Dict, Iterable, List
+
+from demisto_sdk.commands.content_graph.objects.integration import Command, Integration
+from demisto_sdk.commands.validate.validators.base_validator import (
+    BaseValidator,
+    ValidationResult,
+)
+
+ContentTypes = Integration
+
+
+class IsCommandArgsContainDuplicationsValidator(BaseValidator[ContentTypes]):
+    error_code = "IN113"
+    description = "Validate that there're no duplicated arguments for the integration's commands."
+    error_message = "The following commands contain duplicated arguments:\n{0}\nPlease make sure to remove the duplications."
+    related_field = "script.commands"
+    is_auto_fixable = False
+
+    def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]:
+        return [
+            ValidationResult(
+                validator=self,
+                message=self.error_message.format(
+                    ".\n".join(
+                        [
+                            f"Command {key}, contains multiple appearances of the following arguments {', '.join(val)}."
+                            for key, val in duplicated_args_by_command.items()
+                        ]
+                    )
+                ),
+                content_object=content_item,
+            )
+            for content_item in content_items
+            if (
+                duplicated_args_by_command := self.is_containing_dups(
+                    content_item.commands
+                )
+            )
+        ]
+
+    def is_containing_dups(self, commands: List[Command]) -> Dict[str, set]:
+        duplicated_args_by_command: Dict[str, set] = {}
+        for command in commands:
+            appeared_set = set()
+            duplicated_args = set()
+            # Track names seen within THIS command only; a repeat is a duplicate.
+            for arg in command.args:
+                if arg.name in appeared_set:
+                    duplicated_args.add(arg.name)
+                else:
+                    appeared_set.add(arg.name)
+            if duplicated_args:
+                duplicated_args_by_command[command.name] = duplicated_args
+        return duplicated_args_by_command
diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN114_is_params_contain_duplications.py b/demisto_sdk/commands/validate/validators/IN_validators/IN114_is_params_contain_duplications.py
new file mode 100644
index 00000000000..b0850fac0f1
--- /dev/null
+++ b/demisto_sdk/commands/validate/validators/IN_validators/IN114_is_params_contain_duplications.py
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+from typing import Iterable, List, Set
+
+from demisto_sdk.commands.content_graph.objects.integration import (
+    Integration,
+    Parameter,
+)
+from demisto_sdk.commands.validate.validators.base_validator import (
+    BaseValidator,
+    ValidationResult,
+)
+
+ContentTypes = Integration
+
+
+class IsParamsContainDuplicationsValidator(BaseValidator[ContentTypes]):
+    error_code = "IN114"
+    description = "Validate that there're no duplicated params for the integration."
+    error_message = "The following params are duplicated: {0}.\nPlease make sure your file doesn't contain duplications."
+    related_field = "configuration"
+    is_auto_fixable = False
+
+    def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]:
+        return [
+            ValidationResult(
+                validator=self,
+                message=self.error_message.format(", ".join(duplicated_param_names)),
+                content_object=content_item,
+            )
+            for content_item in content_items
+            if (duplicated_param_names := self.is_containing_dups(content_item.params))
+        ]
+
+    def is_containing_dups(self, params: List[Parameter]) -> Set[str]:
+        appeared_set = set()
+        dups_set: Set[str] = set()
+        for param in params:
+            if param.name in appeared_set:
+                dups_set.add(param.name)
+            else:
+                appeared_set.add(param.name)
+        return dups_set
diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN115_is_valid_context_path.py b/demisto_sdk/commands/validate/validators/IN_validators/IN115_is_valid_context_path.py
new file mode 100644
index 00000000000..f6cb2597fcb
--- /dev/null
+++ b/demisto_sdk/commands/validate/validators/IN_validators/IN115_is_valid_context_path.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from typing import Iterable, List
+
+from demisto_sdk.commands.content_graph.objects.integration import Command, Integration
+from demisto_sdk.commands.validate.validators.base_validator import (
+    BaseValidator,
+    ValidationResult,
+)
+
+ContentTypes = Integration
+
+
+class IsValidContextPathValidator(BaseValidator[ContentTypes]):
+    error_code = "IN115"
+    description = (
+        "Validate that the contextPath field of each output is in the right format."
+    )
+    error_message = "The following commands include outputs with missing contextPath, please make sure to add: {0}."
+ related_field = "contextPath" + is_auto_fixable = False + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format( + ", ".join(commands_missing_context_path) + ), + content_object=content_item, + ) + for content_item in content_items + if ( + commands_missing_context_path := [ + command.name + for command in content_item.commands + if self.is_command_missing_context_path(command) + ] + ) + ] + + def is_command_missing_context_path(self, command: Command) -> bool: + """Validate that all outputs entry has contextPath key for a given command. + + Args: + command (Command): The command to run on + + Returns: + (bool): True if the an output entry is missing a contextPath. Otherwise, return False. + """ + for output in command.outputs: + if not output.contextPath: + return True + return False diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN117_should_have_display_field.py b/demisto_sdk/commands/validate/validators/IN_validators/IN117_should_have_display_field.py new file mode 100644 index 00000000000..6d4c72cfdfd --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN117_should_have_display_field.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +from typing import ClassVar, Iterable, List + +from demisto_sdk.commands.common.constants import ParameterType +from demisto_sdk.commands.content_graph.objects.integration import ( + Integration, + Parameter, +) +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Integration + + +class ShouldHaveDisplayFieldValidator(BaseValidator[ContentTypes]): + error_code = "IN117" + description = ( + "Validate that type 17 configuration params doesn't include the display field." 
+ ) + error_message = "The following params are expiration fields and therefore can't have a 'display' field. Make sure to remove the field for the following: {0}." + fix_message = "Removed display field for the following params: {0}." + related_field = "display, type" + is_auto_fixable = True + invalid_params: ClassVar[dict] = {} + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format(", ".join(invalid_params)), + content_object=content_item, + ) + for content_item in content_items + if ( + invalid_params := self.get_invalid_params( + content_item.params, content_item.name + ) + ) + ] + + def get_invalid_params( + self, params: List[Parameter], integration_name: str + ) -> List[str]: + """Validate that all the params are not of type 17 and include a display field. + + Args: + params (List[dict]): The integration params. + integration_name (str): The name of the integration. + + Returns: + List[str]: The list of the names of the params that are not valid. 
+ """ + self.invalid_params[integration_name] = [ + param.name + for param in params + if param.type == ParameterType.EXPIRATION_FIELD.value and param.display + ] + return self.invalid_params.get(integration_name, []) + + def fix(self, content_item: ContentTypes) -> FixResult: + invalid_params = self.invalid_params[content_item.name] + for param in content_item.params: + if param.name in invalid_params: + param.display = None + return FixResult( + validator=self, + message=self.fix_message.format(", ".join(invalid_params)), + content_object=content_item, + ) diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN118_is_missing_display_field.py b/demisto_sdk/commands/validate/validators/IN_validators/IN118_is_missing_display_field.py new file mode 100644 index 00000000000..018c9d11e40 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN118_is_missing_display_field.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +from typing import Iterable, List + +from demisto_sdk.commands.common.constants import ParameterType +from demisto_sdk.commands.content_graph.objects.integration import ( + Integration, + Parameter, +) +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + ValidationResult, +) + +ContentTypes = Integration + + +class IsMissingDisplayFieldValidator(BaseValidator[ContentTypes]): + error_code = "IN118" + description = "Validate that the integration parameter has a display field if it's not of type 17." + error_message = "The following params doesn't have a display field, please make sure to add one: {0}." 
+ related_field = "display, displaypassowrd" + is_auto_fixable = False + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format(", ".join(invalid_params)), + content_object=content_item, + ) + for content_item in content_items + if (invalid_params := self.get_invalid_params(content_item.params)) + ] + + def get_invalid_params(self, params: List[Parameter]) -> List[str]: + """Validate that all the relevant params have a display name. + + Args: + params (List[dict]): The integration params. + + Returns: + List[str]: The list of the names of the params that are not valid. + """ + return [ + param.name + for param in params + if param.type != ParameterType.EXPIRATION_FIELD.value + and not param.hidden + and not param.display + and not param.displaypassword + and param.name not in ("feedExpirationPolicy", "feedExpirationInterval") + ] diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN125_is_valid_max_fetch_param.py b/demisto_sdk/commands/validate/validators/IN_validators/IN125_is_valid_max_fetch_param.py new file mode 100644 index 00000000000..3984018a9b1 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN125_is_valid_max_fetch_param.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +from typing import Iterable, List + +from demisto_sdk.commands.common.constants import DEFAULT_MAX_FETCH, MAX_FETCH +from demisto_sdk.commands.content_graph.objects.integration import Integration +from demisto_sdk.commands.validate.tools import find_param +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Integration + + +class IsValidMaxFetchParamValidator(BaseValidator[ContentTypes]): + error_code = "IN125" + description = "Validate that the max_fetch param has a defaultvalue" + error_message = "The integration is a fetch 
integration with max_fetch param, please make sure the max_fetch param has a default value." + fix_message = ( + f"Added a 'defaultvalue = {DEFAULT_MAX_FETCH}' to the max_fetch param." + ) + related_field = "defaultvalue" + is_auto_fixable = True + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format(), + content_object=content_item, + ) + for content_item in content_items + if content_item.is_fetch + and (max_fetch_param := find_param(content_item.params, MAX_FETCH)) + and not max_fetch_param.defaultvalue + ] + + def fix(self, content_item: ContentTypes) -> FixResult: + max_fetch_param = find_param(content_item.params, MAX_FETCH) + if max_fetch_param: + max_fetch_param.defaultvalue = DEFAULT_MAX_FETCH + return FixResult( + validator=self, + message=self.fix_message.format(), + content_object=content_item, + ) diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN126_is_valid_fetch_integration.py b/demisto_sdk/commands/validate/validators/IN_validators/IN126_is_valid_fetch_integration.py new file mode 100644 index 00000000000..d2face2a712 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN126_is_valid_fetch_integration.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from typing import ClassVar, Iterable, List + +from demisto_sdk.commands.common.constants import ( + FIRST_FETCH, + FIRST_FETCH_PARAM, + MAX_FETCH, + MAX_FETCH_PARAM, +) +from demisto_sdk.commands.content_graph.objects.integration import ( + Integration, + Parameter, +) +from demisto_sdk.commands.validate.tools import find_param +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Integration + + +class IsValidFetchIntegrationValidator(BaseValidator[ContentTypes]): + error_code = "IN126" + description = "Validate that a fetch integration is not 
missing the first_fetch & max_fetch params." + error_message = ( + "The integration is a fetch integration and missing the following params: {0}." + ) + fix_message = "Added the following params to the integration: {0}." + related_field = "configurations." + is_auto_fixable = True + missing_fetch_params: ClassVar[dict] = {} + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format( + ", ".join(list(missing_params.keys())) + ), + content_object=content_item, + ) + for content_item in content_items + if content_item.is_fetch + and ( + missing_params := self.is_valid_fetch_integration( + content_item.name, content_item.params + ) + ) + ] + + def is_valid_fetch_integration( + self, integration_name: str, params: List[Parameter] + ) -> dict: + """_summary_ + + Args: + integration_name (str): The name of the current integration to validate. + params (List[dict]): The list of the integration params. + + Returns: + dict: The missing param by param_name: param_entity. 
+ """ + self.missing_fetch_params[integration_name] = { + key: val + for key, val in { + MAX_FETCH: MAX_FETCH_PARAM, + FIRST_FETCH: FIRST_FETCH_PARAM, + }.items() + if not find_param(params, key) + } + return self.missing_fetch_params.get(integration_name, {}) + + def fix(self, content_item: ContentTypes) -> FixResult: + for missing_param in self.missing_fetch_params[content_item.name].values(): + content_item.params.append(missing_param) + return FixResult( + validator=self, + message=self.fix_message.format( + ", ".join(list(self.missing_fetch_params[content_item.name].keys())) + ), + content_object=content_item, + ) diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN131_is_valid_as_mappable_integration.py b/demisto_sdk/commands/validate/validators/IN_validators/IN131_is_valid_as_mappable_integration.py new file mode 100644 index 00000000000..06cdc75bc18 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN131_is_valid_as_mappable_integration.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from typing import Iterable, List + +from demisto_sdk.commands.common.constants import ( + GET_MAPPING_FIELDS_COMMAND, + GET_MAPPING_FIELDS_COMMAND_NAME, +) +from demisto_sdk.commands.content_graph.objects.integration import Command, Integration +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Integration + + +class IsValidAsMappableIntegrationValidator(BaseValidator[ContentTypes]): + error_code = "IN131" + description = "Validate that the integration is valid as a mappable integration." + error_message = f"The integration is a mappable integration and is missing the {GET_MAPPING_FIELDS_COMMAND_NAME} command. Please add the command." + fix_message = ( + f"Added the {GET_MAPPING_FIELDS_COMMAND_NAME} command to the integration." 
+ ) + related_field = "ismappable, commands" + is_auto_fixable = True + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format(), + content_object=content_item, + ) + for content_item in content_items + if content_item.is_mappable + and not any( + [ + command.name == GET_MAPPING_FIELDS_COMMAND_NAME + for command in content_item.commands + ] + ) + ] + + def fix(self, content_item: ContentTypes) -> FixResult: + content_item.commands.append(Command(**GET_MAPPING_FIELDS_COMMAND)) + return FixResult( + validator=self, + message=self.fix_message, + content_object=content_item, + ) diff --git a/demisto_sdk/commands/validate/validators/IN_validators/IN134_is_containing_multiple_default_args.py b/demisto_sdk/commands/validate/validators/IN_validators/IN134_is_containing_multiple_default_args.py new file mode 100644 index 00000000000..241b77e2d5a --- /dev/null +++ b/demisto_sdk/commands/validate/validators/IN_validators/IN134_is_containing_multiple_default_args.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from typing import Iterable, List + +from demisto_sdk.commands.content_graph.objects.integration import Integration +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + ValidationResult, +) + +ContentTypes = Integration + + +class IsContainingMultipleDefaultArgsValidator(BaseValidator[ContentTypes]): + error_code = "IN134" + description = ( + "Validate that there're no more than 1 default argument for a command." + ) + error_message = "The following commands have more than 1 default arg, please make sure they have at most one: {0}." 
+ related_field = "default" + is_auto_fixable = False + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format(", ".join(multiple_args_command)), + content_object=content_item, + ) + for content_item in content_items + if ( + multiple_args_command := [ + command.name + for command in content_item.commands + if len([arg.name for arg in command.args if arg.default]) >= 2 + ] + ) + ] diff --git a/demisto_sdk/scripts/changelog/changelog.py b/demisto_sdk/scripts/changelog/changelog.py index 3006be70830..68144d75e16 100755 --- a/demisto_sdk/scripts/changelog/changelog.py +++ b/demisto_sdk/scripts/changelog/changelog.py @@ -237,13 +237,13 @@ def _validate_branch(pr_number: str) -> None: if is_changelog_modified(): raise ValueError( "Do not modify changelog.md\n" - "Run `demisto-sdk changelog --init -n `" + "Run `sdk-changelog --init -n `" " to create a changelog file instead." ) if not is_log_yml_exist(pr_number): raise ValueError( "Missing changelog file.\n" - "Run `demisto-sdk changelog --init -n ` and fill it." + "Run `sdk-changelog --init -n ` and fill it." 
) validate_log_yml(pr_number) diff --git a/demisto_sdk/scripts/merge_pytest_reports.py b/demisto_sdk/scripts/merge_pytest_reports.py index 9252355c581..a98c17d2ad9 100644 --- a/demisto_sdk/scripts/merge_pytest_reports.py +++ b/demisto_sdk/scripts/merge_pytest_reports.py @@ -67,8 +67,11 @@ def merge_coverage_report(): coverage_path = CONTENT_PATH / ".coverage" coverage_path.unlink(missing_ok=True) cov = coverage.Coverage(data_file=coverage_path) - coverage_paths = CONTENT_PATH / ".pre-commit" / "coverage" - if not coverage_path.exists() or not (files := list(coverage_paths.iterdir())): + # this is the path where the pre-commit created the coverage files + created_coverage_path = CONTENT_PATH / ".pre-commit" / "coverage" + if not created_coverage_path.exists() or not ( + files := list(created_coverage_path.iterdir()) + ): logger.warning("No coverage files found, skipping coverage report.") return fixed_files = [str(file) for file in files if fix_coverage_report_path(Path(file))] diff --git a/demisto_sdk/tests/integration_tests/logger_integration_test.py b/demisto_sdk/tests/integration_tests/logger_integration_test.py index e29647164f8..38de7c01125 100644 --- a/demisto_sdk/tests/integration_tests/logger_integration_test.py +++ b/demisto_sdk/tests/integration_tests/logger_integration_test.py @@ -1,3 +1,4 @@ +import logging import shutil from pathlib import Path from tempfile import mkdtemp @@ -5,14 +6,15 @@ from click.testing import CliRunner from demisto_sdk.__main__ import main +from TestSuite.test_tools import str_in_call_args_list TEMP_DIRS_PREFIX = "demisto-sdk-test-" -def test_logs_dir_default_value(mocker): +def test_default_logs_dir(mocker): """ Given: - demisto-sdk command with no log path set. + demisto-sdk command with no custom log path set. When: Running demisto-sdk command. @@ -20,7 +22,7 @@ def test_logs_dir_default_value(mocker): Then: Ensure logs are created at the default location, and that the path is created if it doesn't exist. 
""" - root_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)) + root_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() logs_dir_replacement = root_dir / "logs" mocker.patch( "demisto_sdk.commands.common.logger.LOGS_DIR", new=logs_dir_replacement @@ -36,7 +38,7 @@ def test_logs_dir_default_value(mocker): shutil.rmtree(root_dir) -def test_logs_dir_set_by_flag(mocker): +def test_custom_logs_dir_set_by_flag(mocker): """ Given: demisto-sdk command with log path set by flag. @@ -47,8 +49,8 @@ def test_logs_dir_set_by_flag(mocker): Then: Ensure logs are created at the specified location. """ - default_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)) - custom_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)) + default_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() + custom_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() mocker.patch("demisto_sdk.commands.common.logger.LOGS_DIR", new=default_logs_dir) runner = CliRunner(mix_stderr=False) @@ -64,7 +66,7 @@ def test_logs_dir_set_by_flag(mocker): shutil.rmtree(custom_logs_dir) -def test_logs_dir_set_by_env_var(mocker, monkeypatch): +def test_custom_logs_dir_set_by_env_var(mocker, monkeypatch): """ Given: demisto-sdk command with log path set by an environment variable. @@ -75,8 +77,8 @@ def test_logs_dir_set_by_env_var(mocker, monkeypatch): Then: Ensure logs are created at the specified location. 
""" - default_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)) - custom_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)) + default_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() + custom_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() monkeypatch.setenv("DEMISTO_SDK_LOG_FILE_PATH", str(custom_logs_dir)) mocker.patch("demisto_sdk.commands.common.logger.LOGS_DIR", new=default_logs_dir) @@ -93,31 +95,77 @@ def test_logs_dir_set_by_env_var(mocker, monkeypatch): shutil.rmtree(custom_logs_dir) -def test_invalid_logs_dir(mocker): +def test_custom_logs_dir_does_not_exist(mocker): """ Given: - demisto-sdk command with invalid log path set. + demisto-sdk command with a non-existent log path set by flag. When: Running demisto-sdk command. Then: - Ensure a proper error message is printed. + Ensure that the path is created, logs are created at the specified location, and a warning is printed. """ - default_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)) + default_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() + custom_logs_dir = ( + Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() / "non-existent-dir" + ) + mocker.patch("demisto_sdk.commands.common.logger.LOGS_DIR", new=default_logs_dir) - invalid_path = "this/path/doesnt/exist" + logger_warning = mocker.patch.object(logging.getLogger("demisto-sdk"), "warning") runner = CliRunner(mix_stderr=False) - result = runner.invoke( - main, ["validate", "-a", "--log-file-path", str(invalid_path)] - ) + runner.invoke(main, ["validate", "-a", "--log-file-path", str(custom_logs_dir)]) - assert result.exit_code == 1 + assert str_in_call_args_list( + logger_warning.call_args_list, + f"Log file path '{custom_logs_dir}' does not exist and will be created.", + ) + assert list(custom_logs_dir.glob("*")) == [ + custom_logs_dir / "demisto_sdk_debug.log" + ] assert ( - f"Error: Configured logs path '{invalid_path}' does not exist." 
in result.stdout + len(list(default_logs_dir.glob("*"))) == 0 + ) # Assure default logs dir is not used + default_logs_dir.rmdir() + shutil.rmtree(custom_logs_dir) + + +def test_logs_dir_not_a_dir(mocker): + """ + Given: + demisto-sdk command with a log path set by flag that points to a file. + + When: + Running demisto-sdk command. + + Then: + Ensure that the path is created, logs are created at the parent directory, and a warning is printed. + """ + default_logs_dir = Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() + custom_logs_dir = ( + Path(mkdtemp(prefix=TEMP_DIRS_PREFIX)).resolve() / "some_random_file" ) + custom_logs_dir_parent = custom_logs_dir.parent + custom_logs_dir.touch() + + mocker.patch("demisto_sdk.commands.common.logger.LOGS_DIR", new=default_logs_dir) + logger_warning = mocker.patch.object(logging.getLogger("demisto-sdk"), "warning") + + runner = CliRunner(mix_stderr=False) + runner.invoke(main, ["validate", "-a", "--log-file-path", str(custom_logs_dir)]) + + assert str_in_call_args_list( + logger_warning.call_args_list, + f"Log file path '{custom_logs_dir}' is a file and not a directory. Log file will be created in " + f"parent directory '{custom_logs_dir_parent}'.", + ) + assert list(custom_logs_dir_parent.glob("*")) == [ + custom_logs_dir, # The random file we created + custom_logs_dir_parent / "demisto_sdk_debug.log", + ] assert ( len(list(default_logs_dir.glob("*"))) == 0 ) # Assure default logs dir is not used default_logs_dir.rmdir() + shutil.rmtree(custom_logs_dir_parent)