diff --git a/.changelog/4139.yml b/.changelog/4139.yml new file mode 100644 index 0000000000..9e9432d6a4 --- /dev/null +++ b/.changelog/4139.yml @@ -0,0 +1,4 @@ +changes: +- description: Added the `defaultDataSource` field to the pack metadata. + type: feature +pr_number: 4139 diff --git a/demisto_sdk/commands/common/content/tests/objects/pack_objects/pack_metadata/pack_metadata_test.py b/demisto_sdk/commands/common/content/tests/objects/pack_objects/pack_metadata/pack_metadata_test.py index 5fdeaafabf..64deea73fc 100644 --- a/demisto_sdk/commands/common/content/tests/objects/pack_objects/pack_metadata/pack_metadata_test.py +++ b/demisto_sdk/commands/common/content/tests/objects/pack_objects/pack_metadata/pack_metadata_test.py @@ -19,8 +19,13 @@ from demisto_sdk.commands.common.content.objects.pack_objects import PackMetaData from demisto_sdk.commands.common.content.objects.pack_objects.pack import Pack from demisto_sdk.commands.common.content.objects_factory import path_to_pack_object +from demisto_sdk.commands.common.docker.docker_image import DockerImage from demisto_sdk.commands.common.logger import logger from demisto_sdk.commands.common.tools import src_root +from demisto_sdk.commands.content_graph.common import ( + ContentType, +) +from demisto_sdk.commands.content_graph.objects.integration import Command, Integration from demisto_sdk.commands.content_graph.objects.pack_content_items import ( PackContentItems, ) @@ -530,3 +535,286 @@ def test_replace_item_if_has_higher_toversion( integration, content_item_metadata, integration.summary(), marketplace ) assert content_item_metadata["toversion"] == expected_toversion + + +def mock_integration_for_data_source( + name, + display_name, + is_fetch=False, + is_fetch_events=False, + is_remote_sync_in=False, + is_fetch_samples=False, + is_feed=False, + deprecated=False, + marketplaces=MarketplaceVersions.MarketplaceV2, + path=Path("Packs"), +): + if not isinstance(marketplaces, list): + marketplaces = [marketplaces] + return Integration( + id=name, + content_type=ContentType.INTEGRATION, + node_id=f"{ContentType.INTEGRATION}:{name}", + path=path, + fromversion="5.0.0", + toversion="99.99.99", + display_name=display_name, + name=name, + marketplaces=marketplaces, + deprecated=deprecated, + type="python3", + docker_image=DockerImage("demisto/python3:3.10.11.54799"), + category="blabla", + commands=[Command(name="test-command", description="")], + is_fetch=is_fetch, + is_fetch_events=is_fetch_events, + is_remote_sync_in=is_remote_sync_in, + is_fetch_samples=is_fetch_samples, + is_feed=is_feed, + ) + + +@pytest.mark.parametrize( + "support_level, include_name, expected_output", + ( + ( + None, + False, + [ + "Partner Contribution Same Name", + "partner_contribution_different_name", + "regular_integration_different_name", + "Samples Fetch", + "Some Mirroring", + ], + ), + ( + "partner", + False, + [ + "Partner Contribution Same Name", + "partner_contribution_different_name", + "regular_integration_different_name", + "Samples Fetch", + "Some Mirroring", + ], + ), + ( + None, + True, + [ + { + "id": "Partner Contribution Same Name", + "name": "Partner Contribution Same Name (Partner Contribution)", + }, + { + "id": "partner_contribution_different_name", + "name": "Partner Contribution Different Name (Partner Contribution)", + }, + { + "id": "regular_integration_different_name", + "name": "Regular Integration Different Name", + }, + {"id": "Samples Fetch", "name": "Samples Fetch"}, + {"id": "Some Mirroring", "name": "Some Mirroring"}, + ], + ), + ( + 
"partner", + True, + [ + { + "id": "Partner Contribution Same Name", + "name": "Partner Contribution Same Name", + }, + { + "id": "partner_contribution_different_name", + "name": "Partner Contribution Different Name", + }, + { + "id": "regular_integration_different_name", + "name": "Regular Integration Different Name", + }, + {"id": "Samples Fetch", "name": "Samples Fetch"}, + {"id": "Some Mirroring", "name": "Some Mirroring"}, + ], + ), + ), +) +def test_get_valid_data_source_integrations( + support_level, include_name, expected_output +): + """ + Given: + - Support level and whether to include the name + + When: + - Getting valid data source integrations for a pack + + Then: + - The correct data source integration return, with the expected name + """ + integrations = [ + mock_integration_for_data_source( + "Partner Contribution Same Name", + "Partner Contribution Same Name (Partner Contribution)", + is_fetch=True, + ), + mock_integration_for_data_source( + "partner_contribution_different_name", + "Partner Contribution Different Name (Partner Contribution)", + is_fetch_events=True, + ), + mock_integration_for_data_source( + "regular_integration_different_name", + "Regular Integration Different Name", + is_fetch=True, + ), + mock_integration_for_data_source( + "Not Fetching Integration", "Not Fetching Integration", is_fetch=False + ), + mock_integration_for_data_source( + "Deprecated Integration", + "Deprecated Integration", + is_fetch=True, + deprecated=True, + ), + mock_integration_for_data_source( + "Not XSIAM Integration", + "Not XSIAM Integration", + is_fetch=True, + marketplaces=MarketplaceVersions.XSOAR_ON_PREM, + ), + mock_integration_for_data_source( + "Some Feed", "Some Feed", is_fetch=True, is_feed=True + ), + mock_integration_for_data_source( + "Samples Fetch", "Samples Fetch", is_fetch_samples=True + ), + mock_integration_for_data_source( + "Some Mirroring", "Some Mirroring", is_remote_sync_in=True + ), + ] + + content_items = PackContentItems() + content_items.integration.extend(integrations) + + result = PackMetadata.get_valid_data_source_integrations( + content_items, support_level, include_name + ) + assert result == expected_output + + +@pytest.mark.parametrize( + "support, given_default_data_source_id, integrations, expected_default_data_source", + ( + ( + "partner", + "partner_support", + [ + mock_integration_for_data_source( + "partner_support", + "Partner Support (Partner Contribution)", + is_fetch=True, + ), + mock_integration_for_data_source( + "other_partner_support", + "Other Partner Support (Partner Contribution)", + is_fetch=True, + ), + ], + {"id": "partner_support", "name": "Partner Support"}, + ), + ( + "xsoar", + "xsoar_support", + [ + mock_integration_for_data_source( + "xsoar_support", "XSOAR Support", is_fetch=True + ), + mock_integration_for_data_source( + "Other XSOAR Support", "Other XSOAR Support", is_fetch=True + ), + ], + {"id": "xsoar_support", "name": "XSOAR Support"}, + ), + ( + "xsoar", + None, + [ + mock_integration_for_data_source( + "One Fetching Integration", + "One Fetching Integration", + is_fetch=True, + ), + mock_integration_for_data_source( + "Not Fetching Integration", + "Not Fetching Integration", + is_fetch=False, + ), + mock_integration_for_data_source( + "Deprecated Integration", + "Deprecated Integration", + is_fetch=True, + deprecated=True, + ), + mock_integration_for_data_source( + "Feed Integration", "Feed Integration", is_fetch=True, is_feed=True + ), + ], + {"id": "One Fetching Integration", "name": "One Fetching 
Integration"}, + ), + ), +) +def test_set_default_data_source( + support, given_default_data_source_id, integrations, expected_default_data_source +): + """ + Given: + - Support level, default data source name and id, pack integrations names and ids + + When: + - Setting a default data source to a pack + + Then: + - The correct data source integration is set + """ + content_items, my_instance = mock_pack_metadata_for_data_source( + support=support, + default_data_source=given_default_data_source_id, + integrations=integrations, + ) + + my_instance._set_default_data_source(content_items) + assert my_instance.default_data_source_id == expected_default_data_source.get("id") + assert my_instance.default_data_source_name == expected_default_data_source.get( + "name" + ) + + +def mock_pack_metadata_for_data_source(support, default_data_source, integrations): + my_instance = PackMetadata( + name="test", + display_name="", + description="", + created="", + legacy=False, + support=support, + url="", + email="", + eulaLink="", + price=0, + hidden=False, + commit="", + downloads=0, + keywords=[], + searchRank=0, + excludedDependencies=[], + videos=[], + modules=[], + default_data_source_id=default_data_source, + ) # type: ignore + + content_items = PackContentItems() + content_items.integration.extend(integrations) + return content_items, my_instance diff --git a/demisto_sdk/commands/content_graph/objects/integration.py b/demisto_sdk/commands/content_graph/objects/integration.py index 9c09677966..3367ed2af3 100644 --- a/demisto_sdk/commands/content_graph/objects/integration.py +++ b/demisto_sdk/commands/content_graph/objects/integration.py @@ -102,8 +102,10 @@ class Integration(IntegrationScript, content_type=ContentType.INTEGRATION): # t is_fetch_events: bool = Field(False, alias="isfetchevents") is_fetch_assets: bool = Field(False, alias="isfetchassets") is_fetch_events_and_assets: bool = False + is_fetch_samples: bool = False is_feed: bool = False is_beta: bool = False + is_remote_sync_in: bool = False is_mappable: bool = False long_running: bool = False category: str @@ -218,3 +220,17 @@ def light_svg(self) -> LightSVGRelatedFile: @cached_property def image(self) -> ImageRelatedFile: return ImageRelatedFile(self.path, git_sha=self.git_sha) + + def is_data_source(self): + return ( + MarketplaceVersions.MarketplaceV2 in self.marketplaces + and not self.deprecated + and not self.is_feed + and ( + self.is_fetch + or self.is_fetch_events + or self.is_remote_sync_in + or self.is_fetch_events_and_assets + or self.is_fetch_samples + ) + ) diff --git a/demisto_sdk/commands/content_graph/objects/pack_metadata.py b/demisto_sdk/commands/content_graph/objects/pack_metadata.py index 43abe00871..e34ad79cdf 100644 --- a/demisto_sdk/commands/content_graph/objects/pack_metadata.py +++ b/demisto_sdk/commands/content_graph/objects/pack_metadata.py @@ -1,6 +1,6 @@ import os from datetime import datetime -from typing import Dict, List, Optional, Tuple +from typing import Dict, List, Optional, Tuple, Union from packaging.version import Version, parse from pydantic import BaseModel, Field @@ -19,6 +19,9 @@ from demisto_sdk.commands.content_graph.objects.content_item import ContentItem from demisto_sdk.commands.content_graph.objects.pack import PackContentItems from demisto_sdk.commands.content_graph.objects.relationship import RelationshipData +from demisto_sdk.commands.prepare_content.integration_script_unifier import ( + IntegrationScriptUnifier, +) MINIMAL_UPLOAD_SUPPORTED_VERSION = Version("6.5.0") 
MINIMAL_ALLOWED_SKIP_VALIDATION_VERSION = Version("6.6.0") @@ -57,6 +60,8 @@ class PackMetadata(BaseModel): modules: List[str] = Field([]) integrations: List[str] = Field([]) hybrid: bool = Field(False, alias="hybrid") + default_data_source_id: Optional[str] = Field("", alias="defaultDataSource") + default_data_source_name: Optional[str] = Field("", exclude=True) # For private packs premium: Optional[bool] @@ -84,6 +89,7 @@ def _enhance_pack_properties( pack_id (str): The pack ID. content_items (PackContentItems): The pack content items object. """ + self._set_default_data_source(content_items) self.tags = self._get_pack_tags(marketplace, pack_id, content_items) self.author = self._get_author(self.author, marketplace) # We want to add the pipeline_id only if this is called within our repo. @@ -103,6 +109,7 @@ def _format_metadata( - Adding the content items display names. - Gathering the pack dependencies and adding the metadata. - Unifying the `url` and `email` into the `support_details` property. + - Adding the default data source if exists. Args: marketplace (MarketplaceVersions): The marketplace to which the pack should belong to. @@ -119,17 +126,48 @@ def _format_metadata( content_displays, ) = self._get_content_items_and_displays_metadata(marketplace, content_items) + default_data_source_value = ( + { + "name": self.default_data_source_name, + "id": self.default_data_source_id, + } + if self.default_data_source_name + and self.default_data_source_id + and marketplace == MarketplaceVersions.MarketplaceV2 + else None # if the pack is multiple marketplace, override the initially set str default_data_source_id + ) + _metadata.update( { "contentItems": collected_content_items, "contentDisplays": content_displays, "dependencies": self._enhance_dependencies(marketplace, dependencies), "supportDetails": self._get_support_details(), + "defaultDataSource": default_data_source_value, } ) return _metadata + @staticmethod + def _place_data_source_integration_first( + integration_list: List[Dict], + data_source_id: str, + ): + integration_metadata_object = [ + integration + for integration in integration_list + if integration.get("id") == data_source_id + ] + + if not integration_metadata_object: + logger.error( + f"Integration metadata object was not found for {data_source_id=} in {integration_list=}." 
+ ) + logger.info(f"Placing {data_source_id=} first in the integration_list.") + integration_list.remove(integration_metadata_object[0]) + integration_list.insert(0, integration_metadata_object[0]) + def _get_content_items_and_displays_metadata( self, marketplace: MarketplaceVersions, content_items: PackContentItems ) -> Tuple[Dict, Dict]: @@ -174,7 +212,17 @@ def _get_content_items_and_displays_metadata( else f"{content_type_display}s" for content_type, content_type_display in content_displays.items() } - + if ( + self.default_data_source_id + and self.default_data_source_name + and collected_content_items + and marketplace == MarketplaceVersions.MarketplaceV2 + ): + # order collected_content_items integration list so that the defaultDataSource will be first + self._place_data_source_integration_first( + collected_content_items[ContentType.INTEGRATION.metadata_name], + self.default_data_source_id, + ) return collected_content_items, content_displays def _enhance_dependencies( @@ -282,7 +330,7 @@ def _get_pack_tags( ) tags |= ( {PackTags.DATA_SOURCE} - if self._is_data_source(content_items) + if self.is_data_source(content_items) and marketplace == MarketplaceVersions.MarketplaceV2 else set() ) @@ -316,19 +364,76 @@ def _get_tags_by_marketplace(self, marketplace: str): return tags - def _is_data_source(self, content_items: PackContentItems) -> bool: - """Returns a boolean result on whether the pack should considered as a "Data Source" pack.""" - return ( - len( - [ - MarketplaceVersions.MarketplaceV2 in integration.marketplaces - and (integration.is_fetch or integration.is_fetch_events) - for integration in content_items.integration - ] + def is_data_source(self, content_items: PackContentItems) -> bool: + """Returns a boolean result on whether the pack should be considered as a "Data Source" pack.""" + if self.default_data_source_id and self.default_data_source_name: + return True + return any(self.get_valid_data_source_integrations(content_items)) + + def _set_default_data_source(self, content_items: PackContentItems) -> None: + """If there is more than one data source in the pack, return the default data source.""" + data_sources: List[Dict[str, str]] = self.get_valid_data_source_integrations( # type: ignore[assignment] + content_items, self.support, include_name=True + ) + + if self.default_data_source_id and self.default_data_source_id in [ + data_source.get("id") for data_source in data_sources + ]: + # the provided default_data_source_id is of a valid integration, keep it + self.default_data_source_name = [ + data_source.get("name") + for data_source in data_sources + if data_source.get("id") == self.default_data_source_id + ][0] + logger.info( + f"Keeping the provided {self.default_data_source_id=} with {self.default_data_source_name=}" + ) + return + + if not data_sources: + return + + logger.info( + f"No defaultDataSource provided ({self.default_data_source_id}) or it is not a valid data source," + f" choosing default from {data_sources=}" + ) + if len(data_sources) > 1: + # should not happen because of validation PA131 + logger.info( + f"{self.name} has multiple data sources. Setting a default value." 
) - == 1 + + # setting a value to the defaultDataSource in case there is a data source + self.default_data_source_name = ( + data_sources[0].get("name") if data_sources else None + ) + self.default_data_source_id = ( + data_sources[0].get("id") if data_sources else None ) + @staticmethod + def get_valid_data_source_integrations( + content_items: PackContentItems, + support_level: str = None, + include_name: bool = False, + ) -> List[Union[Dict[str, str], str]]: + """ + Find fetching integrations in XSIAM, not deprecated. + When a support level is provided, the returned display names are without the contribution suffix. + """ + return [ + { + "name": IntegrationScriptUnifier.remove_support_from_display_name( + integration.display_name, support_level + ), + "id": integration.object_id, # same as integration.name + } + if include_name + else integration.object_id + for integration in content_items.integration + if integration.is_data_source() + ] + def _get_tags_from_landing_page(self, pack_id: str) -> set: """ Build the pack's tag list according to the landingPage sections file. @@ -504,7 +609,7 @@ def _search_content_item_metadata_object( item_type_key: Optional[str], ) -> Optional[dict]: """ - Search an content item object in the content items metadata list by its ID and name. + Search a content item object in the content items metadata list by its ID and name. Args: collected_content_items (dict): The content items metadata list that were already collected. diff --git a/demisto_sdk/commands/content_graph/parsers/integration.py b/demisto_sdk/commands/content_graph/parsers/integration.py index 54bf9ac9aa..2bc9ce3345 100644 --- a/demisto_sdk/commands/content_graph/parsers/integration.py +++ b/demisto_sdk/commands/content_graph/parsers/integration.py @@ -40,6 +40,8 @@ def __init__( "isfetcheventsandassets", False ) self.is_mappable = self.script_info.get("ismappable", False) + self.is_remote_sync_in = self.script_info.get("isremotesyncin", False) + self.is_fetch_samples = self.script_info.get("isFetchSamples", False) self.is_feed = self.script_info.get("feed", False) self.is_beta = self.script_info.get("beta", False) self.long_running = self.script_info.get("longRunning", False) diff --git a/demisto_sdk/commands/content_graph/parsers/pack.py b/demisto_sdk/commands/content_graph/parsers/pack.py index c7e9863648..19a77771a3 100644 --- a/demisto_sdk/commands/content_graph/parsers/pack.py +++ b/demisto_sdk/commands/content_graph/parsers/pack.py @@ -149,6 +149,7 @@ def __init__(self, path: Path, metadata: Dict[str, Any]) -> None: self.commit = "" self.downloads: int = 0 self.tags: List[str] = metadata.get("tags") or [] + self.default_data_source_id: str = metadata.get("defaultDataSource") or "" self.keywords: List[str] = metadata.get("keywords", []) self.search_rank: int = 0 self.videos: List[str] = metadata.get("videos", []) @@ -371,4 +372,5 @@ def field_mapping(self): "modules": "modules", "disable_monthly": "disableMonthly", "content_commit_hash": "contentCommitHash", + "default_data_source_id": "defaultDataSource", } diff --git a/demisto_sdk/commands/generate_docs/generate_integration_doc.py b/demisto_sdk/commands/generate_docs/generate_integration_doc.py index 8942c208f6..458f634934 100644 --- a/demisto_sdk/commands/generate_docs/generate_integration_doc.py +++ b/demisto_sdk/commands/generate_docs/generate_integration_doc.py @@ -24,6 +24,7 @@ from demisto_sdk.commands.common.logger import logger from demisto_sdk.commands.common.tools import ( get_content_path, + get_pack_metadata, get_yaml, ) 
from demisto_sdk.commands.generate_docs.common import ( @@ -551,6 +552,18 @@ def generate_integration_doc( "", ] ) + # Checks if this integration is the default data source + pack_metadata = get_pack_metadata(input_path) + default_data_source_id = pack_metadata.get("defaultDataSource") + if yml_data.get("commonfields", {}).get("id") == default_data_source_id: + docs.extend( + [ + "<~XSIAM>", + "This is the default integration for this content pack when configured by the Data Onboarder.", + "", + "", + ] + ) # Checks if the integration is a new version integration_version = re.findall("[vV][2-9]$", yml_data.get("display", "")) if integration_version and not skip_breaking_changes: diff --git a/demisto_sdk/commands/prepare_content/integration_script_unifier.py b/demisto_sdk/commands/prepare_content/integration_script_unifier.py index 51320c89a4..2cef5514e5 100644 --- a/demisto_sdk/commands/prepare_content/integration_script_unifier.py +++ b/demisto_sdk/commands/prepare_content/integration_script_unifier.py @@ -5,7 +5,7 @@ import re import tempfile from pathlib import Path -from typing import Dict, List, Union +from typing import Dict, List, Optional, Union from inflection import dasherize, underscore from ruamel.yaml.scalarstring import ( # noqa: TID251 - only importing FoldedScalarString is OK @@ -607,13 +607,9 @@ def add_contributors_support( if support_level_header := unified_yml.get(SUPPORT_LEVEL_HEADER): contributor_type = support_level_header - if ( - " Contribution)" not in unified_yml["display"] - and contributor_type != "xsoar" - ): - unified_yml["display"] += CONTRIBUTOR_DISPLAY_NAME.format( - contributor_type.capitalize() - ) + unified_yml["display"] = IntegrationScriptUnifier.get_display_name( + unified_yml["display"], contributor_type + ) existing_detailed_description = unified_yml.get("detaileddescription", "") if contributor_type == COMMUNITY_SUPPORT: @@ -647,6 +643,34 @@ def add_contributors_support( return unified_yml + @staticmethod + def get_display_name(display_name: str, contributor_type: str): + if ( + display_name + and contributor_type + and " Contribution)" not in display_name + and contributor_type != "xsoar" + ): + display_name += CONTRIBUTOR_DISPLAY_NAME.format( + contributor_type.capitalize() + ) + return display_name + + @staticmethod + def remove_support_from_display_name( + display_name: str, contributor_type: Optional[str] + ): + if ( + display_name + and contributor_type + and " Contribution)" in display_name + and contributor_type != "xsoar" + ): + suffix = CONTRIBUTOR_DISPLAY_NAME.format(contributor_type.capitalize()) + if display_name.endswith(suffix): + display_name = display_name[: -len(suffix)] + return display_name + @staticmethod def get_integration_doc_link(package_path: Path, unified_yml: Dict) -> str: """Generates the integration link to the integration documentation diff --git a/demisto_sdk/commands/prepare_content/tests/yml_unifier_test.py b/demisto_sdk/commands/prepare_content/tests/yml_unifier_test.py index 08d549c389..4d7bcf5e66 100644 --- a/demisto_sdk/commands/prepare_content/tests/yml_unifier_test.py +++ b/demisto_sdk/commands/prepare_content/tests/yml_unifier_test.py @@ -277,6 +277,70 @@ def test_insert_description_to_yml_with_no_detailed_desc(tmp_path): ) +@pytest.mark.parametrize( + "display_name, support_level, expected_name", + ( + ("Cymulate v2", "partner", "Cymulate v2 (Partner Contribution)"), + ( + "Cymulate v2 (Partner Contribution)", + "partner", + "Cymulate v2 (Partner Contribution)", + ), + ("Cymulate v2", "xsoar", "Cymulate 
v2"), + ("Cymulate v2", None, "Cymulate v2"), + ("Cymulate v2", "community", "Cymulate v2 (Community Contribution)"), + (None, "community", None), + ("", "community", ""), + ), +) +def test_get_display_name(display_name, support_level, expected_name): + """ + Given: + - Current display name and pack support level + + When: + - Enhancing the integration yml display name to also hold the support level + + Then: + - The returned display name holds the expected support level suffix + """ + assert ( + IntegrationScriptUnifier.get_display_name(display_name, support_level) + == expected_name + ) + + +@pytest.mark.parametrize( + "display_name, support_level, expected_name", + ( + ("Cymulate v2 (Partner Contribution)", "partner", "Cymulate v2"), + ("Cymulate v2 (Partner Contribution)", "partner", "Cymulate v2"), + ("Cymulate v2", "xsoar", "Cymulate v2"), + ("Cymulate v2", None, "Cymulate v2"), + ("Cymulate v2 (Community Contribution)", "community", "Cymulate v2"), + (None, "community", None), + ("", "community", ""), + ), +) +def test_remove_support_from_display_name(display_name, support_level, expected_name): + """ + Given: + - Current display name and pack support level + + When: + - Removing the support level from the integration display name to hold the + + Then: + - The returned display name doesn't hold the support level suffix + """ + assert ( + IntegrationScriptUnifier.remove_support_from_display_name( + display_name, support_level + ) + == expected_name + ) + + def test_get_integration_doc_link_positive(tmp_path): """ Given: diff --git a/demisto_sdk/commands/validate/default_config.toml b/demisto_sdk/commands/validate/default_config.toml index 02bf0ed2ed..253fe36cd1 100644 --- a/demisto_sdk/commands/validate/default_config.toml +++ b/demisto_sdk/commands/validate/default_config.toml @@ -33,7 +33,7 @@ warning = [] select = [ "BA100", "BA101", "BA105", "BA106", "BA110", "BA111", "BA116", "BA118", "BA124", "BA126", "PA100", "PA101", "PA102", "PA103", "PA104", "PA105", "PA107", "PA108", "PA109", "PA111", "PA113", "PA115", "PA117", "PA118", "PA119", "PA120", - "PA121", "PA123", "PA125", "PA127", "PA128", "PA130", + "PA121", "PA123", "PA125", "PA127", "PA128", "PA130", "PA131", "PA132", "BC100", "BC105", "BC108", "GR106", "IN100", "IN102", "IN104", "IN106", "IN107", "IN108", "IN109", "IN110", "IN112", "IN113", "IN114", "IN115", "IN117", "IN118", "IN121", "IN122", "IN123", "IN124", "IN125", "IN126", "IN127", "IN130", @@ -60,7 +60,7 @@ select = [ "BA100", "BA101", "BA105", "BA106", "BA110", "BA111", "BA116", "BA118", "BA126", "GR106", "PA100", "PA101", "PA102", "PA103", "PA104", "PA105", "PA107", "PA108", "PA109", "PA111", "PA113", "PA115", "PA117", "PA118", "PA119", "PA120", - "PA121", "PA123", "PA125", "PA127", "PA130", + "PA121", "PA123", "PA125", "PA127", "PA130", "PA131", "PA132", "DO100", "DO101", "DO102", "DO103", "DO104", "SC100", "SC105", "SC106", "RM104", "RM105", "RM113", "RM114", diff --git a/demisto_sdk/commands/validate/sdk_validation_config.toml b/demisto_sdk/commands/validate/sdk_validation_config.toml index b1a2941911..1cb58908f5 100644 --- a/demisto_sdk/commands/validate/sdk_validation_config.toml +++ b/demisto_sdk/commands/validate/sdk_validation_config.toml @@ -24,7 +24,7 @@ ignorable_errors = [ select = [ "BA100", "BA101", "BA105", "BA106", "BA108", "BA110", "BA111", "BA114", "BA116", "BA118", "BA119", "BA124", "BA126", "PA100", "PA101", "PA102", "PA103", "PA104", "PA105", "PA107", "PA108", "PA109", "PA111", "PA113", "PA115", "PA117", "PA118", "PA119", "PA120", - "PA121", 
"PA123", "PA125", "PA127", "PA128", "PA130", + "PA121", "PA123", "PA125", "PA127", "PA128", "PA130", "PA131", "PA132", "BC100", "BC101", "BC102", "BC103", "BC104", "BC105", "BC108", "BC110", "BC111", "IN100", "IN101", "IN102", "IN104", "IN106", "IN107", "IN108", "IN109", "IN110", "IN112", "IN113", "IN114", "IN115", "IN117", "IN118", "IN121", "IN122", "IN123", "IN124", "IN125", "IN126", "IN127", "IN130", "IN131", "IN134", "IN135", "IN139", "IN141", "IN142", "IN144", "IN145", "IN146", "IN149", "IN150", "IN151", "IN152", "IN153", "IN154", "IN156", "IN158", "IN159", "IN160", @@ -56,7 +56,7 @@ select = [ "BA100", "BA101", "BA105", "BA106", "BA110", "BA111", "BA113", "BA116", "BA118", "BA119", "BA126", "DS100", "PA100", "PA101", "PA102", "PA103", "PA104", "PA105", "PA107", "PA108", "PA109", "PA111", "PA113", "PA115", "PA117", "PA118", "PA119", "PA120", - "PA121", "PA123", "PA125", "PA127", "PA130", + "PA121", "PA123", "PA125", "PA127", "PA130", "PA131", "PA132", "DO100", "DO101", "DO102", "DO103", "DO104", "SC100", "SC105", "SC106", "SC109", "RM104", "RM105", "RM113", "RM114", diff --git a/demisto_sdk/commands/validate/tests/PA_validators_test.py b/demisto_sdk/commands/validate/tests/PA_validators_test.py index 55d0d9c261..16dccfb47d 100644 --- a/demisto_sdk/commands/validate/tests/PA_validators_test.py +++ b/demisto_sdk/commands/validate/tests/PA_validators_test.py @@ -13,6 +13,7 @@ PACK_METADATA_SUPPORT, PACK_METADATA_TAGS, PACK_METADATA_USE_CASES, + MarketplaceVersions, ) from demisto_sdk.commands.content_graph.parsers.related_files import RelatedFile from demisto_sdk.commands.validate.tests.test_tools import ( @@ -85,6 +86,12 @@ from demisto_sdk.commands.validate.validators.PA_validators.PA130_is_current_version_correct_format import ( IsCurrentVersionCorrectFormatValidator, ) +from demisto_sdk.commands.validate.validators.PA_validators.PA131_is_default_data_source_provided import ( + IsDefaultDataSourceProvidedValidator, +) +from demisto_sdk.commands.validate.validators.PA_validators.PA132_is_valid_default_datasource import ( + IsValidDefaultDataSourceNameValidator, +) @pytest.mark.parametrize( @@ -726,6 +733,208 @@ def test_IsValidDescriptionFieldValidator_is_valid( ) +@pytest.mark.parametrize( + "pack, integrations, expected_number_of_failures", + [ + ( + create_pack_object(), + [ + create_integration_object( + ["script.isfetch", "name"], ["true", "TestIntegration1"] + ), + create_integration_object(["script.isfetch"], ["true"]), + ], + 1, + ), + ( + create_pack_object(["defaultDataSource"], ["defaultDataSourceValue"]), + [ + create_integration_object( + ["script.isfetch", "name"], ["true", "defaultDataSourceValue"] + ), + create_integration_object(["script.isfetch"], ["true"]), + ], + 0, + ), + ( + create_pack_object(), + [create_integration_object(["script.isfetch"], ["true"])], + 0, + ), + ( + create_pack_object(["marketplaces"], [[MarketplaceVersions.XSOAR]]), + [ + create_integration_object( + ["script.isfetch", "name"], ["true", "TestIntegration1"] + ), + create_integration_object(["script.isfetch"], ["true"]), + ], + 0, + ), + ], +) +def test_IsDefaultDataSourceProvidedValidator_is_valid( + pack, integrations, expected_number_of_failures +): + """ + Given + content_items. + - Case 1: One XSIAM pack_metadata with 2 integrations and no defaultDataSource. + - Case 2: One XSIAM pack_metadata with 2 integrations and a defaultDataSource. + - Case 3: One XSIAM pack_metadata with one integration and no defaultDataSource. + - Case 4: One non XSIAM pack_metadata with 2 integrations. 
+ + When + - Calling the IsDefaultDataSourceProvidedValidator is_valid function. + + Then + - Make sure the right amount of pack metadata failed, and that the right error message is returned. + - Case 1: Should fail. + - Case 2: Shouldn't fail. + - Case 3: Shouldn't fail. + - Case 4: Shouldn't fail. + """ + pack.content_items.integration.extend(integrations) + results = IsDefaultDataSourceProvidedValidator().is_valid([pack]) + assert len(results) == expected_number_of_failures + assert not results or all( + [ + ( + result.message + == "The pack metadata does not contain the 'defaultDataSource' field. " + "Please specify a defaultDataSource from the following options: ['TestIntegration', 'TestIntegration']." + ) + for result in results + ] + ) + + +def test_IsDefaultDataSourceProvidedValidator_fix(): + """ + Given + - A pack_metadata with no defaultDataSource, for a pack with one event collector + + When + - Calling the IsDefaultDataSourceProvidedValidator fix function. + + Then + - Make sure that the defaultDataSource is set to the event collector integration id + """ + content_item = create_pack_object() + integrations = [ + create_integration_object( + ["script.isfetchevents", "commonfields.id"], + ["true", "defaultDataSourceValue"], + ), + create_integration_object(["script.isfetch"], ["true"]), + ] + content_item.content_items.integration.extend(integrations) + assert not content_item.default_data_source_id + validator = IsDefaultDataSourceProvidedValidator() + assert validator.fix(content_item).message == ( + "Set the 'defaultDataSource' for 'HelloWorld' pack to the " + "'defaultDataSourceValue' integration, as it is an event collector." + ) + assert content_item.default_data_source_id == "defaultDataSourceValue" + + +@pytest.mark.parametrize( + "pack, integrations, expected_number_of_failures", + [ + ( + create_pack_object( + ["defaultDataSource"], ["InvalidDefaultDataSourceValue"] + ), + [ + create_integration_object( + ["script.isfetch", "commonfields.id"], ["true", "TestIntegration1"] + ), + create_integration_object(["script.isfetch"], ["true"]), + ], + 1, + ), + ( + create_pack_object(["defaultDataSource"], ["defaultDataSourceValue"]), + [ + create_integration_object( + ["script.isfetch", "commonfields.id"], + ["true", "defaultDataSourceValue"], + ), + create_integration_object(["script.isfetch"], ["true"]), + ], + 0, + ), + ( + create_pack_object(), + [create_integration_object(["script.isfetch"], ["true"])], + 0, + ), + ], +) +def test_IsValidDefaultDataSourceNameValidator_is_valid( + pack, integrations, expected_number_of_failures +): + """ + Given + - Case 1: One XSIAM pack_metadata with 2 integrations and a defaultDataSource that is not one of the pack integrations. + - Case 2: One XSIAM pack_metadata with 2 integrations and a defaultDataSource that is one of the pack integrations. + - Case 3: One XSIAM pack_metadata with one integration and no defaultDataSource. + + When + - Calling the IsValidDefaultDataSourceNameValidator is_valid function. + + Then + - Make sure the right amount of pack metadata failed, and that the right error message is returned. + - Case 1: Should fail. + - Case 2: Shouldn't fail. + - Case 3: Shouldn't fail. + """ + pack.content_items.integration.extend(integrations) + results = IsValidDefaultDataSourceNameValidator().is_valid([pack]) + assert len(results) == expected_number_of_failures + assert not results or all( + [ + ( + result.message + == "Pack metadata contains an invalid 'defaultDataSource': InvalidDefaultDataSourceValue. 
" + "Please fill in a valid datasource integration, one of these options: ['TestIntegration1', 'TestIntegration']." + ) + for result in results + ] + ) + + +def test_IsValidDefaultDataSourceNameValidator_fix(): + """ + Given + - A pack_metadata with a defaultDataSource value that holds the integration display name instead of integration id + + When + - Calling the IsValidDefaultDataSourceNameValidator fix function. + + Then + - Make sure that the defaultDataSource is set to the integration id + """ + content_item = create_pack_object( + ["defaultDataSource"], ["Default Data Source Value"] + ) + integrations = [ + create_integration_object( + ["script.isfetch", "commonfields.id", "display"], + ["true", "defaultDataSourceValue", "Default Data Source Value"], + ), + create_integration_object(["script.isfetch"], ["true"]), + ] + content_item.content_items.integration.extend(integrations) + assert content_item.default_data_source_id == "Default Data Source Value" + validator = IsValidDefaultDataSourceNameValidator() + assert validator.fix(content_item).message == ( + "Updated the 'defaultDataSource' for the 'HelloWorld' pack to use the 'defaultDataSourceValue' " + "integration ID instead of the display name that was previously used." + ) + assert content_item.default_data_source_id == "defaultDataSourceValue" + + @pytest.mark.parametrize( "content_items, expected_number_of_failures", [ diff --git a/demisto_sdk/commands/validate/validators/PA_validators/PA131_is_default_data_source_provided.py b/demisto_sdk/commands/validate/validators/PA_validators/PA131_is_default_data_source_provided.py new file mode 100644 index 0000000000..0e403fdd57 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/PA_validators/PA131_is_default_data_source_provided.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +from typing import Iterable, List + +from demisto_sdk.commands.common.constants import MarketplaceVersions +from demisto_sdk.commands.content_graph.objects.pack import Pack +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Pack + + +class IsDefaultDataSourceProvidedValidator(BaseValidator[ContentTypes]): + error_code = "PA131" + description = "Validate that the pack_metadata contains a default datasource, if there are more than one datasource." + rationale = "Wizards and other tools rely on the default datasource to be set." + error_message = ( + "The pack metadata does not contain the 'defaultDataSource' field. " + "Please specify a defaultDataSource from the following options: {0}." + ) + fix_message = "Set the 'defaultDataSource' for '{0}' pack to the '{1}' integration, as it is an event collector." 
+ related_field = "defaultDataSource" + is_auto_fixable = True + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format( + content_item.get_valid_data_source_integrations( + content_item.content_items, content_item.support + ) + ), + content_object=content_item, + ) + for content_item in content_items + if MarketplaceVersions.MarketplaceV2 in content_item.marketplaces + and ( + content_item.is_data_source(content_item.content_items) + and not content_item.default_data_source_id + and len( + content_item.get_valid_data_source_integrations( + content_item.content_items + ) + ) + > 1 + ) + ] + + def fix(self, content_item: ContentTypes) -> FixResult: + # The fix applies when there is one event collector, which is preferred over fetching integrations + data_sources_fetch_events = [ + integration.object_id + for integration in content_item.content_items.integration + if MarketplaceVersions.MarketplaceV2 in integration.marketplaces + and not integration.deprecated + and integration.is_fetch_events + ] + + if len(data_sources_fetch_events) == 1: + content_item.default_data_source_id = data_sources_fetch_events[0] + return FixResult( + validator=self, + message=self.fix_message.format( + content_item.name, data_sources_fetch_events[0] + ), + content_object=content_item, + ) + + raise Exception( + "Unable to determine which integration should be set as default." + ) diff --git a/demisto_sdk/commands/validate/validators/PA_validators/PA132_is_valid_default_datasource.py b/demisto_sdk/commands/validate/validators/PA_validators/PA132_is_valid_default_datasource.py new file mode 100644 index 0000000000..b2fbd868e0 --- /dev/null +++ b/demisto_sdk/commands/validate/validators/PA_validators/PA132_is_valid_default_datasource.py @@ -0,0 +1,76 @@ +from __future__ import annotations + +from typing import Dict, Iterable, List + +from demisto_sdk.commands.content_graph.objects.pack import Pack +from demisto_sdk.commands.validate.validators.base_validator import ( + BaseValidator, + FixResult, + ValidationResult, +) + +ContentTypes = Pack + + +class IsValidDefaultDataSourceNameValidator(BaseValidator[ContentTypes]): + error_code = "PA132" + description = "Validate that the pack_metadata contains a valid default datasource, when provided." + rationale = "Wizards and other tools rely on the default datasource to be set." + error_message = ( + "Pack metadata contains an invalid 'defaultDataSource': {0}. " + "Please fill in a valid datasource integration, one of these options: {1}." + ) + fix_message = ( + "Updated the 'defaultDataSource' for the '{0}' pack to use the '{1}' " + "integration ID instead of the display name that was previously used." 
+ ) + related_field = "defaultDataSource" + is_auto_fixable = True + + def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]: + return [ + ValidationResult( + validator=self, + message=self.error_message.format( + content_item.default_data_source_id, + content_item.get_valid_data_source_integrations( + content_item.content_items, content_item.support + ), + ), + content_object=content_item, + ) + for content_item in content_items + if ( + content_item.default_data_source_id + and content_item.default_data_source_id + not in content_item.get_valid_data_source_integrations( + content_item.content_items, content_item.support + ) + ) + ] + + def fix(self, content_item: ContentTypes) -> FixResult: + # The fix applies when the defaultDataSource value is the display name instead of the id of the selected integration + data_sources: List[Dict[str, str]] = content_item.get_valid_data_source_integrations( # type: ignore[assignment] + content_item.content_items, content_item.support, include_name=True + ) + + default_data_source = [ + data_source + for data_source in data_sources + if data_source.get("name") == content_item.default_data_source_id + ] + + if default_data_source: + content_item.default_data_source_id = default_data_source[0].get("id") + return FixResult( + validator=self, + message=self.fix_message.format( + content_item.name, default_data_source[0].get("id") + ), + content_object=content_item, + ) + + raise Exception( + "Unable to determine which integration should be set as default." + )
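
For reference, a minimal self-contained sketch of the selection behavior this diff introduces (the new `Integration.is_data_source()` check combined with `PackMetadata._set_default_data_source()`), using plain dicts and a literal "marketplacev2" string in place of the SDK's Integration/PackMetadata models and the MarketplaceVersions enum. The helper names `is_data_source` and `pick_default_data_source` below are illustrative stand-ins, not part of the change:

from typing import Dict, List, Optional


def is_data_source(integration: Dict) -> bool:
    # Mirrors Integration.is_data_source(): an XSIAM integration that is not deprecated,
    # not a feed, and fetches in at least one of the supported ways.
    return (
        "marketplacev2" in integration.get("marketplaces", [])
        and not integration.get("deprecated", False)
        and not integration.get("is_feed", False)
        and any(
            integration.get(flag, False)
            for flag in (
                "is_fetch",
                "is_fetch_events",
                "is_remote_sync_in",
                "is_fetch_events_and_assets",
                "is_fetch_samples",
            )
        )
    )


def pick_default_data_source(
    integrations: List[Dict], provided_id: Optional[str] = None
) -> Optional[str]:
    # Keep a provided defaultDataSource only if it names a valid data-source integration;
    # otherwise fall back to the first valid one (None when the pack has no data source).
    valid_ids = [i["id"] for i in integrations if is_data_source(i)]
    if provided_id and provided_id in valid_ids:
        return provided_id
    return valid_ids[0] if valid_ids else None


collector = {"id": "MyEventCollector", "marketplaces": ["marketplacev2"], "is_fetch_events": True}
feed = {"id": "MyFeed", "marketplaces": ["marketplacev2"], "is_fetch": True, "is_feed": True}
assert pick_default_data_source([feed, collector]) == "MyEventCollector"
assert pick_default_data_source([collector], provided_id="MyEventCollector") == "MyEventCollector"

This mirrors the precedence in `_set_default_data_source`: an explicitly provided, valid `defaultDataSource` is kept; otherwise a default is chosen from the valid data-source integrations.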
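
Similarly, a small sketch of the contribution-suffix handling factored out into `IntegrationScriptUnifier.get_display_name` and `remove_support_from_display_name`, assuming CONTRIBUTOR_DISPLAY_NAME holds the " ({} Contribution)" template; `add_suffix` and `remove_suffix` are hypothetical names used only for this example:

from typing import Optional

CONTRIBUTOR_DISPLAY_NAME = " ({} Contribution)"  # assumed template, as in the unifier


def add_suffix(display_name: str, contributor_type: Optional[str]) -> str:
    # Append the contribution suffix for non-xsoar support levels, once.
    if (
        display_name
        and contributor_type
        and contributor_type != "xsoar"
        and " Contribution)" not in display_name
    ):
        display_name += CONTRIBUTOR_DISPLAY_NAME.format(contributor_type.capitalize())
    return display_name


def remove_suffix(display_name: str, contributor_type: Optional[str]) -> str:
    # Strip the suffix again, e.g. before exposing the name as a defaultDataSource option.
    if display_name and contributor_type and contributor_type != "xsoar":
        suffix = CONTRIBUTOR_DISPLAY_NAME.format(contributor_type.capitalize())
        if display_name.endswith(suffix):
            display_name = display_name[: -len(suffix)]
    return display_name


assert add_suffix("Cymulate v2", "partner") == "Cymulate v2 (Partner Contribution)"
assert remove_suffix("Cymulate v2 (Partner Contribution)", "partner") == "Cymulate v2"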