ConfJSON object #3918

Closed
wants to merge 13 commits
22 changes: 13 additions & 9 deletions demisto_sdk/commands/content_graph/common.py
@@ -2,7 +2,7 @@
import os
import re
from pathlib import Path
from typing import Any, Callable, Dict, Iterator, List, NamedTuple, Set
from typing import Any, Callable, Dict, Iterator, List, NamedTuple, Optional, Set

from neo4j import graph

@@ -31,7 +31,7 @@
PACK_CONTRIBUTORS_FILENAME = "CONTRIBUTORS.json"
UNIFIED_FILES_SUFFIXES = [".yml", ".json"]

SERVER_CONTENT_ITEMS_PATH = "Tests/Marketplace/server_content_items.json"
SERVER_CONTENT_ITEMS_PATH = Path("Tests/Marketplace/server_content_items.json")


class Neo4jRelationshipResult(NamedTuple):
@@ -49,6 +49,7 @@ class RelationshipType(str, enum.Enum):
USES = "USES"
USES_BY_ID = "USES_BY_ID"
USES_BY_NAME = "USES_BY_NAME"
USES_BY_CLI_NAME = "USES_BY_CLI_NAME"
USES_COMMAND_OR_SCRIPT = "USES_COMMAND_OR_SCRIPT"
USES_PLAYBOOK = "USES_PLAYBOOK"

@@ -359,22 +360,25 @@ def _lazy_decorator(self):
return LazyProperty(_lazy_decorator)


def get_server_content_items() -> Dict[ContentType, list]:
def get_server_content_items(tag: Optional[str] = None) -> Dict[ContentType, list]:
"""Reads a JSON file containing server content items from content repository
and returns a dict representation of it in the required format.

Args:
tag (Optional[str], optional): A tag of the content repository to fetch the server content items from.
If not specified, the items are read from the local file when it exists; otherwise they are fetched from the remote repository.
Returns:
Dict[ContentType, list]: A mapping of content types to the list of server content items.
"""

try:
json_data: dict = get_json(SERVER_CONTENT_ITEMS_PATH)
except FileNotFoundError:
from_remote = tag is not None or not SERVER_CONTENT_ITEMS_PATH.exists()
if not from_remote:
json_data: dict = get_json(str(SERVER_CONTENT_ITEMS_PATH))
else:
json_data = get_remote_file(
SERVER_CONTENT_ITEMS_PATH,
str(SERVER_CONTENT_ITEMS_PATH),
git_content_config=GitContentConfig(
repo_name=GitContentConfig.OFFICIAL_CONTENT_REPO_NAME,
),
tag=tag,
)
return {ContentType(k): v for k, v in json_data.items()}
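
Illustrative usage sketch (not part of the diff) of how the new `tag` parameter is expected to behave, based on the function above; the tag value below is made up for the example.

```python
# Hedged sketch: calling the updated get_server_content_items.
from demisto_sdk.commands.content_graph.common import (
    ContentType,
    get_server_content_items,
)

# No tag: read Tests/Marketplace/server_content_items.json from the local
# content repo if it exists, otherwise fall back to the remote official repo.
local_items = get_server_content_items()

# With a tag: always fetch the file from the official content repo at that tag
# (illustrative tag value).
tagged_items = get_server_content_items(tag="some-release-tag")

# Both calls return a mapping of ContentType -> list of server item ids.
assert all(isinstance(key, ContentType) for key in local_items)
```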

2 changes: 2 additions & 0 deletions demisto_sdk/commands/content_graph/objects/repository.py
@@ -11,6 +11,7 @@
from demisto_sdk.commands.common.content_constant_paths import CONTENT_PATH
from demisto_sdk.commands.common.cpu_count import cpu_count
from demisto_sdk.commands.common.logger import logger
from demisto_sdk.commands.content_graph.objects.conf_json import ConfJSON
from demisto_sdk.commands.content_graph.objects.pack import Pack
from demisto_sdk.commands.content_graph.parsers.repository import RepositoryParser

@@ -20,6 +21,7 @@
class ContentDTO(BaseModel):
path: DirectoryPath = Path(CONTENT_PATH) # type: ignore
packs: List[Pack]
conf_json: ConfJSON

@staticmethod
def from_path(path: Path = CONTENT_PATH):
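
Illustrative sketch (not from the diff) of what the new required field implies when constructing a `ContentDTO`; `ConfJSON.from_path` and the `Tests/conf.json` location are assumptions based on other files in this PR.

```python
# Hedged sketch: ContentDTO now carries the repo's conf.json alongside its packs.
from pathlib import Path

from demisto_sdk.commands.common.content_constant_paths import CONTENT_PATH
from demisto_sdk.commands.content_graph.objects.conf_json import ConfJSON
from demisto_sdk.commands.content_graph.objects.repository import ContentDTO

dto = ContentDTO(
    packs=[],  # normally produced by RepositoryParser from the Packs/ directory
    conf_json=ConfJSON.from_path(Path(CONTENT_PATH) / "Tests" / "conf.json"),
)
```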
71 changes: 51 additions & 20 deletions demisto_sdk/commands/validate/default_config.toml
@@ -1,44 +1,75 @@
[use_git]
ignorable_errors = [
"BA101", "BA106", "BA108", "BA109", "BA110", "BA111", "BA112", "BA113", "BA116", "BA119", "BA124", "BA125",
"GF102",
"DS107", "DS108",
"IF100", "IF106", "IF113", "IF115", "IF116",
"IN107", "IN109", "IN110", "IN122", "IN123", "IN124", "IN126", "IN128", "IN135", "IN136", "IN139", "IN144", "IN145", "IN150", "IN153", "IN154", "IN161",
"MP106",
"PA113", "PA116", "PA124", "PA125", "PA127", "PA129",
"PB104", "PB105", "PB106", "PB107", "PB110", "PB111", "PB114", "PB115", "PB116", "PB118", "PB119", "PB121",
"RM100", "RM102", "RM104", "RM106", "RM108", "RM110", "RM112", "RM113",
"RP102", "RP104",
"SC100", "SC105", "SC106",
"IM111",
"RN112", "RN113", "RN114", "RN115", "RN116",
"MR108",
"PR101",
"LO107",
"DB100",
"GR103",
"SC105"
]


[use_git]
select = [
"BA101", "BA105", "BA106", "BA116", "BA118",
"PA107", "PA108", "PA111", "PA115", "PA130",
"BA101", "BA105", "BA106", "BA116", "BA118", "BA126",
"PA100", "PA101", "PA102", "PA103", "PA104", "PA105", "PA107", "PA108", "PA109", "PA111", "PA113", "PA115", "PA117", "PA118", "PA119", "PA120",
"PA121", "PA123", "PA125", "PA127", "PA130",
"BC100",
"GR106",
"DO108",
"IN108",
"IN130",
"IN100", "IN102", "IN104", "IN106", "IN107", "IN108", "IN109", "IN110", "IN112", "IN113", "IN114", "IN115", "IN117", "IN118", "IN121", "IN122", "IN123", "IN124", "IN125", "IN126", "IN127", "IN130",
"IN131", "IN134", "IN135", "IN139", "IN141", "IN142", "IN145", "IN146", "IN149", "IN150", "IN151", "IN152", "IN153", "IN154", "IN156", "IN158", "IN159", "IN160",
"IN161", "IN162",
"PB118",
"DO100", "DO101", "DO102", "DO103", "DO104, DO105, DO106",
"SC100", "SC105", "SC106", "SC109"
]
warning = []
ignorable_errors = [
"BA101", "BA116",
"PB118"
]

[validate_all]

[validate_all]
select = [
"BA101", "BA105", "BA106","BA116", "BA118",
"IN100", "IN102", "IN104", "IN106", "IN107", "IN108", "IN109", "IN110", "IN112", "IN113", "IN114", "IN121", "IN122", "IN123", "IN124", "IN125", "IN126", "IN127", "IN130",
"IN131", "IN134", "IN135", "IN139", "IN141", "IN142", "IN145", "IN146", "IN149", "IN150", "IN151", "IN152", "IN153", "IN154", "IN156", "IN158", "IN159", "IN160",
"IN161", "IN162",
"BA101", "BA105", "BA106","BA116", "BA118", "BA126",
"GR106",
"DO108",
"IN108",
"IN130",
"PA107", "PA108", "PA111", "PA115", "PA130"
"PA100", "PA101", "PA102", "PA103", "PA104", "PA105", "PA107", "PA108", "PA109", "PA111", "PA113", "PA115", "PA117", "PA118", "PA119", "PA120",
"PA121", "PA123", "PA125", "PA127", "PA130",
"DO100", "DO101", "DO102", "DO103", "DO104",
"SC100", "SC105", "SC106", "SC109"
]
warning = []
ignorable_errors = ["BA101", "BA116"]


[Custom_Categories]
select = []
warning = []
ignorable_errors = []


[support_level]

[support_level.community]
ignore = []
ignore = [
"BC100", "BC101", "BC102", "BC103", "BC104", "BC105", "BC106", "BC107", "BC108", "BC109",
"CJ100", "CJ101", "CJ102", "CJ103", "CJ104", "CJ105",
"DS100", "DS101", "DS102", "DS103", "DS104",
"IN125", "IN126", "IN140"
]

[support_level.partner]
ignore = []
ignore = [
"CJ100", "CJ101", "CJ102", "CJ103", "CJ104", "CJ105",
"IN140"
]
88 changes: 52 additions & 36 deletions demisto_sdk/commands/validate/initializer.py
@@ -19,13 +19,15 @@
specify_files_from_directory,
)
from demisto_sdk.commands.content_graph.objects.base_content import BaseContent
from demisto_sdk.commands.content_graph.objects.conf_json import ConfJSON
from demisto_sdk.commands.content_graph.objects.pack import Pack
from demisto_sdk.commands.content_graph.objects.repository import (
ContentDTO,
)
from demisto_sdk.commands.content_graph.parsers.content_item import (
InvalidContentItemException,
)
from demisto_sdk.commands.validate.validators.base_validator import ValidatableTypes


class Initializer:
@@ -276,18 +278,18 @@ def specify_files_by_status(

return filtered_modified_files, filtered_added_files, filtered_renamed_files

def gather_objects_to_run_on(self) -> Set[BaseContent]:
def gather_objects_to_run_on(self) -> Set[ValidatableTypes]:
"""
Filter the file that should run according to the given flag (-i/-g/-a).

Returns:
Set[BaseContent]: the set of files that should run.
"""
content_objects_to_run: Set[BaseContent] = set()
content_objects_to_run: Set[ValidatableTypes] = set()
if self.use_git:
content_objects_to_run = self.get_files_from_git()
elif self.file_path:
content_objects_to_run = self.paths_to_basecontent_set(
content_objects_to_run = self.collect_validatable_objects(
set(self.file_path.split(",")), None
)
elif self.all_files:
@@ -299,36 +301,36 @@ def gather_objects_to_run_on(self) -> Set[BaseContent]:
self.use_git = (True,)
self.committed_only = True
content_objects_to_run = self.get_files_from_git()
content_objects_to_run_with_packs: Set[BaseContent] = self.get_items_from_packs(
content_objects_to_run
)
content_objects_to_run_with_packs: Set[
ValidatableTypes
] = self.get_items_from_packs(content_objects_to_run)
return content_objects_to_run_with_packs

def get_items_from_packs(
self, content_objects_to_run: Set[BaseContent]
) -> Set[BaseContent]:
self, content_objects_to_run: Set[ValidatableTypes]
) -> Set[ValidatableTypes]:
"""Gets the packs content items from the Packs objects in the given set if they weren't there before.

Args:
content_objects_to_run (Set[BaseContent]): The set of BaseContent items to pick the Pack objects from.
content_objects_to_run (Set[ValidatableTypes]): The set of ValidatableTypes items to pick the Pack objects from.

Returns:
Set[BaseContent]: The given set unified with the content items from inside the Pack objects.
Set[ValidatableTypes]: The given set unified with the content items from inside the Pack objects.
"""
content_objects_to_run_with_packs: Set[BaseContent] = set()
result: Set[ValidatableTypes] = set()
for content_object in content_objects_to_run:
if isinstance(content_object, Pack):
for content_item in content_object.content_items:
if content_item not in content_objects_to_run:
content_objects_to_run_with_packs.add(content_item)
content_objects_to_run_with_packs.add(content_object)
return content_objects_to_run_with_packs
result.add(content_item)
result.add(content_object)
return result

def get_files_from_git(self) -> Set[BaseContent]:
def get_files_from_git(self) -> Set[ValidatableTypes]:
"""Return all files added/changed/deleted.

Returns:
Set[BaseContent]: The set of all the files from git successfully casted to BaseContent
Set[ValidatableTypes]: The set of all the files from git that were successfully cast to validatable objects
"""
self.validate_git_installed()
self.set_prev_ver()
@@ -345,32 +347,35 @@ def get_files_from_git(self) -> Set[BaseContent]:
added_files = self.filter_files(added_files)
renamed_files = self.filter_files(renamed_files)
deleted_files = self.filter_files(deleted_files)
basecontent_with_path_set: Set[BaseContent] = set()
basecontent_with_path_set = basecontent_with_path_set.union(
self.paths_to_basecontent_set(
result: Set[ValidatableTypes] = set()
result = result.union(
self.collect_validatable_objects(
modified_files, GitStatuses.MODIFIED, git_sha=self.prev_ver
)
)
basecontent_with_path_set = basecontent_with_path_set.union(
self.paths_to_basecontent_set(
result = result.union(
self.collect_validatable_objects(
renamed_files, GitStatuses.RENAMED, git_sha=self.prev_ver
)
)
basecontent_with_path_set = basecontent_with_path_set.union(
self.paths_to_basecontent_set(
result = result.union(
self.collect_validatable_objects(
added_files, GitStatuses.ADDED, git_sha=self.prev_ver
)
)
basecontent_with_path_set = basecontent_with_path_set.union(
self.paths_to_basecontent_set(
result = result.union(
self.collect_validatable_objects(
deleted_files, GitStatuses.DELETED, git_sha=self.prev_ver
)
)
return basecontent_with_path_set
return result

def paths_to_basecontent_set(
self, files_set: set, git_status: Optional[str], git_sha: Optional[str] = None
) -> Set[BaseContent]:
def collect_validatable_objects(
self,
files_set: set,
git_status: Optional[str],
git_sha: Optional[str] = None,
) -> Set[ValidatableTypes]:
"""Return a set of all the successful casts to BaseContent from given set of files.

Args:
@@ -380,30 +385,41 @@ def paths_to_basecontent_set(
Returns:
Set[BaseContent]: The set of all the successful casts to BaseContent from given set of files.
"""
basecontent_with_path_set: Set[BaseContent] = set()
invalid_content_items: List[str] = []
result: Set[ValidatableTypes] = set()
invalid: List[str] = []
for file_path in files_set:
try:
if git_status == GitStatuses.RENAMED:
temp_obj: Optional[BaseContent] = BaseContent.from_path(
Path(file_path[0]),
Path(
file_path[0]
), # When renaming (only), we get the old and new paths
git_status=git_status,
old_file_path=Path(file_path[1]),
git_sha=git_sha,
)

elif git_status == GitStatuses.ADDED:
temp_obj = BaseContent.from_path(Path(file_path), git_status)

else:
if file_path.relative_to(CONTENT_PATH) == Path(
"Tests/conf.json"
): # TODO replace with constant
# not BaseContent, but is validated
result.add(ConfJSON.from_path(file_path))
continue

temp_obj = BaseContent.from_path(
Path(file_path), git_status, git_sha=git_sha
)
if temp_obj is None:
invalid_content_items.append(file_path)
invalid.append(file_path)
else:
basecontent_with_path_set.add(temp_obj)
result.add(temp_obj)
except InvalidContentItemException:
invalid_content_items.append(file_path)
return basecontent_with_path_set
invalid.append(file_path)
return result
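
Illustrative sketch (not part of the diff) of the routing this method now performs: `Tests/conf.json` is loaded as a `ConfJSON` object, while every other path still goes through `BaseContent.from_path`. The helper name is hypothetical; `ConfJSON`, `BaseContent`, and `CONTENT_PATH` are real names used in this PR, and the real method additionally tracks git status, git sha, and invalid paths.

```python
# Hedged sketch of the conf.json special case in collect_validatable_objects.
from pathlib import Path

from demisto_sdk.commands.common.content_constant_paths import CONTENT_PATH
from demisto_sdk.commands.content_graph.objects.base_content import BaseContent
from demisto_sdk.commands.content_graph.objects.conf_json import ConfJSON


def load_validatable(file_path: Path):
    """Return a ConfJSON for Tests/conf.json, otherwise a BaseContent object (or None).

    Assumes file_path lives under the content repo, as in the real method.
    """
    if file_path.relative_to(CONTENT_PATH) == Path("Tests/conf.json"):
        # conf.json is not a BaseContent subclass, but it is still validated.
        return ConfJSON.from_path(file_path)
    return BaseContent.from_path(file_path)
```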

def filter_files(self, files_set: Set[Path]):
"""Filter out all the files with suffixes that are not supported by BaseContent.
@@ -0,0 +1,41 @@
from __future__ import annotations

from demisto_sdk.commands.content_graph.objects.conf_json import ConfJSON
from demisto_sdk.commands.validate.validators.base_validator import (
BaseValidator,
)

ContentTypes = ConfJSON


class ConfJSONLinkValidator(BaseValidator[ContentTypes]):
error_code = "GR109"
description = (
"Validates that all content items mentioned in conf.json exist in the repo."
)
error_message = f"Cannot find content object(s) mentioned in conf.json, with id(s) {0} in the repo."
content_types = ContentTypes
is_auto_fixable = False

# def is_valid(self, content_items: Iterable[ContentTypes]) -> List[ValidationResult]:
# graph_conf_json: ContentTypes = one(
# self.graph.search(
# object_id=one(content_items).object_id
# ) # type:ignore[assignment]
# )
# missing_objects = [
# relationship.content_item_to
# for relationship in itertools.chain.from_iterable(
# graph_conf_json.relationships_data.values()
# )
# if relationship.content_item_to.not_in_repository
# ]

# return [
# ValidationResult(
# validator=self,
# message=self.error_message.format(missing_object.object_id),
# content_object=graph_conf_json,
# )
# for missing_object in sorted(missing_objects, key=lambda o: o.object_id)
# ]