Merge pull request #130 from Honny1/expand-json-content
JSON content extension
evgenyz committed Mar 2, 2023
2 parents 1ed617b + a286865 commit 4906c7e
Showing 18 changed files with 821 additions and 15 deletions.
2 changes: 1 addition & 1 deletion .scrutinizer.yml
@@ -8,7 +8,7 @@ build:
before:
- pip3 install -r requirements.txt
- npm i -g eslint
- pip3 install isort pylint flake8 coverage
- pip3 install isort pylint flake8 coverage jsonschema
environment:
python: 3.9.6
nodes:
49 changes: 49 additions & 0 deletions json_validator.py
@@ -0,0 +1,49 @@
#!/usr/bin/env python3

# Copyright 2022, Red Hat, Inc.
# SPDX-License-Identifier: LGPL-2.1-or-later

import argparse
import json
import sys

from jsonschema import validate


def parse_args():
    parser = argparse.ArgumentParser(prog='JSON Schema validator')
    parser.add_argument("-s",
                        "--schema",
                        type=str,
                        default="./tests/json_schema_of_report.json",
                        help="Path to schema of JSON to validate."
                        )
    parser.add_argument('JSON',
                        type=argparse.FileType("r"),
                        nargs='?',
                        default=sys.stdin,
                        help="JSON file source. Default: stdin"
                        )
    return parser.parse_args()


def validate_json(schema_src, json_file):
    json_schema = None
    json_data = None

    with open(schema_src, "r", encoding="utf-8") as schema_file:
        json_schema = json.load(schema_file)

    json_data = json.load(json_file)
    json_file.close()

    validate(json_data, json_schema)


def main():
    args = parse_args()
    validate_json(args.schema, args.JSON)


if __name__ == "__main__":
    main()
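
Illustrative note (not part of the diff): the validator above can also be driven from Python rather than the command line. The in-memory payload below is a made-up example; jsonschema raises a ValidationError when the document does not match the schema.

import io

from json_validator import validate_json

# Hypothetical payload; a real report would come from the project's JSON report generator.
report = io.StringIO('{"rules": {}}')
validate_json("./tests/json_schema_of_report.json", report)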
9 changes: 9 additions & 0 deletions openscap_report/scap_results_parser/data_structures/group.py
@@ -3,6 +3,15 @@

from dataclasses import asdict, dataclass, field

GROUP_JSON_KEYS = [
    "group_id",
    "title",
    "description",
    "platforms",
    "rules_ids",
    "sub_groups",
]


@dataclass
class Group:
openscap_report/scap_results_parser/data_structures/identifier.py
@@ -3,6 +3,11 @@

from dataclasses import asdict, dataclass

IDENTIFIER_JSON_KEYS = [
    "system",
    "text",
]


@dataclass
class Identifier:
openscap_report/scap_results_parser/data_structures/json_transformation.py
@@ -0,0 +1,62 @@
# Copyright 2022, Red Hat, Inc.
# SPDX-License-Identifier: LGPL-2.1-or-later


def rearrange_references(dictionary_json):
    global_references = {}
    for rule in dictionary_json["rules"].values():
        new_rule_references = []
        for ref in rule["references"]:
            global_references[ref["text"]] = ref["href"]
            new_rule_references.append(ref["text"])
        rule["references"] = new_rule_references
    dictionary_json["references"] = global_references


def rearrange_identifiers(dictionary_json):
    global_identifiers = {}
    for rule in dictionary_json["rules"].values():
        new_rule_identifiers = []
        for ident in rule["identifiers"]:
            global_identifiers[ident["text"]] = ident["system"]
            new_rule_identifiers.append(ident["text"])
        rule["identifiers"] = new_rule_identifiers
    dictionary_json["identifiers"] = global_identifiers


def _get_dict_or_value(val):
    if isinstance(val, list):
        out = []
        for item in val:
            out.append(_get_dict_or_value(item))
        return out
    if isinstance(val, dict):
        return remove_empty_values(val)
    return val


def is_not_empty(val):
    if val is None:
        return False
    if isinstance(val, float):
        return True
    return len(val) > 0


def remove_empty_values(dictionary_json):
    out = {}
    for key, val in dictionary_json.items():
        clean_value = _get_dict_or_value(val)
        if is_not_empty(clean_value):
            out[key] = clean_value
    return out


def remove_not_selected_rules(dictionary_json, ids_of_selected_rules):
    selected_rules = {}
    for rule_id, rule in dictionary_json["rules"].items():
        if rule_id in ids_of_selected_rules or (
                rule["result"] != "notselected" and not is_not_empty(ids_of_selected_rules)
        ):
            selected_rules[rule_id] = rule
    dictionary_json["rules"] = selected_rules
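
Illustrative note (not part of the diff; the rule id and reference values below are made up): rearrange_references hoists per-rule reference details into one shared top-level mapping and leaves only the reference keys on each rule; rearrange_identifiers does the same for identifiers.

report = {
    "rules": {
        "xccdf_rule_example": {
            "references": [{"href": "https://example.org/cce", "text": "CCE-12345-6"}],
        },
    },
}
rearrange_references(report)
# report["rules"]["xccdf_rule_example"]["references"] == ["CCE-12345-6"]
# report["references"] == {"CCE-12345-6": "https://example.org/cce"}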
openscap_report/scap_results_parser/data_structures/oval_definition.py
@@ -3,6 +3,13 @@

from .oval_node import OvalNode

OVAL_DEFINITION_JSON_KEYS = [
    "definition_id",
    "title",
    "description",
    "version",
]


@dataclass
class OvalDefinition:
openscap_report/scap_results_parser/data_structures/profile_info.py
@@ -3,6 +3,13 @@

from dataclasses import asdict, dataclass, field

PROFILE_JSON_KEYS = [
    "profile_id",
    "description",
    "title",
    "extends",
]


@dataclass
class ProfileInfo:
openscap_report/scap_results_parser/data_structures/reference.py
@@ -3,6 +3,11 @@

from dataclasses import asdict, dataclass

REFERENCE_JSON_KEYS = [
    "href",
    "text",
]


@dataclass
class Reference:
openscap_report/scap_results_parser/data_structures/remediation.py
@@ -3,6 +3,15 @@

from dataclasses import asdict, dataclass

REMEDIATION_JSON_KEYS = [
    "remediation_id",
    "system",
    "complexity",
    "disruption",
    "strategy",
    "fix",
]


@dataclass
class Remediation:
48 changes: 34 additions & 14 deletions openscap_report/scap_results_parser/data_structures/report.py
@@ -5,10 +5,33 @@
from dataclasses import asdict, dataclass, field

from ..exceptions import MissingProcessableRules
-from .group import Group
-from .profile_info import ProfileInfo
-from .result_of_scan import ResultOfScan
-from .rule import Rule
+from .group import GROUP_JSON_KEYS, Group
+from .identifier import IDENTIFIER_JSON_KEYS
+from .json_transformation import (rearrange_identifiers, rearrange_references,
+                                  remove_empty_values,
+                                  remove_not_selected_rules)
+from .oval_definition import OVAL_DEFINITION_JSON_KEYS
+from .profile_info import PROFILE_JSON_KEYS, ProfileInfo
+from .reference import REFERENCE_JSON_KEYS
+from .remediation import REMEDIATION_JSON_KEYS
+from .result_of_scan import SCAN_JSON_KEYS, ResultOfScan
+from .rule import RULE_JSON_KEYS, Rule
+from .warning import WARNING_JSON_KEYS

+JSON_REPORT_CONTENT = [
+    "profile_info",
+    "scan_result",
+    "rules",
+    *GROUP_JSON_KEYS,
+    *IDENTIFIER_JSON_KEYS,
+    *OVAL_DEFINITION_JSON_KEYS,
+    *PROFILE_JSON_KEYS,
+    *REFERENCE_JSON_KEYS,
+    *REMEDIATION_JSON_KEYS,
+    *RULE_JSON_KEYS,
+    *SCAN_JSON_KEYS,
+    *WARNING_JSON_KEYS,
+]


@dataclass
@@ -20,18 +43,15 @@ class Report:

    @staticmethod
    def default_json_filter(dictionary):
-        allowed_keys = [
-            "title",
-            "profile_name",
-            "cpe_platforms",
-            "scanner",
-            "benchmark_id",
-            "score"
-        ]
-        return {key: value for (key, value) in dictionary if key in allowed_keys}
+        return {key: value for (key, value) in dictionary if key in JSON_REPORT_CONTENT}

    def as_dict_for_default_json(self):
-        return asdict(self, dict_factory=self.default_json_filter)
+        json_dict = asdict(self, dict_factory=self.default_json_filter)
+        remove_not_selected_rules(json_dict, self.profile_info.selected_rules_ids)
+        rearrange_references(json_dict)
+        rearrange_identifiers(json_dict)
+        json_dict = remove_empty_values(json_dict)
+        return json_dict

    def as_dict(self):
        return asdict(self)
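
Illustrative note (not part of the diff): default_json_filter acts as a dict_factory for dataclasses.asdict, which calls it with a list of (key, value) pairs for every dataclass it flattens, so only keys listed in JSON_REPORT_CONTENT survive. A minimal standalone sketch with a made-up dataclass and key list:

from dataclasses import asdict, dataclass


@dataclass
class Demo:  # hypothetical dataclass, not part of openscap-report
    title: str
    internal_note: str


ALLOWED_DEMO_KEYS = ["title"]


def demo_filter(pairs):
    # Keep only the whitelisted keys, mirroring default_json_filter above.
    return {key: value for (key, value) in pairs if key in ALLOWED_DEMO_KEYS}


print(asdict(Demo("Example", "dropped"), dict_factory=demo_filter))
# {'title': 'Example'}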
openscap_report/scap_results_parser/data_structures/result_of_scan.py
@@ -3,6 +3,24 @@

from dataclasses import asdict, dataclass, field

SCAN_JSON_KEYS = [
    "title",
    "identity",
    "profile_id",
    "target",
    "cpe_platforms",
    "scanner",
    "scanner_version",
    "benchmark_url",
    "benchmark_id",
    "benchmark_version",
    "start_time",
    "end_time",
    "test_system",
    "score",
    "score_max",
]


@dataclass
class ResultOfScan: # pylint: disable=R0902
17 changes: 17 additions & 0 deletions openscap_report/scap_results_parser/data_structures/rule.py
@@ -11,6 +11,23 @@
from .remediation import Remediation
from .warning import RuleWarning

RULE_JSON_KEYS = [
    "rule_id",
    "title",
    "result",
    "time",
    "severity",
    "identifiers",
    "references",
    "description",
    "rationale",
    "warnings",
    "platforms",
    "oval_definition_id",
    "messages",
    "remediations",
]


@dataclass
class Rule: # pylint: disable=R0902
openscap_report/scap_results_parser/data_structures/warning.py
@@ -2,6 +2,11 @@
# SPDX-License-Identifier: LGPL-2.1-or-later
from dataclasses import asdict, dataclass

WARNING_JSON_KEYS = [
    "text",
    "category",
]


@dataclass
class RuleWarning:
2 changes: 2 additions & 0 deletions tests/constants.py
@@ -30,3 +30,5 @@
PATH_TO_XCCDF_WITHOUT_INFO = Path(__file__).parent / "test_data/xccdf-with-removed-info.xml"
PATH_TO_XCCDF_WITHOUT_SYSTEM_DATA = Path(__file__).parent / "test_data/xccdf_no_system_data.xml"
PATH_TO_XCCDF_WITH_MULTI_CHECK = Path(__file__).parent / "test_data/xccdf_multi_check.xml"

PATH_TO_JSON_SCHEMA = Path(__file__).parent / "json_schema_of_report.json"
30 changes: 30 additions & 0 deletions tests/integration_tests/test_json.py
@@ -0,0 +1,30 @@
# Copyright 2022, Red Hat, Inc.
# SPDX-License-Identifier: LGPL-2.1-or-later

import json

import pytest
from jsonschema import validate

from openscap_report.debug_settings import DebugSetting
from openscap_report.report_generators.json import JSONReportGenerator
from tests.unit_tests.test_data_structure import get_parser, get_report

from ..constants import PATH_TO_ARF, PATH_TO_JSON_SCHEMA


@pytest.mark.integration_test
def test_json_structure_with_schema():
    json_schema = None
    with open(PATH_TO_JSON_SCHEMA, "r", encoding="utf-8") as schema_file:
        json_schema = json.load(schema_file)
    json_gen = JSONReportGenerator(get_parser(PATH_TO_ARF))
    json_data = json_gen.generate_report(DebugSetting()).read().decode("utf-8")
    validate(json.loads(json_data), json_schema)


@pytest.mark.integration_test
def test_json_count_of_rules():
    report = get_report()
    json_dict = report.as_dict_for_default_json()
    assert len(json_dict["rules"]) == 714