Merge pull request #1 from KevinDeJong-TomTom/fix/too-many-reports
fix: add message when the number of annotations exceeds the limit
KevinDeJong-TomTom committed Aug 6, 2021
2 parents 0ad4ccf + 9fb1e66 commit 1c3019a
Showing 5 changed files with 149 additions and 24 deletions.
8 changes: 4 additions & 4 deletions bitbucket_code_insight_reporter/bitbucket.py
@@ -52,10 +52,10 @@ def _annotations_url(self, project_key, repository_slug, commit_id, report_key):
     def delete_code_insights_report(
         self, project_key, repository_slug, commit_id, report_key
     ):
-        val = requests.delete(
+        requests.delete(
             self._report_url(project_key, repository_slug, commit_id, report_key),
             auth=self._auth,
-        )
+        ).raise_for_status()
 
     def create_code_insights_report(
         self, project_key, repository_slug, commit_id, report_key, **report
@@ -64,7 +64,7 @@ def create_code_insights_report(
             self._report_url(project_key, repository_slug, commit_id, report_key),
             json=report,
             auth=self._auth,
-        )
+        ).raise_for_status()
 
     def add_code_insights_annotations_to_report(
         self, project_key, repository_slug, commit_id, report_key, annotations
@@ -73,4 +73,4 @@ def add_code_insights_annotations_to_report(
             self._annotations_url(project_key, repository_slug, commit_id, report_key),
             json={"annotations": annotations},
             auth=self._auth,
-        )
+        ).raise_for_status()
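
All three methods now chain raise_for_status() onto the requests call, so a 4xx/5xx answer from Bitbucket surfaces as a requests.HTTPError instead of being silently discarded (previously the delete call even stored the response in an unused val variable). A minimal sketch of the pattern, with a placeholder URL and credentials:

import requests

def delete_report(url, auth):
    # raise_for_status() raises requests.HTTPError for any 4xx/5xx response,
    # letting callers handle failures with try/except instead of inspecting
    # the returned Response object.
    requests.delete(url, auth=auth).raise_for_status()

try:
    delete_report("https://bitbucket.example.com/rest/insights/report", ("user", "token"))
except requests.HTTPError as error:
    print(f"Bitbucket rejected the request: {error}")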
12 changes: 11 additions & 1 deletion bitbucket_code_insight_reporter/generate.py
@@ -21,6 +21,7 @@
 
 from llvm_diagnostics import parser
 
+ANNOTATION_LIMIT = 1000
 LOGGER = logging.getLogger(__name__)


@@ -128,7 +129,16 @@ def generate(
         _report["report"]["logo-url"] = logo_url
 
     _annotations = retrieve_annotations_from_file(llvm_logging, workspace)
-    if _annotations:
+    if _annotations and len(_annotations) < ANNOTATION_LIMIT:
         _report["annotations"] = _annotations
+    else:
+        _warning = f"NOTE: The number of code annotations ({len(_annotations)}) exceeded the limit ({ANNOTATION_LIMIT})!"
+        if details:
+            _report["report"]["details"] += f"{os.linesep}{os.linesep}{_warning}"
+        else:
+            _report["report"]["details"] = _warning
+
+    if _annotations and len(_annotations) < ANNOTATION_LIMIT:
+        _report["annotations"] = _annotations
 
     _failure = len(_annotations) > 0
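Bitbucket caps how many annotations a single Code Insights report may carry; the new ANNOTATION_LIMIT guard keeps oversized runs from failing the upload and records a note in the report details instead. A standalone sketch of that logic, using a hypothetical helper name and the same {"report": {...}} shape as above (unlike the diff, it warns only when the limit is genuinely exceeded, so an empty annotation list stays silent):

import os

ANNOTATION_LIMIT = 1000  # mirrors the constant introduced in generate.py

def attach_annotations(report, annotations, details=None):
    # Hypothetical helper illustrating the guard added above.
    if annotations and len(annotations) >= ANNOTATION_LIMIT:
        _warning = (
            f"NOTE: The number of code annotations ({len(annotations)}) "
            f"exceeded the limit ({ANNOTATION_LIMIT})!"
        )
        if details:
            report["report"]["details"] += f"{os.linesep}{os.linesep}{_warning}"
        else:
            report["report"]["details"] = _warning
    elif annotations:
        report["annotations"] = annotations
    return report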
44 changes: 25 additions & 19 deletions bitbucket_code_insight_reporter/report.py
@@ -18,6 +18,8 @@
 import json
 import logging
 
+from requests.models import HTTPError
+
 from .bitbucket import Bitbucket
@@ -108,29 +110,33 @@ def report(
             report_title=_report["report"]["title"],
             **_report["report"],
         )
-    except:
-        print("ERR - Failed to create new Code Insight Report")
+    except HTTPError as e:
+        logging.error("Failed to create new Code Insight Report")
+        logging.error(e)
         return 1
 
     _annotations = _report.get("annotations", None)
-    if not _annotations:
-        return 0
-
-    logging.debug(
-        f"""\
-Project: {bitbucket_project}
-Repository: {repository_slug}
-Commit Hash: {commit_hash}
-Annotations: {json.dumps(_annotations, indent=4, sort_keys=True)}"""
-    )
+    if _annotations:
+        logging.debug(
+            f"""\
+Project: {bitbucket_project}
+Repository: {repository_slug}
+Commit Hash: {commit_hash}
+Annotations: {json.dumps(_annotations, indent=4, sort_keys=True)}"""
+        )
 
-    bitbucket.add_code_insights_annotations_to_report(
-        project_key=bitbucket_project,
-        repository_slug=repository_slug,
-        commit_id=commit_hash,
-        report_key=_report_id,
-        annotations=_annotations,
-    )
+        try:
+            bitbucket.add_code_insights_annotations_to_report(
+                project_key=bitbucket_project,
+                repository_slug=repository_slug,
+                commit_id=commit_hash,
+                report_key=_report_id,
+                annotations=_annotations,
+            )
+        except HTTPError as e:
+            logging.error("Failed to add annotations to the Code Insight Report")
+            logging.error(e)
+            return 1
 
     logging.info("Done...")

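With HTTPError imported from requests, both Bitbucket calls in report() now follow the same shape: attempt the request, and on an HTTP failure log the error and return a non-zero exit code instead of crashing on a bare except. A condensed sketch of that pattern, with a hypothetical wrapper function:

import logging
from requests.models import HTTPError  # same import path as in report.py

def run_or_exit_code(action, failure_message):
    # Hypothetical wrapper: run a Bitbucket API call and translate an HTTP
    # failure into a logged message plus a CLI-style exit code.
    try:
        action()
    except HTTPError as error:
        logging.error(failure_message)
        logging.error(error)
        return 1
    return 0

report() applies this shape inline twice: once when creating the report and once when uploading annotations.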
1 change: 1 addition & 0 deletions setup.py
@@ -41,6 +41,7 @@
     install_requires=(
         'Click>=7,<8',
         'llvm_diagnostics>=0<1',
+        'requests>=2.25.1,<3',
     ),
     entry_points={
         'console_scripts': [
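The new pin admits any requests release from 2.25.1 up to, but excluding, 3.0, matching the style of the existing entries. A quick way to confirm what a given environment resolved to:

import requests

# Expected to print a version satisfying >=2.25.1,<3 once the pin is in effect.
print(requests.__version__)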
108 changes: 108 additions & 0 deletions tests/test_generator.py
@@ -0,0 +1,108 @@
#!/usr/bin/env python3

# Copyright (c) 2021 - 2021 TomTom N.V.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import llvm_diagnostics
from llvm_diagnostics.messages import DiagnosticsLevel, DiagnosticsMessage
import pytest
from unittest.mock import Mock

from bitbucket_code_insight_reporter import generate


@pytest.mark.parametrize(
    "input, expectation",
    (
        ("error", "HIGH"),
        ("warning", "MEDIUM"),
        ("note", "LOW"),
        ("random_string", "LOW"),
        (123, "LOW"),
    ),
)
def test_get_severity_for_level(input, expectation):
    assert generate.get_severity_for_level(input) == expectation


def test_retrieve_annotations_from_file():
    llvm_diagnostics.parser.diagnostics_messages_from_file = Mock(
        return_value=[
            DiagnosticsMessage(
                "first_test.py", 1, 1, "test message", DiagnosticsLevel.ERROR
            ),
            DiagnosticsMessage(
                "second_test.py", 2, 2, "test message", DiagnosticsLevel.WARNING
            ),
        ]
    )

    _expectations = [
        {
            "path": "first_test.py",
            "message": "test message",
            "line": 1,
            "severity": "HIGH",
        },
        {
            "path": "second_test.py",
            "message": "test message",
            "line": 2,
            "severity": "MEDIUM",
        },
    ]

    assert generate.retrieve_annotations_from_file(None, None) == _expectations


def test_retrieve_annotations_from_empty_file():
    llvm_diagnostics.parser.diagnostics_messages_from_file = Mock(return_value=[])
    assert generate.retrieve_annotations_from_file(None, None) == []


def test_retrieve_annotations_from_file_workspace():
    llvm_diagnostics.parser.diagnostics_messages_from_file = Mock(
        return_value=[DiagnosticsMessage("/code/first_test.py", 1, 1, "test message")]
    )

    _expectations = [
        {
            "path": "first_test.py",
            "message": "test message",
            "line": 1,
            "severity": "HIGH",
        }
    ]

    assert generate.retrieve_annotations_from_file(None, "/code/") == _expectations


def test_retrieve_annotations_from_file_wrong_workspace():
    llvm_diagnostics.parser.diagnostics_messages_from_file = Mock(
        return_value=[DiagnosticsMessage("/code/first_test.py", 1, 1, "test message")]
    )

    _expectations = [
        {
            "path": "/code/first_test.py",
            "message": "test message",
            "line": 1,
            "severity": "HIGH",
        }
    ]

    assert (
        generate.retrieve_annotations_from_file(None, "/wrong_workspace/")
        == _expectations
    )
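
The new tests stub out llvm_diagnostics.parser.diagnostics_messages_from_file with unittest.mock.Mock, so they exercise generate.retrieve_annotations_from_file and generate.get_severity_for_level without reading real diagnostics files. Assuming a standard checkout, they could be run with:

python -m pytest tests/test_generator.py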
