Skip to content

Commit

Permalink
Remove profiling code, and fix linter errors and tests
Browse files Browse the repository at this point in the history
  • Loading branch information
vodorok committed Nov 23, 2022
1 parent 93351cd commit b47b45c
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 41 deletions.
50 changes: 21 additions & 29 deletions web/server/codechecker_server/api/mass_store_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@
from ..metadata import checker_is_unavailable, MetadataInfoParser

from .report_server import ThriftRequestHandler
from .thrift_enum_helper import report_extended_data_type_str, review_status_enum
from .thrift_enum_helper import report_extended_data_type_str


LOG = get_logger('server')
Expand Down Expand Up @@ -725,7 +725,7 @@ def __add_report(
""" Add report to the database. """
try:
checker_name = report.checker_name

# Cache the severity of the checkers
try:
severity = self.__severity_map[checker_name]
Expand Down Expand Up @@ -815,7 +815,8 @@ def __process_report_file(
if not reports:
return True

def get_review_status_from_source(report: Report) -> Dict[str, Any]:
def get_review_status_from_source(
report: Report) -> Tuple[Dict[str, Any], bool]:
"""
Return the review status belonging to the given report and a
boolean value which indicates whether the review status comes from
Expand Down Expand Up @@ -853,7 +854,7 @@ def get_review_status_from_source(report: Report) -> Dict[str, Any]:
review_status["status"] = rs[0]
review_status["message"] = rs[1]

return review_status
return review_status, True
elif len(src_comment_data) > 1:
LOG.warning(
"Multiple source code comment can be found "
Expand All @@ -865,8 +866,7 @@ def get_review_status_from_source(report: Report) -> Dict[str, Any]:
f"{source_file_name}|{report.line}|"
f"{report.checker_name}")

return review_status

return review_status, False

def get_missing_file_ids(report: Report) -> List[str]:
""" Returns file paths which database file id is missing. """
Expand Down Expand Up @@ -919,9 +919,7 @@ def get_missing_file_ids(report: Report) -> List[str]:
analyzer_name = mip.checker_to_analyzer.get(
report.checker_name, report.analyzer_name)

# TODO do this in bulk
rs_from_source = get_review_status_from_source(report)
scc = True if rs_from_source else False
rs_from_source, scc = get_review_status_from_source(report)

# False positive and intentional reports are considered as closed
# reports which is indicated with non-null "fixed_at" date.
Expand All @@ -938,7 +936,8 @@ def get_missing_file_ids(report: Report) -> List[str]:
rs_from_source, scc, detection_status, detected_at,
run_history_time, analysis_info, analyzer_name, fixed_at)

self.__new_report_hashes[report.report_hash] = rs_from_source["status"]
self.__new_report_hashes[report.report_hash] = \
rs_from_source["status"]
self.__already_added_report_hashes.add(report_path_hash)

LOG.debug("Storing report done. ID=%d", report_id)
Expand Down Expand Up @@ -1001,7 +1000,6 @@ def get_skip_handler(
if not report_file.is_supported(f):
continue


for root_dir_path, _, report_file_paths in os.walk(report_dir):
LOG.debug("Get reports from '%s' directory", root_dir_path)

Expand All @@ -1025,13 +1023,13 @@ def get_skip_handler(
processed_result_file_count += 1

# Get all relevant review_statuses for the newly stored reports
#all_review_statues = session.query(ReviewStatus.bug_hash, ReviewStatus.status, DBReport.id) \
reports_to_rs_rules = session.query(ReviewStatus, DBReport.id) \
# TODO: Call self.getReviewStatusRules instead of the below query,
# but first check the performance implications.
reports_to_rs_rules = session.query(ReviewStatus, DBReport) \
.join(DBReport, DBReport.bug_id == ReviewStatus.bug_hash) \
.filter(sqlalchemy.and_(DBReport.run_id == run_id, # Might be faster to query without condition
ReviewStatus.bug_hash.in_(self.__new_report_hashes)))
# TODO Call self.getReviewStatusRules instead of the above query

.filter(sqlalchemy.and_(DBReport.run_id == run_id,
ReviewStatus.bug_hash.
in_(self.__new_report_hashes)))

# Create the sqlalchemy mappings for the bulk update
review_status_change = []
Expand All @@ -1043,24 +1041,18 @@ def get_skip_handler(
rs_info[0].date)
review_status_change.append(
{
"id" : rs_info[1],
"id": rs_info[1].id,
"review_status": rs_info[0].status,
"review_status_author": rs_info[0].author,
"review_status_date": rs_info[0].date,
"review_status_is_in_source" : False,
"review_status_message" : rs_info[0].message
"review_status_is_in_source": False,
"review_status_message": rs_info[0].message,
"fixed_at": rs_info[1].review_status_date
}
)
print(review_status_change)
)
# Update all newly stored reports if there are any rs-rules for them
session.bulk_update_mappings(DBReport, review_status_change)


# Update all newly stored reports if there are any rev-stat rules for them
# Find if any newly stored report need a "custom" review status.

# TODO Ask Tibi about reports already in DB.


LOG.info("[%s] Processed %d analyzer result file(s).", self.__name,
processed_result_file_count)

Expand Down
14 changes: 2 additions & 12 deletions web/server/codechecker_server/api/report_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,8 @@
import zlib

from copy import deepcopy
import cProfile
import pstats

from collections import defaultdict
from datetime import date, datetime, timedelta
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Set, Tuple

import sqlalchemy
Expand Down Expand Up @@ -3322,14 +3319,7 @@ def massStoreRun(self, name, tag, version, b64zip, force,
from codechecker_server.api.mass_store_run import MassStoreRun
m = MassStoreRun(self, name, tag, version, b64zip, force,
trim_path_prefixes, description)
with cProfile.Profile() as pr:
res = m.store()

stats = pstats.Stats(pr)
stats.sort_stats(pstats.SortKey.TIME)
stats.dump_stats(f"/home/ekutgab/ws/tools/snakeviz/dump_{datetime.timestamp(datetime.now())}.prof")

return res
return m.store()

@exc_to_thrift_reqfail
@timeit
Expand Down

0 comments on commit b47b45c

Please sign in to comment.