Merge branch 'master' into issue-528_9
aravij committed Jul 31, 2020
2 parents 37fdcb8 + 0382fdc commit 09fbc8f
Showing 45 changed files with 2,212 additions and 1,057 deletions.
11 changes: 11 additions & 0 deletions README.md
@@ -8,9 +8,20 @@
[![Maintainability](https://api.codeclimate.com/v1/badges/fd7e32d8472b4d5e8ecb/maintainability)](https://codeclimate.com/github/cqfn/aibolit/maintainability)
[![License](https://img.shields.io/badge/license-MIT-green.svg)](https://github.com/cqfn/aibolit/blob/master/LICENSE.txt)


## How does it work?

Learn how Aibolit works in our [White Paper](https://github.com/cqfn/aibolit/releases/download/1.2.5-post.1/aibolit_wp.pdf).



## How to use?

First, install it (you must have [Python 3.7.7](https://www.python.org/downloads/)
and [Pip](https://pip.pypa.io/en/stable/installing/) installed):



```bash
$ pip3 install aibolit
```
143 changes: 85 additions & 58 deletions aibolit/__main__.py
@@ -49,6 +49,7 @@
from aibolit.config import Config
from aibolit.ml_pipeline.ml_pipeline import train_process, collect_dataset
from aibolit.utils.ast_builder import build_ast
from javalang.parser import JavaSyntaxError

dir_path = os.path.dirname(os.path.realpath(__file__))

@@ -259,7 +260,7 @@ def traverse(node):

def calculate_patterns_and_metrics(file, args):
code_lines_dict = input_params = {} # type: ignore
error_string = None
error_exc = None
patterns_to_suppress = args.suppress
try:
config = Config.get_patterns_config()
@@ -276,10 +277,10 @@ def calculate_patterns_and_metrics(file, args):
continue
__count_value(metric, input_params, code_lines_dict, file, is_metric=True)
except Exception as ex:
error_string = str(ex)
error_exc = ex
input_params = [] # type: ignore

return input_params, code_lines_dict, error_string
return input_params, code_lines_dict, error_exc


def inference(
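
The hunks above replace the stringified error (`error_string = str(ex)`) with the exception object itself (`error_exc = ex`), so callers can later branch on the exception type instead of parsing message text. A minimal sketch of that idea, assuming only that `parse` is some callable that may raise; this is not aibolit's actual API:

```python
from javalang.parser import JavaSyntaxError


def analyze(parse, source):
    """Run `parse` on `source`; return (result, error) where `error`
    is the exception object itself, not its str()."""
    try:
        return parse(source), None
    except Exception as ex:  # keep the object, not str(ex)
        return None, ex


def describe(error):
    if error is None:
        return "ok"
    if isinstance(error, JavaSyntaxError):
        # expected for files that do not parse; report it differently
        return "skipped: Java syntax error"
    return "failed: {}".format(str(error) or type(error).__name__)
```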
@@ -331,63 +332,82 @@ def inference(
return results


def run_recommend_for_file(file: str, args):
def create_results(
input_params: List[int],
code_lines_dict: Dict[Any, Any],
args: argparse.Namespace,
classes_with_patterns_ignored: List[Any],
patterns_ignored: Dict[Any, Any]):
results_list = inference(input_params, code_lines_dict, args)
new_results: List[Any] = []
for pattern_item in results_list:
# check if the whole class is suppressed
if pattern_item['pattern_code'] not in classes_with_patterns_ignored:
# then check if patterns are ignored in fields or functions
add_pattern_if_ignored(patterns_ignored, pattern_item, new_results)
# add_pattern_if_ignored(patterns_for_fields_ignored, pattern_item, new_results)
else:
continue

return new_results


def run_recommend_for_file(file: str, args): # flake8: noqa
"""
Calculate patterns and metrics, pass values to the model, and suggest patterns to change
:param file: file to analyze
:param args: different command line arguments
:return: dict with code lines, filename and pattern name
"""
java_file = str(Path(os.getcwd(), file))
tree = build_ast(file)
classes_with_annonations = find_annotation_by_node_type(tree, javalang.tree.ClassDeclaration)
functions_with_annotations = find_annotation_by_node_type(tree, javalang.tree.MethodDeclaration)
fields_with_annotations = find_annotation_by_node_type(tree, javalang.tree.FieldDeclaration)
classes_with_patterns_ignored = flatten(
[pattern_code for node, pattern_code in classes_with_annonations.items()])
patterns_ignored = defaultdict(list)

for node, patterns_list in functions_with_annotations.items():
start_pos, end_pos = find_start_and_end_lines(node)
for p in patterns_list:
patterns_ignored[p].append([start_pos, end_pos])

for node, patterns_list in fields_with_annotations.items():
for p in patterns_list:
patterns_ignored[p].append([node.position.line, node.position.line])

input_params, code_lines_dict, error_string = calculate_patterns_and_metrics(java_file, args)

if not input_params:
results_list = [] # type: ignore
error_string = 'Empty java file; ncss = 0'
# deepcode ignore ExpectsIntDislikesStr: False positive
elif input_params['M2'] == 0:
results_list = [] # type: ignore
error_string = 'Empty java file; ncss = 0'
else:
results_list = inference(input_params, code_lines_dict, args)
new_results: List[Any] = []
for pattern_item in results_list:
# check if the whole class is suppressed
if pattern_item['pattern_code'] not in classes_with_patterns_ignored:
# then check if patterns are ignored in fields or functions
add_pattern_if_ignored(patterns_ignored, pattern_item, new_results)
# add_pattern_if_ignored(patterns_for_fields_ignored, pattern_item, new_results)
else:
continue

results_list = new_results

if error_string:
try:
tree = build_ast(file)
classes_with_annonations = find_annotation_by_node_type(tree, javalang.tree.ClassDeclaration)
functions_with_annotations = find_annotation_by_node_type(tree, javalang.tree.MethodDeclaration)
fields_with_annotations = find_annotation_by_node_type(tree, javalang.tree.FieldDeclaration)
classes_with_patterns_ignored = flatten(
[pattern_code for node, pattern_code in classes_with_annonations.items()])
patterns_ignored = defaultdict(list)

for node, patterns_list in functions_with_annotations.items():
start_pos, end_pos = find_start_and_end_lines(node)
for p in patterns_list:
patterns_ignored[p].append([start_pos, end_pos])

for node, patterns_list in fields_with_annotations.items():
for p in patterns_list:
patterns_ignored[p].append([node.position.line, node.position.line])

input_params, code_lines_dict, error_exception = calculate_patterns_and_metrics(java_file, args)

if not input_params:
results_list = [] # type: ignore
error_exception = 'Empty java file; ncss = 0'
# deepcode ignore ExpectsIntDislikesStr: False positive
elif input_params['M2'] == 0:
results_list = [] # type: ignore
error_exception = 'Empty java file; ncss = 0'
else:
results_list = create_results(
input_params,
code_lines_dict,
args,
classes_with_patterns_ignored,
patterns_ignored)

except Exception as e:
error_exception = e
results_list = []

if error_exception:
ncss = 0
else:
ncss = input_params.get('M4', 0)

return {
'filename': file,
'results': results_list,
'error_string': error_string,
'exception': error_exception,
'ncss': ncss,
}
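
The refactored `run_recommend_for_file` now wraps parsing and metric calculation in a single `try/except`, delegates the suppression filtering to `create_results`, and returns a per-file dict with `filename`, `results`, `exception`, and `ncss` keys. A small illustrative consumer of that dict shape (the sample data below is made up, not real aibolit output):

```python
def summarize(results):
    """Tally per-file outcomes from dicts shaped like run_recommend_for_file's result."""
    summary = {"clean": 0, "with_patterns": 0, "failed": 0}
    for item in results:
        if item["exception"] is not None:
            summary["failed"] += 1
        elif item["results"]:
            summary["with_patterns"] += 1
        else:
            summary["clean"] += 1
    return summary


example = [
    {"filename": "A.java", "results": [], "exception": None, "ncss": 12},
    {"filename": "B.java", "results": [{"pattern_code": "P13"}], "exception": None, "ncss": 40},
    {"filename": "C.java", "results": [], "exception": ValueError("boom"), "ncss": 0},
]
print(summarize(example))  # {'clean': 1, 'with_patterns': 1, 'failed': 1}
```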

@@ -411,7 +431,7 @@ def create_xml_tree(results, full_report, cmd, exit_code):
files_number_tag.addprevious(etree.Comment('Files with found patterns'))
files_number_tag.text = str(len(
[x for x in results
if (not x.get('error_string') and x.get('results'))]
if (not x.get('exception') and x.get('results'))]
))
patterns_number_tag = etree.SubElement(header_tag, 'patterns')
ncss_tag = etree.SubElement(header_tag, 'ncss')
@@ -433,7 +453,8 @@ def create_xml_tree(results, full_report, cmd, exit_code):
output_string_tag = etree.SubElement(file, 'summary')
name.text = filename
results_item = result_for_file.get('results')
errors_string = result_for_file.get('error_string')
ex = result_for_file.get('exception')
errors_string = (str(ex) or type(ex).__name__) if ex else None
if not results_item and not errors_string:
output_string = 'Your code is perfect in aibolit\'s opinion'
output_string_tag.text = output_string
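
The `<summary>` tag now prefers the exception's message and falls back to its class name, since `str()` of a message-less exception is an empty string. A tiny sketch of that idiom with the no-error case handled explicitly (a hypothetical helper, not the function above):

```python
def format_error(ex):
    """Return a human-readable error label, or None when there was no error."""
    if ex is None:
        return None
    # prefer the message; fall back to the class name when the message is empty
    return str(ex) or type(ex).__name__


print(format_error(None))                     # None
print(format_error(ValueError("bad token")))  # bad token
print(format_error(ValueError()))             # ValueError
```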
@@ -486,15 +507,21 @@ def get_exit_code(results):
files_analyzed = len(results)
errors_number = 0
perfect_code_number = 0
errors_strings = []
for result_for_file in results:
results = result_for_file.get('results')
errors_string = result_for_file.get('error_string')
if not results and not errors_string:
ex = result_for_file.get('exception')
if not results and not ex:
perfect_code_number += 1
elif not results and errors_string:
errors_number += 1
elif not results and ex:
if not isinstance(ex, JavaSyntaxError):
errors_strings.append(ex)
errors_number += 1
else:
# ignore JavaSyntaxError, it is an expected error
perfect_code_number += 1

if errors_number == files_analyzed:
if len(errors_strings) == files_analyzed:
# we have errors everywhere
exit_code = 2
elif perfect_code_number == files_analyzed:
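
With this change `get_exit_code` treats `JavaSyntaxError` as an expected condition: files that fail only because they do not parse are counted as clean rather than as errors. A simplified sketch of that counting idea (not the exact exit-code function above):

```python
from javalang.parser import JavaSyntaxError


def classify(results):
    """Count files as clean / with findings / hard error, ignoring JavaSyntaxError."""
    clean = findings = hard_errors = 0
    for item in results:
        ex = item.get("exception")
        if item.get("results"):
            findings += 1
        elif ex is None or isinstance(ex, JavaSyntaxError):
            clean += 1
        else:
            hard_errors += 1
    return clean, findings, hard_errors
```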
@@ -518,17 +545,17 @@ def create_text(results, full_report, is_long=False):
for result_for_file in results:
filename = result_for_file.get('filename')
results_item = result_for_file.get('results')
errors_string = result_for_file.get('error_string')
if not results_item and not errors_string:
ex = result_for_file.get('exception')
if not results_item and not ex:
# Do nothing, patterns were not found
pass
if not results_item and errors_string:
if not results_item and ex:
output_string = '{}: error when calculating patterns: {}'.format(
filename,
str(errors_string)
str(ex) or type(ex).__name__
)
buffer.append(output_string)
elif results_item and not errors_string:
elif results_item and not ex:
# get unique patterns score
patterns_scores = print_total_score_for_file(buffer, filename, importances_for_all_classes, result_for_file)
patterns_number = len(patterns_scores)
7 changes: 7 additions & 0 deletions aibolit/ast_framework/__init__.py
@@ -1,3 +1,10 @@
from aibolit.ast_framework.ast_node_type import ASTNodeType # noqa: F401
from aibolit.ast_framework.ast_node import ASTNode # noqa: F401
from aibolit.ast_framework.ast import AST # noqa: F401

# register all standard computed fields from 'computed_fields_catalog'
from aibolit.ast_framework.computed_fields_catalog.standard_fields import (
register_standard_computed_properties,
)

register_standard_computed_properties()
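
The new `__init__.py` registers the standard computed fields as a side effect of importing `aibolit.ast_framework`; because Python caches imported modules, the registration runs exactly once per process. A toy sketch of that register-on-import pattern (the names below are illustrative, not the real `computed_fields_catalog` API):

```python
# toy_registry.py -- illustrative only, not aibolit's implementation
_REGISTRY = {}


def register_computed_property(name, compute):
    if name in _REGISTRY:
        raise ValueError("property {!r} registered twice".format(name))
    _REGISTRY[name] = compute


def register_standard_computed_properties():
    # hypothetical standard fields operating on dict-shaped nodes
    register_computed_property("child_count", lambda node: len(node.get("children", ())))
    register_computed_property("is_leaf", lambda node: not node.get("children"))


# placed at the bottom of the package's __init__.py, this runs on first import
register_standard_computed_properties()
```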
