
Commit

Merge branch 'develop'
MagielBruntink committed Feb 8, 2021
2 parents 9f10128 + 8a736b7 commit 4fb94b7
Showing 8 changed files with 91 additions and 53 deletions.
21 changes: 16 additions & 5 deletions entrypoint.py
@@ -23,7 +23,7 @@

plugin_name = 'RapidPlugin'
plugin_description = 'A FASTEN plug-in to populate risk related metadata for a product.'
plugin_version = '1.1.3'
plugin_version = '1.2.0'


def get_args_parser():
@@ -34,15 +34,19 @@ def get_args_parser():
help="Kafka topic to consume from.")

args_parser.add_argument('--produce_topic', type=str,
default='fasten.RapidPlugin.callable.out',
help="Kafka topic to produce to.")
default='fasten.RapidPlugin.out',
help="Kafka topic to produce revision-level message to.")

args_parser.add_argument('--produce_callable_topic', type=str,
default='fasten.RapidPlugin.callable.out',
help="Kafka topic to produce callable-level messages to.")

args_parser.add_argument('--err_topic', type=str,
default='fasten.RapidPlugin.callable.err',
default='fasten.RapidPlugin.err',
help="Kafka topic to write errors to.")

args_parser.add_argument('--log_topic', type=str,
default='fasten.RapidPlugin.callable.log',
default='fasten.RapidPlugin.log',
help="Kafka topic to write logs to.")

args_parser.add_argument('--bootstrap_servers', type=str,
@@ -64,6 +68,11 @@ def get_args_parser():
args_parser.add_argument('--sources_dir', type=str,
default='src',
help="Base directory for temporary storing downloaded source code.")

args_parser.add_argument('--max_log_message_width', type=int,
default=320,
help="Maximum number of characters before a log message will be truncated.")

return args_parser


@@ -72,12 +81,14 @@ def get_config(args):
c.add_config_value('bootstrap_servers', args.bootstrap_servers)
c.add_config_value('consume_topic', args.consume_topic)
c.add_config_value('produce_topic', args.produce_topic)
c.add_config_value('produce_callable_topic', args.produce_callable_topic)
c.add_config_value('err_topic', args.err_topic)
c.add_config_value('log_topic', args.log_topic)
c.add_config_value('group_id', args.group_id)
c.add_config_value('consumption_delay_sec', args.consumption_delay_sec)
c.add_config_value('consumer_timeout_ms', args.consumer_timeout_ms)
c.add_config_value('sources_dir', args.sources_dir)
c.add_config_value('max_log_message_width', args.max_log_message_width)
return c
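
For reference, a minimal sketch of how the two new options are exercised through the helpers above. The snippet is illustrative and not part of the commit; the topic names are the defaults from this diff, and get_config_value is assumed to return the stored value unchanged.

# Illustrative only (not part of the commit):
parser = get_args_parser()
args = parser.parse_args([
    '--produce_topic', 'fasten.RapidPlugin.out',                    # revision-level messages
    '--produce_callable_topic', 'fasten.RapidPlugin.callable.out',  # callable-level messages
    '--max_log_message_width', '320',
])
config = get_config(args)
assert config.get_config_value('produce_callable_topic') == 'fasten.RapidPlugin.callable.out'
assert config.get_config_value('max_log_message_width') == 320   # parsed as int by argparse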


11 changes: 6 additions & 5 deletions integration_tests/test_plugin.py
@@ -41,7 +41,7 @@ def mock_out():
def mock_log():
mock = MockConsumer('MockConsumerLog',
'localhost:9092',
'fasten.RapidPlugin.callable.log')
'fasten.RapidPlugin.log')
mock.skip_messages()
yield mock
mock.free_resource()
@@ -51,7 +51,7 @@ def mock_log():
def mock_err():
mock = MockConsumer('MockConsumerErr',
'localhost:9092',
'fasten.RapidPlugin.callable.err')
'fasten.RapidPlugin.err')
mock.skip_messages()
yield mock
mock.free_resource()
@@ -111,14 +111,15 @@ def plugin_run(mock_in, mock_out, mock_log, mock_err,
"commitTag": "1.0.0"
},
{
"fasten.RepoCloner.out": {
"payload":
{
"input" : {
"input": {
"payload": {
"forge": "debian",
"product": "d1",
"version": "1.0.0",
"sourcePath": "/home/plugin/rapidplugin/tests/resources/debian/d1"
}
}
}
},
{
22 changes: 11 additions & 11 deletions rapidplugin/analysis/lizard_analyzer.py
@@ -39,17 +39,17 @@ def analyze(self, payload):
version = payload['version']
with MavenUtils.get_source_path(payload, self.base_dir) as path:
package = LizardPackage(forge, product, version, str(path))
metadata = package.metadata()
for function in package.functions():
m = {}
m.update(metadata)
m.update(function.metadata())
m.update(function.metrics())
old_f = m['filename']
new_f = KafkaUtils.relativize_filename(old_f)
m.update({'filename': new_f})
out_payloads.append(m)
logger.debug("callable: {}".format(m) + '\n')
metadata = package.metadata()
for function in package.functions():
m = {}
m.update(metadata)
m.update(function.metadata())
m.update(function.metrics())
old_f = m['filename']
new_f = KafkaUtils.relativize_filename(old_f, str(path))
m.update({'filename': new_f})
out_payloads.append(m)
logger.debug("callable: {}".format(m) + '\n')
return out_payloads
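
For orientation, one out_payloads entry roughly combines the package metadata, callable metadata and metrics keys visible in this commit (see domain/package.py below). A sketch with invented values; the package-level key names are an assumption:

# Sketch only; field names follow this commit, values are invented,
# and the package-level keys (forge/product/version) are assumed.
example_out_payload = {
    "forge": "mvn", "product": "g:a", "version": "1.0.0",
    "filename": "src/main/java/Foo.java",      # relativized against the checkout path
    "callable_name": "bar",
    "callable_long_name": "bar(int x)",
    "callable_parameters": ["x"],
    "start_line": 10, "end_line": 20,
    "nloc": 8, "complexity": 2, "token_count": 40,
    "length": 11, "parameter_count": 1,
}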


2 changes: 1 addition & 1 deletion rapidplugin/domain/package.py
@@ -169,6 +169,7 @@ def metadata(self):
"filename": self.filename,
"callable_name": self.name,
"callable_long_name": self.long_name,
"callable_parameters": self.parameters,
"start_line": self.start_line,
"end_line": self.end_line
}
@@ -179,7 +180,6 @@ def metrics(self):
"nloc": self.nloc,
"complexity": self.complexity,
"token_count": self.token_count,
"parameters": self.parameters,
"length": self.length,
"parameter_count": self.parameter_count
}
23 changes: 15 additions & 8 deletions rapidplugin/rapid_plugin.py
@@ -14,6 +14,7 @@
#

import datetime
import textwrap
from time import sleep
from fasten.plugins.kafka import KafkaPluginNonBlocking
from rapidplugin.analysis.lizard_analyzer import LizardAnalyzer
@@ -34,12 +35,14 @@ def __init__(self, name, version, description, plugin_config):
super().__init__(self.plugin_config.get_config_value('bootstrap_servers'))
self.consume_topic = self.plugin_config.get_config_value('consume_topic')
self.produce_topic = self.plugin_config.get_config_value('produce_topic')
self.produce_callable_topic = self.plugin_config.get_config_value('produce_callable_topic')
self.log_topic = self.plugin_config.get_config_value('log_topic')
self.error_topic = self.plugin_config.get_config_value('err_topic')
self.group_id = self.plugin_config.get_config_value('group_id')
self.sources_dir = self.plugin_config.get_config_value('sources_dir')
self.consumer_timeout_ms = self.plugin_config.get_config_value('consumer_timeout_ms')
self.consumption_delay_sec = self.plugin_config.get_config_value('consumption_delay_sec')
self.max_log_message_width = self.plugin_config.get_config_value('max_log_message_width')
self.set_consumer_with_retry()
self.set_producer_with_retry()

@@ -104,8 +107,7 @@ def consume(self, record):
Arguments:
record (JSON): message from self.consume_topic
'''
record = KafkaUtils.extract_from_sync(record)
payload = record['payload'] if 'payload' in record else record
payload = KafkaUtils.extract_payload_from_metadata_db_ext_topic(record)
in_payload = KafkaUtils.tailor_input(payload)
try:
KafkaUtils.validate_message(payload)
@@ -117,7 +119,7 @@

def produce(self, in_payload):
'''
Produces quality analysis results to the produce_topic. A separate
Produces quality analysis results to the produce_topics. A separate
message will be emitted for each payload generated by the analyzer.
Arguments:
@@ -126,17 +128,22 @@
try:
analyzer = LizardAnalyzer(self.sources_dir)
out_payloads = analyzer.analyze(in_payload)
for out_payload in out_payloads:
self.produce_payload(in_payload, out_payload)
self.produce_revision(in_payload, out_payloads)
self.handle_success(in_payload,
"Quality analysis results produced.")
except Exception as e:
self.handle_failure(in_payload,
"Quality analysis failed for payload.", str(e))

def produce_payload(self, in_payload, out_payload):
def produce_revision(self, in_payload, out_payloads):
for out_payload in out_payloads:
self.produce_callable(in_payload, out_payload)
out_message = self.create_message(in_payload, {"callables_produced": len(out_payloads)})
self.emit_message(self.produce_topic, out_message, "[SUCCESS]", out_message)

def produce_callable(self, in_payload, out_payload):
out_message = self.create_message(in_payload, {"payload": out_payload})
self.emit_message(self.produce_topic, out_message, "[SUCCESS]",
self.emit_message(self.produce_callable_topic, out_message, "[SUCCESS]",
out_message)
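
The net effect of the produce_revision/produce_callable split, sketched with invented values (the real messages are wrapped by create_message; topic names are the new defaults from entrypoint.py):

# Illustrative fan-out only, not part of the commit.
out_payloads = [{"callable_name": "a"}, {"callable_name": "b"}]
for out_payload in out_payloads:
    # one callable-level message per analyzed function
    print('fasten.RapidPlugin.callable.out', {"payload": out_payload})
# plus one revision-level summary per consumed record
print('fasten.RapidPlugin.out', {"callables_produced": len(out_payloads)})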

def handle_failure(self, in_payload, failure, error):
@@ -172,7 +179,7 @@ def handle_success(self, in_payload, success):

def log(self, message):
super().log("{}: {}".format(
str(datetime.datetime.now()), message
str(datetime.datetime.now()), textwrap.shorten(message, width=self.max_log_message_width)
))
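
textwrap.shorten collapses runs of whitespace and truncates the text to the given width, appending a placeholder (" [...]" by default) when it has to cut. A standalone illustration, independent of the plugin:

import textwrap

print(textwrap.shorten("x " * 400, width=20))        # -> 'x x x x x x x [...]'
print(textwrap.shorten("short message", width=320))  # fits, returned unchanged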

def err(self, error):
10 changes: 5 additions & 5 deletions rapidplugin/tests/test_utils.py
@@ -92,9 +92,9 @@ def test_tailor_input(in_payload, out_payload):
tailored = KafkaUtils.tailor_input(in_payload)
assert tailored == out_payload

@pytest.mark.parametrize('old, new', [
('/private/var/folders/wr/qmdcbfsj4v98v8rwb11zjr5c0000gn/T/pytest-of-cgao/pytest-7/sources0/tmppsmmokh_/d1.c', 'd1.c')
@pytest.mark.parametrize('old, prefix, new', [
('/abs_path/rel_dir/d1.c', '/abs_path', 'rel_dir/d1.c')
])
def test_relativize_filename(old, new):
new_file_name = KafkaUtils.relativize_filename(old)
assert new_file_name == new
def test_relativize_filename(old, prefix, new):
new_file_name = KafkaUtils.relativize_filename(old, prefix)
assert Path(new_file_name) == Path(new)
53 changes: 36 additions & 17 deletions rapidplugin/utils/utils.py
@@ -14,7 +14,7 @@
#

from zipfile import ZipFile
from pathlib import Path
from pathlib import Path, PurePath
from git import Repo
from svn.local import LocalClient
import requests
@@ -42,10 +42,9 @@ def get_source_path(payload, base_dir):
if not base_dir.exists():
base_dir.mkdir(parents=True)
if payload['forge'] == "mvn":
source_path = MavenUtils.get_source_mvn(payload, base_dir)
return MavenUtils.get_source_mvn(payload, base_dir)
else:
source_path = MavenUtils.get_source_other(payload, base_dir)
return source_path
return MavenUtils.get_source_other(payload, base_dir)

@staticmethod
def get_source_mvn(payload, base_dir):
@@ -59,8 +58,7 @@ def get_source_mvn(payload, base_dir):
repo_path = payload['repoPath']
repo_type = payload['repoType']
commit_tag = payload['commitTag']
source_path = MavenUtils.checkout_version(base_dir, repo_path=repo_path, repo_type=repo_type, version_tag=commit_tag)
return source_path
return MavenUtils.checkout_version(base_dir, repo_path=repo_path, repo_type=repo_type, version_tag=commit_tag)

@staticmethod
def get_source_other(payload, base_dir):
@@ -70,14 +68,12 @@ def get_source_other(payload, base_dir):
assert source_path != "", \
f"Cannot get source code for '{payload['product']}:{payload['version']}', empty 'sourcePath."
assert os.path.isabs(source_path), "sourcePath: '{}' is not an absolute path!".format(source_path)
source_path = MavenUtils.copy_source(payload['sourcePath'], base_dir)
return source_path
return MavenUtils.copy_source(source_path, base_dir)

@staticmethod
def copy_source(source_path, base_dir):
tmp = TemporaryDirectory(dir=base_dir)
tmp_path = Path(tmp.name)
shutil.copytree(source_path, tmp_path, dirs_exist_ok=True)
shutil.copytree(source_path, tmp.name, dirs_exist_ok=True)
return tmp

@staticmethod
@@ -184,7 +180,8 @@ def tailor_input(payload):
"depset": [],
"build_depset": [],
"undeclared_depset": [],
"functions": {}
"functions": {},
"dependencyData" : {}
}
for key in tailor.keys():
if key in payload:
@@ -204,14 +201,36 @@ def extract_from_sync(payload):
return extract

@staticmethod
def relativize_filename(filename):
def extract_payload_from_metadata_db_ext_topic(received):
"""
Extract the payload from a MetaDataDB{Java|C|Python}Extension.out record.
:param received: record consumed from a MetaDataDB{Java|C|Python}Extension.out topic, see
https://github.com/fasten-project/fasten/wiki/Kafka-Topics#fastenmetadatadbextension
:return: extracted payload
"""
try: # MetaDataDBJavaExtension.out
received['input']['input']['payload']['forge']
return received['input']['input']['payload']
except KeyError: pass
try: # MetaDataDB{Python|C}Extension.out
received['input']['payload']['forge']
return received['input']['payload']
except KeyError: pass
try:
received['payload']['forge']
return received['payload']
except KeyError: pass
return received
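
A small usage sketch for the nesting handled above; the record contents are invented and only the key structure follows the code:

# Record in MetaDataDBJavaExtension.out shape (values invented).
record = {
    "input": {
        "input": {
            "payload": {"forge": "mvn", "product": "g:a", "version": "1.0.0"}
        }
    }
}
payload = KafkaUtils.extract_payload_from_metadata_db_ext_topic(record)
assert payload["forge"] == "mvn"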

@staticmethod
def relativize_filename(filename, prefix):
"""
Extract the relative path of the source code file.
:param filename: absolute path included by Lizard tool,
e.g. 'work_directory/tmppsmmokh_/d1.c'
e.g. '/abs_path/rel_path/d1.c'
:param prefix: the prefix path to remove to make the path relative
:return: filename relative to the temporary source directory,
e.g. 'd1.c'
e.g. 'rel_path/d1.c'
"""
regex = re.compile('(/tmp).{8}/')
return regex.split(filename)[2]
p = PurePath(filename)
return str(p.relative_to(prefix))
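
Mirroring the parametrized test in test_utils.py above (PurePath.relative_to strips the prefix; on Windows the result uses backslashes, which is why the test compares Path objects):

KafkaUtils.relativize_filename('/abs_path/rel_dir/d1.c', '/abs_path')   # -> 'rel_dir/d1.c' on POSIX
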
2 changes: 1 addition & 1 deletion setup.py
@@ -8,7 +8,7 @@

setup(
name='quality-analyzer',
version='1.1.3',
version='1.2.0',
description='FASTEN RAPID Plugin',
long_description=long_description,
long_description_content_type='text/markdown',
