Merge pull request #819 from yandex/develop
Release 1.12.6
fomars committed Nov 25, 2019
2 parents 177f050 + f244ce4 commit 398b9f2

Showing 22 changed files with 759 additions and 56 deletions.
4 changes: 2 additions & 2 deletions docs/conf.py
@@ -52,9 +52,9 @@
# built documents.
#
# The short X.Y version.
version = '1.11'
version = '1.12'
# The full version, including alpha/beta/rc tags.
release = '1.11.2'
release = '1.12.6'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
33 changes: 22 additions & 11 deletions docs/config_reference.rst
@@ -64,8 +64,8 @@ ShootExec
:nullable:
True

Influx
======
InfluxUploader
==============

``address`` (string)
--------------------
@@ -83,13 +83,9 @@ Influx
---------------------
*\- (no description). Default:* ``mydb``

``grafana_dashboard`` (string)
------------------------------
*\- (no description). Default:* ``tank-dashboard``

``grafana_root`` (string)
-------------------------
*\- (no description). Default:* ``http://localhost/``
``histograms`` (boolean)
------------------------
*\- (no description). Default:* ``False``

``labeled`` (boolean)
---------------------
@@ -266,11 +262,26 @@ Pandora

``expvar`` (boolean)
--------------------
*\- Toggle expvar monitoring. Default:* ``True``
*\- (no description). Default:* ``False``

``pandora_cmd`` (string)
------------------------
*\- Pandora executable path. Default:* ``pandora``
*\- Pandora executable path or link to it. Default:* ``pandora``

``report_file`` (string)
------------------------
*\- Pandora phout path (normally will be taken from pandora config). Default:* ``None``

:nullable:
True

``resource`` (dict)
-------------------
*\- dict with attributes for additional resources.*

``resources`` (list)
--------------------
*\- additional resources you need to download before test. Default:* ``[]``
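
For orientation, the renamed InfluxUploader section and the new Pandora options documented above translate into load.yaml entries roughly like this (a minimal sketch: the section keys ``influx`` and ``pandora`` match the converter mapping and base config further down; the ``database`` option name is an assumption, since the entry whose default is ``mydb`` is collapsed in the hunk above, and all values are illustrative):

    influx:
      enabled: true
      address: localhost      # InfluxDB host (documented above)
      database: mydb          # assumed option name; its default above is `mydb`
      histograms: false       # new boolean option (documented above)

    pandora:
      enabled: true
      pandora_cmd: /usr/local/bin/pandora  # executable path or a link to it
      report_file: null                    # phout path; normally taken from the pandora config
      resources: []                        # additional resources to download before the test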

Android
=======
2 changes: 1 addition & 1 deletion docs/tutorial.rst
@@ -724,7 +724,7 @@ that. load.yaml:

.. code-block:: yaml
phantom:
phantom:
address: 203.0.113.1:80
load_profile:
load_type: rps
11 changes: 6 additions & 5 deletions setup.py
@@ -2,16 +2,16 @@

setup(
name='yandextank',
version='1.12.5',
version='1.12.6',
description='a performance measurement tool',
longer_description='''
Yandex.Tank is a performance measurement and load testing automatization tool.
It uses other load generators such as JMeter, ab or phantom inside of it for
load generation and provides a common configuration system for them and
analytic tools for the results they produce.
''',
maintainer='Alexey Lavrenuke (load testing)',
maintainer_email='direvius@yandex-team.ru',
maintainer='Yandex Load Team',
maintainer_email='load@yandex-team.ru',
url='http://yandex.github.io/yandex-tank/',
namespace_packages=["yandextank", "yandextank.plugins"],
packages=find_packages(exclude=["tests", "tmp", "docs", "data"]),
@@ -20,8 +20,8 @@
'psutil>=1.2.1', 'requests>=2.5.1', 'paramiko>=1.16.0',
'pandas==0.24.2', 'numpy==1.15.4', 'future>=0.16.0',
'pip>=8.1.2',
'pyyaml>=4.2b1', 'cerberus==1.2', 'influxdb>=5.0.0', 'netort>=0.6.10',
'retrying>=1.3.3', 'pytest-runner'
'pyyaml>=4.2b1', 'cerberus==1.3.1', 'influxdb>=5.0.0', 'netort>=0.7.4',
'retrying>=1.3.3', 'pytest-runner', 'typing'
],
setup_requires=[
],
@@ -61,6 +61,7 @@
'yandextank.plugins.Console': ['config/*'],
'yandextank.plugins.DataUploader': ['config/*'],
'yandextank.plugins.InfluxUploader': ['config/*'],
'yandextank.plugins.OpenTSDBUploader': ['config/*'],
'yandextank.plugins.JMeter': ['config/*'],
'yandextank.plugins.JsonReport': ['config/*'],
'yandextank.plugins.Pandora': ['config/*'],
1 change: 1 addition & 0 deletions yandextank/config_converter/converter.py
@@ -49,6 +49,7 @@ def parse_package_name(package_path):
'JsonReport': 'json_report|jsonreport',
'Pandora': 'pandora',
'InfluxUploader': 'influx',
'OpenTSDBUploader': 'opentsdb',

}

4 changes: 4 additions & 0 deletions yandextank/core/config/00-base.yaml
@@ -40,3 +40,7 @@ overload:
influx:
enabled: false
package: yandextank.plugins.InfluxUploader
opentsdb:
enabled: false
package: yandextank.plugins.OpenTSDBUploader
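With the new plugin registered in the base config above, enabling it from a user load.yaml follows the same pattern as the other uploaders (a sketch; OpenTSDB-specific options are defined by the plugin's own schema and are omitted here):

    opentsdb:
      enabled: true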

12 changes: 10 additions & 2 deletions yandextank/plugins/Autostop/plugin.py
@@ -20,6 +20,7 @@ def __init__(self, core, cfg, name):
AggregateResultListener.__init__(self)

self.cause_criterion = None
self.imbalance_rps = 0
self._criterions = {}
self.custom_criterions = []
self.counting = []
@@ -112,9 +113,16 @@ def on_aggregated_data(self, data, stat):
if not self.cause_criterion:
for criterion_text, criterion in self._criterions.iteritems():
if criterion.notify(data, stat):
self.log.warning(
"Autostop criterion requested test stop: %s", criterion_text)
self.cause_criterion = criterion
if self.cause_criterion.cause_second:
self.imbalance_rps = int(self.cause_criterion.cause_second[1]["metrics"]["reqps"])
if not self.imbalance_rps:
self.imbalance_rps = int(
self.cause_criterion.cause_second[0]["overall"]["interval_real"]["len"])
self.core.publish('autostop', 'rps', self.imbalance_rps)
self.core.publish('autostop', 'reason', criterion.explain())
self.log.warning(
"Autostop criterion requested test stop on %d rps: %s", self.imbalance_rps, criterion_text)
open(self._stop_report_path, 'w').write(criterion_text)
self.core.add_artifact_file(self._stop_report_path)
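
The values published here end up under ``core.status['autostop']`` as ``rps`` and ``reason``, where other plugins can read them (see the NeUploader change below). The criteria that drive this code path are configured in load.yaml along these lines (a sketch; criterion syntax as in the Autostop documentation, thresholds are illustrative):

    autostop:
      enabled: true
      autostop:
        - time(1500ms,30s)   # stop if average response time stays above 1.5 s for 30 s
        - http(5xx,10%,5s)   # stop if more than 10% of responses are 5xx for 5 s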

9 changes: 1 addition & 8 deletions yandextank/plugins/DataUploader/plugin.py
@@ -361,15 +361,8 @@ def post_process(self, rc):
logger.debug("No autostop plugin loaded", exc_info=True)

if autostop and autostop.cause_criterion:
rps = 0
if autostop.cause_criterion.cause_second:
rps = autostop.cause_criterion.cause_second[
1]["metrics"]["reqps"]
if not rps:
rps = autostop.cause_criterion.cause_second[0][
"overall"]["interval_real"]["len"]
self.lp_job.set_imbalance_and_dsc(
int(rps), autostop.cause_criterion.explain())
autostop.imbalance_rps, autostop.cause_criterion.explain())

else:
logger.debug("No autostop cause detected")
3 changes: 2 additions & 1 deletion yandextank/plugins/InfluxUploader/decoder.py
@@ -49,7 +49,8 @@ def decode_monitoring(self, data):
{"host": host, "comment": host_data.get("comment")},
second_data["timestamp"],
{
metric: value
# cast int to float. avoid https://github.com/yandex/yandex-tank/issues/776
metric: float(value) if isinstance(value, int) else value
for metric, value in host_data["metrics"].iteritems()
}
)
65 changes: 65 additions & 0 deletions yandextank/plugins/InfluxUploader/tests/test_influxdb_decoder.py
@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
from uuid import uuid4
from yandextank.plugins.InfluxUploader.decoder import Decoder


class TestDecoder(object):
def test_metrics_cast(self):
test_uuid = str(uuid4())
tank_tag = 'test_tank_tag'
comment = 'test comment'
raw_metrics = {
'metric1': -123,
'metric2': -123.456,
'metric3': 123,
'metric4': 123.456,
'metric5': 0,
'metric6': -0.1,
'metric7': 0.1,
'metric8': 'down',
}
timestamp = 123456789
host = '127.0.0.1'
data = [
{
'data': {
host: {
'comment': comment,
'metrics': raw_metrics
}
},
'timestamp': timestamp
}
]
expected_metrics = {
'metric1': -123.0,
'metric2': -123.456,
'metric3': 123.0,
'metric4': 123.456,
'metric5': 0.0,
'metric6': -0.1,
'metric7': 0.1,
'metric8': 'down'
}

decoder = Decoder(tank_tag, test_uuid, {}, True, True)
result_points = decoder.decode_monitoring(data)

assert (len(result_points) == 1)
r_point = result_points[0]
# check other props
assert (r_point['time'] == timestamp)
assert (r_point['measurement'] == 'monitoring')
assert (r_point['tags']['comment'] == comment)
assert (r_point['tags']['host'] == host)
assert (r_point['tags']['tank'] == tank_tag)
assert (r_point['tags']['uuid'] == test_uuid)
# check metric cast
assert (len(r_point['fields']) == len(expected_metrics))
for metric, value in r_point['fields'].iteritems():
if metric not in expected_metrics:
assert False
if not isinstance(value, type(expected_metrics[metric])):
assert False
if not value == expected_metrics[metric]:
assert False
6 changes: 5 additions & 1 deletion yandextank/plugins/NeUploader/config/schema.yaml
@@ -9,4 +9,8 @@ db_name:
test_name:
description: test name
type: string
default: my test
default: my test
meta:
type: dict
keysrules:
forbidden: ['name', 'raw', 'aggregate', 'group', 'host', 'type']
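
The new ``meta`` dict lets a test attach arbitrary key/value tags to the uploaded job and metrics; the keys listed as forbidden are reserved by the uploader itself. In load.yaml that looks roughly like this (a sketch; the ``neuploader`` section key is an assumption, values are illustrative):

    neuploader:
      enabled: true
      test_name: release check
      meta:
        component: backend   # free-form tags merged into job and metric metadata
        release: 1.12.6      # keys name/raw/aggregate/group/host/type are rejected by the schema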
49 changes: 28 additions & 21 deletions yandextank/plugins/NeUploader/plugin.py
@@ -1,7 +1,7 @@
import logging

import pandas
from netort.data_manager import DataSession
from netort.data_manager import DataSession, thread_safe_property

from yandextank.plugins.Phantom.reader import string_to_df_microsec
from yandextank.common.interfaces import AbstractPlugin,\
@@ -40,22 +40,20 @@ def start_test(self):
self.is_test_finished = lambda: -1
self.reader = []

@property
@thread_safe_property
def col_map(self):
if self._col_map is None:
self._col_map = {
'interval_real': self.data_session.new_true_metric,
'connect_time': self.data_session.new_true_metric,
'send_time': self.data_session.new_true_metric,
'latency': self.data_session.new_true_metric,
'receive_time': self.data_session.new_true_metric,
'interval_event': self.data_session.new_true_metric,
'net_code': self.data_session.new_event_metric,
'proto_code': self.data_session.new_event_metric
}
return self._col_map

@property
return {
'interval_real': self.data_session.new_true_metric,
'connect_time': self.data_session.new_true_metric,
'send_time': self.data_session.new_true_metric,
'latency': self.data_session.new_true_metric,
'receive_time': self.data_session.new_true_metric,
'interval_event': self.data_session.new_true_metric,
'net_code': self.data_session.new_event_metric,
'proto_code': self.data_session.new_event_metric
}

@thread_safe_property
def data_session(self):
"""
:rtype: DataSession
@@ -64,12 +62,19 @@ def data_session(self):
self._data_session = DataSession({'clients': self.clients_cfg},
test_start=self.core.status['generator']['test_start'] * 10**6)
self.add_cleanup(self._cleanup)
self._data_session.update_job({'name': self.cfg.get('test_name'),
'__type': 'tank'})
self._data_session.update_job(dict({'name': self.cfg.get('test_name'),
'__type': 'tank'},
**self.cfg.get('meta', {})))
return self._data_session

def _cleanup(self):
uploader_metainfo = self.map_uploader_tags(self.core.status.get('uploader'))
if self.core.status.get('autostop'):
autostop_rps = self.core.status.get('autostop', {}).get('rps', 0)
autostop_reason = self.core.status.get('autostop', {}).get('reason', '')
self.log.warning('Autostop: %s %s', autostop_rps, autostop_reason)
uploader_metainfo.update({'autostop_rps': autostop_rps, 'autostop_reason': autostop_reason})
uploader_metainfo.update(self.cfg.get('meta', {}))
self.data_session.update_job(uploader_metainfo)
self.data_session.close(test_end=self.core.status.get('generator', {}).get('test_end', 0) * 10**6)

@@ -113,7 +118,8 @@ def get_metric_obj(self, col, case):
raw=False,
aggregate=True,
source='tank',
importance='high' if col in self.importance_high else ''
importance='high' if col in self.importance_high else '',
**self.cfg.get('meta', {})
) for col, constructor in self.col_map.items()
}
self.metrics_objs[case] = case_metrics
@@ -148,7 +154,8 @@ def upload_monitoring(self, data):
self.monitoring_metrics[metric_name] = self.data_session.new_true_metric(name,
group=group,
host=panel,
type='monitoring')
type='monitoring',
**self.cfg.get('meta', {}))
self.monitoring_metrics[metric_name].put(df)

@staticmethod
@@ -184,6 +191,6 @@ def map_uploader_tags(self, uploader_tags):
return {}
else:
meta_tags_names = ['component', 'description', 'name', 'person', 'task', 'version', 'lunapark_jobno']
meta_tags = {key: uploader_tags.get(key, self.cfg.get(key, '')) for key in meta_tags_names}
meta_tags = {key: uploader_tags.get(key) for key in meta_tags_names if key in uploader_tags}
meta_tags.update({k: v if v is not None else '' for k, v in uploader_tags.get('meta', {}).items()})
return meta_tags
1 change: 1 addition & 0 deletions yandextank/plugins/OpenTSDBUploader/__init__.py
@@ -0,0 +1 @@
from .plugin import Plugin # noqa:F401
1 change: 1 addition & 0 deletions yandextank/plugins/OpenTSDBUploader/client/__init__.py
@@ -0,0 +1 @@
from .client import OpenTSDBClient # noqa:F401
