Skip to content

Commit

Permalink
Merge pull request #621 from yandex/release
Browse files Browse the repository at this point in the history
Release 1.10.1
  • Loading branch information
fomars committed Jul 12, 2018
2 parents 1024c72 + 43992e9 commit bb4dfb2
Show file tree
Hide file tree
Showing 24 changed files with 15,260 additions and 88 deletions.
9 changes: 0 additions & 9 deletions Dockerfile-TankBAT

This file was deleted.

2 changes: 1 addition & 1 deletion Dockerfile-test
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM ubuntu:16.04
FROM load/yandex-tank-pip:testing
WORKDIR /yandextank
RUN apt-get update && \
apt-get install -y python-pip
Expand Down
3 changes: 1 addition & 2 deletions docs/core_and_modules.rst
Original file line number Diff line number Diff line change
Expand Up @@ -419,8 +419,7 @@ Artifacts
Multi-tests
-----------

To make several simultaneous tests with phantom, add proper amount of sections to special section ``multi`` for ``phantom``
with names ``phantom-N``. All subtests are executed in parallel. Multi-test ends as soon as one subtest stops.
To make several simultaneous tests with phantom, add proper amount of sections to special section ``multi`` for ``phantom``. All subtests are executed in parallel. Multi-test ends as soon as one subtest stops.

Example:

Expand Down
5 changes: 3 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

setup(
name='yandextank',
version='1.10.0',
version='1.10.1',
description='a performance measurement tool',
longer_description='''
Yandex.Tank is a performance measurement and load testing automatization tool.
Expand All @@ -16,11 +16,12 @@
namespace_packages=["yandextank", "yandextank.plugins"],
packages=find_packages(exclude=["tests", "tmp", "docs", "data"]),
install_requires=[
'cryptography>=2.2.1', 'pyopenssl==18.0.0',
'psutil>=1.2.1', 'requests>=2.5.1', 'paramiko>=1.16.0',
'pandas>=0.18.0', 'numpy>=1.12.1', 'future>=0.16.0',
'pip>=8.1.2',
'pyyaml>=3.12', 'cerberus==1.2', 'influxdb>=5.0.0',
'netort>=0.0.11', 'pyopenssl>=17.5.0'
'netort>=0.0.11',
],
setup_requires=[
'pytest-runner', 'flake8',
Expand Down
9 changes: 5 additions & 4 deletions yandextank/aggregator/aggregator.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,15 +146,16 @@ def __init__(self, source, config, verbose_histogram):
self.groupby = 'tag'

def __iter__(self):
for ts, chunk in self.source:
for ts, chunk, rps in self.source:
by_tag = list(chunk.groupby([self.groupby]))
start_time = time.time()
result = {
"ts": ts,
"tagged":
u"ts": ts,
u"tagged":
{tag: self.worker.aggregate(data)
for tag, data in by_tag},
"overall": self.worker.aggregate(chunk),
u"overall": self.worker.aggregate(chunk),
u"counted_rps": rps
}
logger.debug(
"Aggregation time: %.2fms", (time.time() - start_time) * 1000)
Expand Down
12 changes: 8 additions & 4 deletions yandextank/aggregator/chopper.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,12 @@ def __iter__(self):
else:
self.cache[group_key] = group_data
while len(self.cache) > self.cache_size:
key = min(self.cache.keys())
yield (key, self.cache.pop(key, None))
yield self.__get_result()
while self.cache:
key = min(self.cache.keys())
yield (key, self.cache.pop(key, None))
yield self.__get_result()

def __get_result(self):
    """Pop the oldest entry from the cache and return it with its size.

    Returns:
        tuple: ``(key, result, cardinality)`` where ``key`` is the smallest
        key currently in ``self.cache`` (presumably a timestamp / group key —
        confirm against the producer in ``__iter__``), ``result`` is the
        cached chunk removed for that key (or ``None``), and ``cardinality``
        is ``len(result)`` or ``0`` when ``result`` is ``None``.

    NOTE(review): assumes ``self.cache`` is non-empty when called —
    ``min()`` raises ``ValueError`` on an empty dict.  The visible callers
    guard with ``while len(self.cache) > self.cache_size`` and
    ``while self.cache`` before invoking this.
    """
    key = min(self.cache.keys())
    # pop with a default so a missing key yields None instead of raising;
    # the cardinality computation below tolerates that None.
    result = self.cache.pop(key, None)
    cardinality = len(result) if result is not None else 0
    return (key, result, cardinality)
15 changes: 10 additions & 5 deletions yandextank/aggregator/tank_aggregator.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def __init__(self, generator):
def load_config():
return json.loads(resource_string(__name__, 'config/phout.json').decode('utf8'))

def start_test(self):
def start_test(self, poll_period=1):
self.reader = self.generator.get_reader()
self.stats_reader = self.generator.get_stats_reader()
aggregator_config = self.load_config()
Expand All @@ -64,15 +64,15 @@ def start_test(self):
if self.reader and self.stats_reader:
pipeline = Aggregator(
TimeChopper(
DataPoller(source=self.reader, poll_period=1),
DataPoller(source=self.reader, poll_period=poll_period),
cache_size=3),
aggregator_config,
verbose_histogram)
self.drain = Drain(pipeline, self.results)
self.drain.start()
self.stats_drain = Drain(
Chopper(DataPoller(
source=self.stats_reader, poll_period=1)),
source=self.stats_reader, poll_period=poll_period)),
self.stats)
self.stats_drain.start()
else:
Expand Down Expand Up @@ -105,7 +105,9 @@ def _collect_data(self, end=False):
else:
self.stat_cache[ts] = item
if end and len(self.data_cache) > 0:
logger.info('Timestamps without stats:')
for ts, data_item in sorted(self.data_cache.items(), key=lambda i: i[0]):
logger.info(ts)
self.__notify_listeners(data_item, StatsReader.stats_item(ts, 0, 0))

def is_test_finished(self):
Expand All @@ -118,11 +120,14 @@ def end_test(self, retcode):
self.reader.close()
if self.stats_reader:
self.stats_reader.close()
if self.drain:
self.drain.wait()
self.stats_drain.wait()
self._collect_data(end=True)
if self.drain:
self.drain.join()
if self.stats_drain:
self.stats_drain.join()
self._collect_data(end=True)

return retcode

def add_result_listener(self, listener):
Expand Down

0 comments on commit bb4dfb2

Please sign in to comment.