
Commit

flake8 fixes
thefab committed Feb 26, 2015
1 parent 3dacfc0 commit 49deff6
Showing 1 changed file with 32 additions and 19 deletions.
tornadis/benchmark.py: 32 additions & 19 deletions

@@ -12,38 +12,46 @@


 def get_parameters():
-    parser = argparse.ArgumentParser(description='Tornadis benchmarking utility', add_help=False)
+    parser = argparse.ArgumentParser(
+        description='Tornadis benchmarking utility', add_help=False)
     parser.add_argument('--help', action='help')
-    parser.add_argument('-h', '--hostname', help="Server hostname (default 127.0.0.1)",
+    parser.add_argument('-h', '--hostname',
+                        help="Server hostname (default 127.0.0.1)",
                         default="127.0.0.1")
-    parser.add_argument('-p', '--port', help="Server port (default 6379)", default=6379)
+    parser.add_argument('-p', '--port', help="Server port (default 6379)",
+                        default=6379)
     parser.add_argument('-a', '--password', help="Password for Redis Auth")
-    parser.add_argument('-c', '--clients', help="Number of parallel connections (default 5)",
+    parser.add_argument('-c', '--clients',
+                        help="Number of parallel connections (default 5)",
                         type=int, default=5)
-    parser.add_argument('-n', '--requests', help="Total number of requests (default 100000)",
+    parser.add_argument('-n', '--requests',
+                        help="Total number of requests (default 100000)",
                         type=int, default=10000)
-    parser.add_argument('-b', '--batch-size', help="Number of request to send in parallel",
+    parser.add_argument('-b', '--batch-size',
+                        help="Number of request to send in parallel",
                         type=int, default=None)
-    parser.add_argument('-d', '--data-size', default=2,
-                        help="Data size of SET/GET value in bytes (default 2)", type=int)
+    parser.add_argument('-d', '--data-size', default=2,
+                        help="Data size of SET/GET value in bytes (default 2)",
+                        type=int)
     return parser.parse_args()


 def group_iterable(iterable, group_size):
     it = iter(iterable)
     while True:
-        chunk = tuple(itertools.islice(it, group_size))
-        if not chunk:
-            return
-        yield chunk
+        chunk = tuple(itertools.islice(it, group_size))
+        if not chunk:
+            return
+        yield chunk


 class Benchmark(object):

     def __init__(self, params):
         self.params = params
         self.request_count = self.params.requests
-        self.requests_per_client = int(math.ceil(self.params.requests / float(self.params.clients)))
+        self.requests_per_client = int(math.ceil(self.params.requests /
+                                                 float(self.params.clients)))
         self.response_count = 0
         self.value = '*' * self.params.data_size
@@ -58,19 +66,23 @@ def multiple_set(self, client_number):
         if self.params.batch_size:
             batches = group_iterable(futures, self.params.batch_size)
             for batch in itertools.imap(list, batches):
-                print "Send {} requests with client {}".format(len(batch), client_number)
+                print "Send {} requests with client {}".format(len(batch),
+                                                               client_number)
                 responses = yield batch
                 resp_count = len(responses)
-                print "Received {} responses with client {}".format(resp_count, client_number)
+                print "Received {} responses " \
+                      "with client {}".format(resp_count, client_number)
                 self.response_count += resp_count
         else:
-            print "Send {} requests with client {}".format(self.requests_per_client, client_number)
+            print "Send {} requests " \
+                  "with client {}".format(self.requests_per_client,
+                                          client_number)
             responses = yield list(futures)
             resp_count = len(responses)
-            print "Received {} responses with client {}".format(resp_count, client_number)
+            print "Received {} responses with client {}".format(resp_count,
+                                                                client_number)
             self.response_count += resp_count

-
     def stop_loop(self, future):
         excep = future.exception()
         if self.response_count == self.request_count:
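
multiple_set() above leans on tornado.gen coroutines being able to wait on a whole list of futures with a single yield: with --batch-size set, each client keeps only one batch of requests in flight at a time; otherwise it yields all of its futures at once. A stripped-down sketch of that pattern (Python 2, like the file), using pre-resolved stand-in futures instead of real tornadis calls and assuming tornadis is installed so group_iterable can be imported:

import tornado.concurrent
import tornado.gen
import tornado.ioloop

from tornadis.benchmark import group_iterable


def fake_future(value):
    # Stand-in for the future a real tornadis SET call would return.
    future = tornado.concurrent.Future()
    future.set_result(value)
    return future


@tornado.gen.coroutine
def send_in_batches(futures, batch_size):
    total = 0
    for batch in group_iterable(futures, batch_size):
        responses = yield list(batch)  # waits for every future in the batch
        total += len(responses)
    raise tornado.gen.Return(total)


@tornado.gen.coroutine
def demo():
    futures = [fake_future("OK") for _ in range(10)]
    total = yield send_in_batches(futures, batch_size=3)
    print "received {} responses".format(total)


tornado.ioloop.IOLoop.instance().run_sync(demo)
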
@@ -83,7 +95,8 @@ def stop_loop(self, future):
 def main():
     params = get_parameters()
     if params.requests % params.clients != 0:
-        print >> sys.stderr, "Number of requests must be a multiple of number of clients"
+        print >> sys.stderr, "Number of requests must be a multiple " \
+                             "of number of clients"
         sys.exit(-1)

     loop = tornado.ioloop.IOLoop.instance()
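
main() (last hunk) rejects request counts that do not divide evenly among the clients, which is why Benchmark.__init__ can split the work with int(math.ceil(requests / float(clients))): once the guard has passed, the ceiling is an exact division. A small sketch with example numbers (values are illustrative, not from the commit):

import math

requests, clients = 100000, 5   # e.g. -n 100000 -c 5 on the command line

if requests % clients != 0:
    raise SystemExit("Number of requests must be a multiple of number of clients")

requests_per_client = int(math.ceil(requests / float(clients)))
print(requests_per_client)  # 20000, an exact split once the guard has passed
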
