Skip to content

Commit

Permalink
Fix batch option in benchmark script
Browse files Browse the repository at this point in the history
The group_iterable was converting each group into a tuple, hence
consuming the generator expression and triggering Redis requests.
This new version of group_iterable instead returns a generator of
generators, which are consumed in batches, one after the other, so that
not all requests are sent at once.
  • Loading branch information
Alex Marandon committed Feb 27, 2015
1 parent f65d13a commit 47d5703
Showing 1 changed file with 10 additions and 7 deletions.
17 changes: 10 additions & 7 deletions tornadis/benchmark.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import tornado


logging.basicConfig(level=logging.CRITICAL)
# logging.basicConfig(level=logging.CRITICAL)


def get_parameters():
Expand Down Expand Up @@ -39,13 +39,15 @@ def get_parameters():
return parser.parse_args()


def group_iterable(iterable, total_size, group_size):
    """Split an iterable into lazy chunks of at most *group_size* items.

    Yields `itertools.islice` objects (not materialized tuples) so that the
    underlying iterable — typically a generator whose evaluation triggers
    side effects such as Redis requests — is consumed one batch at a time,
    only when the caller iterates each chunk.

    NOTE: each chunk must be fully consumed before requesting the next one,
    otherwise the following chunk starts wherever the previous slice left off.

    :param iterable: source of items (consumed lazily, exactly once).
    :param total_size: total number of items expected from *iterable*;
        chunking stops once this many items have been covered.
    :param group_size: maximum number of items per chunk.
    """
    # Take the iterator ONCE. islice() calls iter() on its argument, so
    # slicing the raw iterable repeatedly would restart re-iterable
    # sequences (e.g. lists) from the beginning on every chunk.
    it = iter(iterable)
    processed_size = 0
    while processed_size < total_size:
        yield itertools.islice(it, group_size)
        processed_size += group_size


class Benchmark(object):
Expand All @@ -67,7 +69,8 @@ def multiple_set(self, client_number):
futures = (client.call("SET", "benchmark-key", self.value)
for _ in xrange(self.requests_per_client))
if self.params.batch_size:
batches = group_iterable(futures, self.params.batch_size)
batches = group_iterable(futures, self.requests_per_client,
self.params.batch_size)
for batch in itertools.imap(list, batches):
print "Send {} requests with client {}".format(len(batch),
client_number)
Expand Down

0 comments on commit 47d5703

Please sign in to comment.