actually tuples will be better so they can be used as dict keys
wimglenn committed Aug 30, 2018
1 parent b38fd96 commit 53f7479
Showing 2 changed files with 8 additions and 8 deletions.
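
The motivation in the commit message comes down to hashability: a list is mutable and unhashable, so a chunk yielded as a list cannot serve as a dict key or set member, while a tuple of hashable items can. A minimal illustration (not part of the commit):

chunk_as_tuple = ('1', '2', '3')
chunk_as_list = ['1', '2', '3']

seen = {}
seen[chunk_as_tuple] = 1       # fine: a tuple of hashable items is hashable

try:
    seen[chunk_as_list] = 1    # raises TypeError: unhashable type: 'list'
except TypeError as exc:
    print(exc)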
12 changes: 6 additions & 6 deletions tests/test_util.py
@@ -81,9 +81,9 @@ def test_grouper(iterable, n, fillvalue, result):


@pytest.mark.parametrize('iterable,chunk_size,overlap,result', [
-    ('1234567', 3, 0, [['1', '2', '3'], ['4', '5', '6'], ['7']]),
-    ('123456', 3, 0, [['1', '2', '3'], ['4', '5', '6']]),
-    ('123456', 4, 2, [['1', '2', '3', '4'], ['3', '4', '5', '6']]),
+    ('1234567', 3, 0, [('1', '2', '3'), ('4', '5', '6'), ('7',)]),
+    ('123456', 3, 0, [('1', '2', '3'), ('4', '5', '6')]),
+    ('123456', 4, 2, [('1', '2', '3', '4'), ('3', '4', '5', '6')]),
    ('', 3, 0, []),
])
def test_chunks(iterable, chunk_size, overlap, result):
@@ -105,6 +105,6 @@ def test_chunks_doesnt_get_stuck_due_to_big_overlap():
def test_chunks_from_infinite_generator():
    gen = iter(int, 1)
    g = chunks(gen, chunk_size=5)
-    assert next(g) == [0, 0, 0, 0, 0]
-    assert next(g) == [0, 0, 0, 0, 0]
-    assert next(g) == [0, 0, 0, 0, 0]
+    assert next(g) == (0, 0, 0, 0, 0)
+    assert next(g) == (0, 0, 0, 0, 0)
+    assert next(g) == (0, 0, 0, 0, 0)
4 changes: 2 additions & 2 deletions wimpy/util.py
@@ -80,11 +80,11 @@ def chunks(iterable, chunk_size=3, overlap=0):
        for i in range(chunk_size):
            queue.append(next(it))
        while True:
-            yield list(queue)
+            yield tuple(queue)
            # after yielding a chunk, get enough elements for the next chunk
            for i in range(chunk_size - overlap):
                queue.append(next(it))
    except StopIteration:
        # if the iterator is exhausted, yield any remaining elements
        if i > 0:
-            yield list(queue)[-i-overlap:]
+            yield tuple(queue)[-i-overlap:]
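
A short usage sketch of the updated function (not part of the commit), assuming chunks is imported from wimpy.util as the diff above suggests; because each chunk is now a hashable tuple, the chunks can be counted directly with a Counter, i.e. used as dict keys:

from collections import Counter

from wimpy.util import chunks  # import path per the file changed above

# chunk_size=3 with overlap=2 slides a 3-wide window one element at a time;
# identical windows collapse onto the same Counter/dict key.
counts = Counter(chunks('abcabcabc', chunk_size=3, overlap=2))
print(counts[('a', 'b', 'c')])  # 3 -- that window occurs three times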
