Increment offset by length of results in iter_all
The API has been returning null values for offset and limit in its response body. By trusting these values, the client set itself up for a TypeError (see #61).

This fixes the issue by incrementing the offset by the length of the array of results returned by the index endpoint, instead of the limit value reported by the API.
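
In other words, the pagination loop now advances the offset by the number of results actually received rather than by the limit echoed back in the response. A minimal sketch of that pattern, assuming a generic session object with a get() method (a hypothetical, simplified helper, not the library's exact code):

def iter_pages(session, path, r_name, page_size=100):
    # Hypothetical, simplified pagination loop illustrating the fix.
    offset = 0
    more = True
    while more:
        r = session.get(path, params={'limit': page_size, 'offset': offset})
        body = r.json()
        results = body[r_name]  # r_name is the plural resource key, e.g. 'users'
        for item in results:
            yield item
        # Advance by the number of results actually returned; the 'limit'
        # reported in the response body may be null, so it is not trusted.
        offset += len(results)
        more = body.get('more', False)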
Deconstrained committed May 13, 2021
1 parent 232cf20 commit 5347288
Showing 2 changed files with 34 additions and 15 deletions.
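
For reference, an illustrative usage sketch of iter_all from client code (the 'users' path and the 'id' field mirror the test fixtures below; the token is a placeholder, not part of the commit):

import pdpyras

session = pdpyras.APISession('API_KEY_PLACEHOLDER')  # placeholder token
# iter_all pages through an index endpoint transparently; with this commit the
# offset advances by the number of results actually returned per page.
for user in session.iter_all('users'):
    print(user['id'])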
pdpyras.py (5 changes: 3 additions & 2 deletions)
@@ -1138,8 +1138,9 @@ def iter_all(self, path, params=None, paginate=True, page_size=None,
                 self.log.debug("Stopping iteration on endpoint \"%s\"; API "
                     "responded with invalid JSON.", path)
                 break
-            if 'limit' in response:
-                data['limit'] = response['limit']
+            #if 'limit' in response:
+            #    data['limit'] = response['limit']
+            data['limit'] = len(response[r_name])
             more = False
             total_count = None
             if paginate:
test_pdpyras.py (44 changes: 31 additions & 13 deletions)
@@ -195,18 +195,19 @@ def test_find(self, iter_all):
     def test_iter_all(self, get):
         sess = pdpyras.APISession('token')
         sess.log = MagicMock() # Or go with self.debug(sess) to see output
-        page = lambda n, t: {
+        page = lambda n, t, l: {
             'users': [{'id':i} for i in range(10*n, 10*(n+1))],
             'total': t,
-            'more': n<(t/10)-1
+            'more': n<(t/10)-1,
+            'limit': l
         }
         iter_param = lambda p: json.dumps({
             'limit':10, 'total': True, 'offset': 0
         })
         get.side_effect = [
-            Response(200, json.dumps(page(0, 30))),
-            Response(200, json.dumps(page(1, 30))),
-            Response(200, json.dumps(page(2, 30))),
+            Response(200, json.dumps(page(0, 30, 10))),
+            Response(200, json.dumps(page(1, 30, 10))),
+            Response(200, json.dumps(page(2, 30, 10))),
         ]
         weirdurl='https://api.pagerduty.com/users?number=1'
         hook = MagicMock()
@@ -221,33 +222,50 @@ def test_iter_all(self, get):
             ],
         )
         hook.assert_any_call({'id':14}, 15, 30)
-        get.reset_mock()

         # Test stopping iteration on non-success status
+        get.reset_mock()
         error_encountered = [
-            Response(200, json.dumps(page(0, 50))),
-            Response(200, json.dumps(page(1, 50))),
-            Response(200, json.dumps(page(2, 50))),
-            Response(400, json.dumps(page(3, 50))), # break
-            Response(200, json.dumps(page(4, 50))),
+            Response(200, json.dumps(page(0, 50, 10))),
+            Response(200, json.dumps(page(1, 50, 10))),
+            Response(200, json.dumps(page(2, 50, 10))),
+            Response(400, json.dumps(page(3, 50, 10))), # break
+            Response(200, json.dumps(page(4, 50, 10))),
         ]
         get.side_effect = copy.deepcopy(error_encountered)
         sess.raise_if_http_error = False
         new_items = list(sess.iter_all(weirdurl))
         self.assertEqual(items, new_items)
-        get.reset_mock()

         # Now test raising an exception:
+        get.reset_mock()
         get.side_effect = copy.deepcopy(error_encountered)
         sess.raise_if_http_error = True
         self.assertRaises(pdpyras.PDClientError, list, sess.iter_all(weirdurl))
-        get.reset_mock()

         # Test reaching the iteration limit:
+        get.reset_mock()
         bigiter = sess.iter_all('log_entries', page_size=100,
             params={'offset': '9901'})
         self.assertRaises(StopIteration, next, bigiter)
+
+        # Test (temporary, until underlying issue in the API is resolved) using
+        # the result count to increment offset instead of `limit` reported in
+        # API response
+        get.reset_mock()
+        iter_param = lambda p: json.dumps({
+            'limit':10, 'total': True, 'offset': 0
+        })
+        get.side_effect = [
+            Response(200, json.dumps(page(0, 30, None))),
+            Response(200, json.dumps(page(1, 30, 42))),
+            Response(200, json.dumps(page(2, 30, 2020))),
+        ]
+        weirdurl='https://api.pagerduty.com/users?number=1'
+        hook = MagicMock()
+        items = list(sess.iter_all(weirdurl, item_hook=hook, total=True, page_size=10))
+        self.assertEqual(30, len(items))

     @patch.object(pdpyras.APISession, 'rpost')
     @patch.object(pdpyras.APISession, 'iter_all')
     def test_persist(self, iterator, creator):
