
Commit 296cecd
Merge pull request #163 from rapidpro/rapidpro_api_v2_fix_3
Use batch size of 99 until limit is fixed on RapidPro API side
rowanseymour committed Oct 5, 2016
2 parents bb90d40 + ba6e8d2 commit 296cecd
Showing 2 changed files with 4 additions and 3 deletions.
casepro/backend/rapidpro.py: 3 changes (2 additions, 1 deletion)

@@ -197,7 +197,8 @@ class RapidProBackend(BaseBackend):
     """
     RapidPro instance as a backend
     """
-    BATCH_SIZE = 100
+    # TODO reset to 100 when limit is fixed on RapidPro side
+    BATCH_SIZE = 99
 
     @staticmethod
     def _get_client(org):
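
The hunk above only changes the constant; the code that consumes it is not shown here. As a rough sketch of the behaviour BATCH_SIZE controls (the chunks helper and archive_messages wrapper below are hypothetical, not necessarily casepro's actual implementation), the backend splits a message list into groups of at most BATCH_SIZE before each RapidPro API call:

    # Hypothetical sketch only: casepro's real batching code is not part of this diff.
    BATCH_SIZE = 99  # TODO reset to 100 when limit is fixed on RapidPro side


    def chunks(items, size):
        """Yield successive slices of at most `size` items from a list."""
        for i in range(0, len(items), size):
            yield items[i:i + size]


    def archive_messages(client, messages):
        # One API call per chunk, so no single request exceeds the RapidPro limit
        for batch in chunks(list(messages), BATCH_SIZE):
            client.archive_messages(messages=batch)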
casepro/backend/tests/test_rapidpro.py: 4 changes (2 additions, 2 deletions)

@@ -762,8 +762,8 @@ def test_archive_messages(self, mock_archive_messages):
 
         # check messages were batched
         mock_archive_messages.assert_has_calls([
-            call(messages=[m for m in range(0, 100)]),
-            call(messages=[m for m in range(100, 105)])
+            call(messages=[m for m in range(0, 99)]),
+            call(messages=[m for m in range(99, 105)])
         ])
 
         # check doesn't blow up if passed something other than a list like a set
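
With BATCH_SIZE = 99, the 105 message IDs used in the test split into one full batch of 99 followed by a remainder of 6, which is exactly what the updated assertions expect. A quick check of that arithmetic, reusing the hypothetical chunks helper sketched above:

    # Batching arithmetic behind the updated test expectations (105 messages, BATCH_SIZE = 99)
    batches = list(chunks(list(range(105)), 99))
    assert [len(b) for b in batches] == [99, 6]
    assert batches[0] == list(range(0, 99))
    assert batches[1] == list(range(99, 105))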
