Skip to content

Commit

Permalink
Batch calls to some rapidpro endpoints to avoid exceeding list size
Browse files Browse the repository at this point in the history
  • Loading branch information
rowanseymour committed Oct 5, 2016
1 parent b706bc8 commit 814a12c
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 23 deletions.
40 changes: 21 additions & 19 deletions casepro/backend/rapidpro.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import six

from dash.utils import is_dict_equal
from dash.utils import is_dict_equal, chunks
from dash.utils.sync import BaseSyncer, sync_local_to_set, sync_local_to_changes
from django.utils.timezone import now

Expand Down Expand Up @@ -197,6 +197,8 @@ class RapidProBackend(BaseBackend):
"""
RapidPro instance as a backend
"""
# maximum number of items to send in a single bulk API call — RapidPro
# endpoints limit the size of list parameters (see commit purpose above)
BATCH_SIZE = 100

@staticmethod
def _get_client(org):
    """
    Gets a v2 API client for the given org's RapidPro backend
    """
    return org.get_temba_client(api_version=2)
Expand Down Expand Up @@ -305,38 +307,38 @@ def stop_runs(self, org, contact):
client.bulk_interrupt_contacts(contacts=[contact.uuid])

def label_messages(self, org, messages, label):
    """
    Adds the given label to the given messages on the backend.

    Message IDs are sent in batches of BATCH_SIZE so a single API call
    never exceeds the backend's list size limit.
    """
    # NOTE: the scraped diff retained the old unbatched call above the
    # batched loop, which would have labelled every message twice — only
    # the batched implementation is kept here
    client = self._get_client(org)
    for batch in chunks(messages, self.BATCH_SIZE):
        client.bulk_label_messages(messages=[m.backend_id for m in batch], label=label.uuid)

def unlabel_messages(self, org, messages, label):
    """
    Removes the given label from the given messages on the backend.

    Message IDs are sent in batches of BATCH_SIZE so a single API call
    never exceeds the backend's list size limit.
    """
    client = self._get_client(org)
    for batch in chunks(messages, self.BATCH_SIZE):
        client.bulk_unlabel_messages(messages=[m.backend_id for m in batch], label=label.uuid)

def archive_messages(self, org, messages):
    """
    Archives the given messages on the backend.

    Message IDs are sent in batches of BATCH_SIZE so a single API call
    never exceeds the backend's list size limit.
    """
    client = self._get_client(org)
    for batch in chunks(messages, self.BATCH_SIZE):
        client.bulk_archive_messages(messages=[m.backend_id for m in batch])

def archive_contact_messages(self, org, contact):
    """
    Archives all of the given contact's messages on the backend.
    """
    # single-contact operation — no batching needed
    self._get_client(org).bulk_archive_contacts(contacts=[contact.uuid])

def restore_messages(self, org, messages):
    """
    Restores (un-archives) the given messages on the backend.

    Message IDs are sent in batches of BATCH_SIZE so a single API call
    never exceeds the backend's list size limit.
    """
    client = self._get_client(org)
    for batch in chunks(messages, self.BATCH_SIZE):
        client.bulk_restore_messages(messages=[m.backend_id for m in batch])

def flag_messages(self, org, messages):
    """
    Flags the given messages on the backend by applying the system
    "Flagged" label.

    Message IDs are sent in batches of BATCH_SIZE so a single API call
    never exceeds the backend's list size limit.
    """
    client = self._get_client(org)
    for batch in chunks(messages, self.BATCH_SIZE):
        client.bulk_label_messages(messages=[m.backend_id for m in batch], label_name=SYSTEM_LABEL_FLAGGED)

def unflag_messages(self, org, messages):
    """
    Un-flags the given messages on the backend by removing the system
    "Flagged" label.

    Message IDs are sent in batches of BATCH_SIZE so a single API call
    never exceeds the backend's list size limit.
    """
    client = self._get_client(org)
    for batch in chunks(messages, self.BATCH_SIZE):
        client.bulk_unlabel_messages(messages=[m.backend_id for m in batch], label_name=SYSTEM_LABEL_FLAGGED)

def fetch_contact_messages(self, org, contact, created_after, created_before):
"""
Expand Down
12 changes: 8 additions & 4 deletions casepro/backend/tests/test_rapidpro.py
Original file line number Diff line number Diff line change
Expand Up @@ -755,12 +755,16 @@ def test_archive_messages(self, mock_archive_messages):

self.assertNotCalled(mock_archive_messages)

msg1 = self.create_message(self.unicef, 123, self.bob, "Hello")
msg2 = self.create_message(self.unicef, 234, self.bob, "Goodbye")
# create more messages than can archived in one call to the RapidPro API
msgs = [self.create_message(self.unicef, m, self.bob, "Hello %d" % (m + 1)) for m in range(105)]

self.backend.archive_messages(self.unicef, [msg1, msg2])
self.backend.archive_messages(self.unicef, msgs)

mock_archive_messages.assert_called_once_with(messages=[123, 234])
# check messages were batched
mock_archive_messages.assert_has_calls([
call(messages=[m for m in range(0, 100)]),
call(messages=[m for m in range(100, 105)])
])

@patch('dash.orgs.models.TembaClient2.bulk_archive_contacts')
def test_archive_contact_messages(self, mock_archive_contacts):
Expand Down

0 comments on commit 814a12c

Please sign in to comment.