This repository has been archived by the owner on Jun 12, 2018. It is now read-only.

Commit

Merge branch 'develop' into feature/issue-979-contact-imports-treat-created-at-as-an-extra
jerith committed Jun 25, 2014
2 parents eab3088 + d47c474 commit 305ffde
Showing 13 changed files with 235 additions and 23 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -5,7 +5,7 @@ python:
node_js:
- "0.10"
env:
- VUMITEST_REDIS_DB=1 VUMIGO_TEST_DB=postgres
- VUMITEST_REDIS_DB=1 VUMIGO_TEST_DB=postgres VUMI_TEST_TIMEOUT=10
services:
- riak
- postgresql
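The new VUMI_TEST_TIMEOUT=10 entry raises the per-test timeout used on the CI workers. A minimal sketch of how an environment-driven timeout like this is typically read in a test helper (illustrative only; the variable name comes from the diff above, the fallback default is assumed):

import os

# Illustrative only: take the per-test timeout from the environment,
# falling back to a small default when the variable is unset.
TEST_TIMEOUT = float(os.environ.get('VUMI_TEST_TIMEOUT', '5'))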
3 changes: 3 additions & 0 deletions go/base/admin.py
@@ -19,6 +19,9 @@ def get_inline_instances(self, request, obj=None):
return []
return super(GoUserAdmin, self).get_inline_instances(request, obj=obj)

# loginas form template
change_form_template = 'loginas/change_form.html'

# The forms to add and change user instances
inlines = (UserProfileInline,)

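change_form_template is the standard ModelAdmin hook for swapping out the admin change-form template; pointing it at loginas/change_form.html is what adds django-loginas's "Log in as user" button to the user page. A generic sketch of the pattern (illustrative; ExampleUserAdmin is a made-up class, not this project's GoUserAdmin):

from django.contrib import admin

class ExampleUserAdmin(admin.ModelAdmin):
    # Illustrative only: any ModelAdmin may point change_form_template at a
    # custom template; 'loginas/change_form.html' is the template that
    # django-loginas ships to add its "Log in as user" button.
    change_form_template = 'loginas/change_form.html'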
24 changes: 24 additions & 0 deletions go/base/fixtures/sample-contacts-with-headers-and-extra-fields.csv
@@ -0,0 +1,24 @@
name,surname,msisdn
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Name 1,Surname 1,+27761234561
Name 2,Surname 2,+27761234562,baz
Name 3,Surname 3,+27761234563,foo,bar
Name 4,Surname 4,+27761234564
24 changes: 24 additions & 0 deletions go/base/fixtures/sample-contacts-with-headers-and-missing-fields.csv
@@ -0,0 +1,24 @@
name,surname,msisdn
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Extra rows,to get past dialect sniffing,+27761234560
Name 1,Surname 1,+27761234561
Name 2,Surname 2
+27761234563
Name 4,Surname 4,+27761234564
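Both fixtures pad the file with repeated "Extra rows,to get past dialect sniffing" lines because dialect sniffing needs a decent sample of consistent rows before it can guess the delimiter reliably. A small stand-alone sketch of that kind of sniffing, assuming a csv.Sniffer-style approach as the fixture comments suggest (this is not the project's parser code):

import csv

# Illustrative only: sniff the dialect from a sample, then read the file.
with open('sample-contacts-with-headers-and-extra-fields.csv', 'rU') as f:
    sample = f.read(1024)
    dialect = csv.Sniffer().sniff(sample)  # guesses delimiter, quoting, etc.
    f.seek(0)
    reader = csv.reader(f, dialect=dialect)
    header = next(reader)                  # ['name', 'surname', 'msisdn']
    rows = list(reader)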
12 changes: 8 additions & 4 deletions go/base/tests/helpers.py
@@ -140,9 +140,13 @@ def patch_settings(self, **kwargs):

@proxyable
def make_django_user(self, email='user@domain.com', password='password',
first_name="Test", last_name="User"):
user = get_user_model().objects.create_user(
email=email, password=password)
first_name="Test", last_name="User", superuser=False):
if superuser:
user = get_user_model().objects.create_superuser(
email=email, password=password)
else:
user = get_user_model().objects.create_user(
email=email, password=password)
user.first_name = first_name
user.last_name = last_name
user.save()
@@ -160,7 +164,7 @@ def create_user_profile(self, sender, instance, created, **kwargs):
return

user_helper = self.make_user(
unicode(instance.email), enable_search=False,
unicode(instance.email), enable_search=True,
django_user_pk=instance.pk)
base_models.UserProfile.objects.create(
user=instance, user_account=user_helper.account_key)
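The new superuser flag lets a test create an admin user through the same helper method; because create_superuser is used, the resulting Django user should have is_superuser set. A hedged usage sketch (the helper instance name is illustrative and need not match the project's actual test setup):

# Illustrative only: "helper" stands in for whatever test helper instance
# proxies make_django_user in a test case.
admin_user = helper.make_django_user(email='admin@domain.com', superuser=True)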
17 changes: 16 additions & 1 deletion go/contacts/parsers/csv_parser.py
@@ -37,12 +37,27 @@ def read_data_from_file(self, file_path, field_names, has_header):
if has_header:
reader.next()
for row in reader:
if None in row:
# Any extra fields are stuck in a list with a key of
# `None`. The presence of this key means we have a row
# with too many fields. We don't know how to handle
# this case, so we abort.
raise ContactParserException(
'Invalid row: too many fields.')
if None in row.values():
# Any missing fields are given a value of `None`. Since
# all legitimate field values are strings, this is a
# reliable indicator of a missing field. We don't know
# how to handle this case, so we abort.
raise ContactParserException(
'Invalid row: not enough fields.')

# Only process rows that actually have data
if any([column for column in row]):
# Our Riak client requires unicode for all keys & values
# stored.
unicoded_row = dict([(key, unicode(value or '', 'utf-8'))
for key, value in row.items()])
for key, value in row.items()])
yield unicoded_row
except csv.Error as e:
raise ContactParserException(e)
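The two new checks lean on how csv.DictReader marks malformed rows: values beyond the declared fieldnames are collected in a list under the restkey (None by default), and missing columns are filled in with the restval (also None by default). A small stand-alone illustration of that behaviour (not the project's code):

import csv
from StringIO import StringIO

fieldnames = ['name', 'surname', 'msisdn']
data = StringIO("Name 2,Surname 2,+27761234562,baz\n"  # one field too many
                "Name 3,Surname 3\n")                  # one field too few
reader = csv.DictReader(data, fieldnames=fieldnames)

too_many = reader.next()
assert too_many[None] == ['baz']      # extra values land under the key None

too_few = reader.next()
assert too_few['msisdn'] is None      # missing values are filled with None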
45 changes: 45 additions & 0 deletions go/contacts/parsers/test_parsers.py
@@ -5,6 +5,7 @@
from django.core.files.base import ContentFile

from go.base.tests.helpers import GoDjangoTestCase
from go.contacts.parsers import ContactParserException
from go.contacts.parsers.csv_parser import CSVFileParser
from go.contacts.parsers.xls_parser import XLSFileParser

@@ -103,6 +104,50 @@ def test_contacts_with_none_entries(self):
'name': 'Name 3'},
])

def test_contacts_with_missing_fields(self):
csv_file = self.fixture(
'sample-contacts-with-headers-and-missing-fields.csv')
fp = default_storage.open(csv_file, 'rU')
contacts_iter = self.parser.parse_file(fp, zip(
['name', 'surname', 'msisdn'],
['string', 'string', 'msisdn_za']), has_header=True)
contacts = []
try:
for contact in contacts_iter:
if contact['name'] == 'Extra rows':
# We don't care about these rows.
continue
contacts.append(contact)
except ContactParserException as err:
self.assertEqual(err.args[0], 'Invalid row: not enough fields.')
self.assertEqual(contacts, [{
'msisdn': '+27761234561',
'surname': 'Surname 1',
'name': 'Name 1',
}])

def test_contacts_with_extra_fields(self):
csv_file = self.fixture(
'sample-contacts-with-headers-and-extra-fields.csv')
fp = default_storage.open(csv_file, 'rU')
contacts_iter = self.parser.parse_file(fp, zip(
['name', 'surname', 'msisdn'],
['string', 'string', 'msisdn_za']), has_header=True)
contacts = []
try:
for contact in contacts_iter:
if contact['name'] == 'Extra rows':
# We don't care about these rows.
continue
contacts.append(contact)
except ContactParserException as err:
self.assertEqual(err.args[0], 'Invalid row: too many fields.')
self.assertEqual(contacts, [{
'msisdn': '+27761234561',
'surname': 'Surname 1',
'name': 'Name 1',
}])


class TestXLSParser(ParserTestCase):
PARSER_CLASS = XLSFileParser
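A note on the try/except shape of these two tests: parse_file yields contacts lazily, so the well-formed rows are collected first and the exception only surfaces once iteration reaches the malformed row. A tiny self-contained illustration of that pattern (using a stand-in exception class rather than the real ContactParserException):

class ContactParserException(Exception):
    """Stand-in for go.contacts.parsers.ContactParserException."""

def parse_rows():
    # Lazily yield one good row, then fail the way the parser does.
    yield {'name': 'Name 1', 'surname': 'Surname 1', 'msisdn': '+27761234561'}
    raise ContactParserException('Invalid row: too many fields.')

collected = []
try:
    for row in parse_rows():
        collected.append(row)
except ContactParserException as err:
    assert err.args[0] == 'Invalid row: too many fields.'
assert collected == [{'name': 'Name 1', 'surname': 'Surname 1',
                      'msisdn': '+27761234561'}]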
61 changes: 47 additions & 14 deletions go/contacts/tests.py
@@ -278,7 +278,8 @@ def test_contact_upload_into_new_group(self):
self.assertRedirects(response, group_url(group.key))
self.assertEqual(len(group.backlinks.contacts()), 0)

self.specify_columns(group.key)
response = self.specify_columns(group.key)
self.assertRedirects(response, group_url(group.key))
self.assertEqual(len(group.backlinks.contacts()), 3)
self.assertEqual(default_storage.listdir("tmp"), ([], []))

@@ -294,7 +295,8 @@ def test_contact_upload_into_existing_group(self):
self.assertRedirects(response, group_url(group.key))
group = self.contact_store.get_group(group.key)
self.assertEqual(len(group.backlinks.contacts()), 0)
self.specify_columns(group.key)
response = self.specify_columns(group.key)
self.assertRedirects(response, group_url(group.key))
self.assertEqual(len(group.backlinks.contacts()), 3)
self.assertEqual(default_storage.listdir("tmp"), ([], []))

@@ -309,7 +311,8 @@ def test_uploading_unicode_chars_in_csv(self):
})
self.assertRedirects(response, group_url(group.key))

self.specify_columns(group.key)
response = self.specify_columns(group.key)
self.assertRedirects(response, group_url(group.key))
group = self.contact_store.get_group(group.key)
self.assertEqual(len(group.backlinks.contacts()), 3)
self.assertEqual(len(mail.outbox), 1)
@@ -328,7 +331,7 @@ def test_uploading_windows_linebreaks_in_csv(self):
})
self.assertRedirects(response, group_url(group.key))

self.specify_columns(group.key, columns={
response = self.specify_columns(group.key, columns={
'column-0': 'msisdn',
'column-1': 'area',
'column-2': 'nairobi_1',
@@ -346,6 +349,7 @@ def test_uploading_windows_linebreaks_in_csv(self):
'normalize-6': '',
'normalize-7': '',
})
self.assertRedirects(response, group_url(group.key))
group = self.contact_store.get_group(group.key)
self.assertEqual(len(group.backlinks.contacts()), 2)
self.assertEqual(len(mail.outbox), 1)
@@ -364,10 +368,11 @@ def test_uploading_single_colum(self):
})
self.assertRedirects(response, group_url(group.key))

self.specify_columns(group.key, columns={
response = self.specify_columns(group.key, columns={
'column-0': 'msisdn',
'normalize-0': '',
})
self.assertRedirects(response, group_url(group.key))

group = self.contact_store.get_group(group.key)
self.assertEqual(len(group.backlinks.contacts()), 2)
@@ -453,7 +458,7 @@ def test_import_upload_is_truth(self):
})

self.assertRedirects(response, group_url(group.key))
self.specify_columns(group.key, columns={
response = self.specify_columns(group.key, columns={
'column-0': 'key',
'column-1': 'created_at',
'column-2': 'name',
@@ -469,6 +474,7 @@ def test_import_upload_is_truth(self):
'normalize-5': '',
'normalize-6': '',
}, import_rule='upload_is_truth')
self.assertRedirects(response, group_url(group.key))

group = self.contact_store.get_group(group.key)
self.assertEqual(len(group.backlinks.contacts()), 3)
@@ -565,7 +571,7 @@ def test_import_existing_is_truth(self):
})

self.assertRedirects(response, group_url(group1.key))
self.specify_columns(group1.key, columns={
response = self.specify_columns(group1.key, columns={
'column-0': 'key',
'column-1': 'created_at',
'column-2': 'name',
@@ -581,6 +587,7 @@ def test_import_existing_is_truth(self):
'normalize-5': '',
'normalize-6': '',
}, import_rule='existing_is_truth')
self.assertRedirects(response, group_url(group1.key))

group = self.contact_store.get_group(group1.key)
self.assertEqual(len(group.backlinks.contacts()), 3)
@@ -657,7 +664,8 @@ def test_uploading_unicode_chars_in_csv_into_new_group(self):
group = newest(self.contact_store.list_groups())
self.assertEqual(group.name, new_group_name)
self.assertRedirects(response, group_url(group.key))
self.specify_columns(group_key=group.key)
response = self.specify_columns(group_key=group.key)
self.assertRedirects(response, group_url(group.key))
self.assertEqual(len(group.backlinks.contacts()), 3)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue('successfully' in mail.outbox[0].subject)
@@ -839,6 +847,9 @@ def get_all_contacts(self, keys=None):
def get_latest_contact(self):
return max(self.get_all_contacts(), key=lambda c: c.created_at)

def list_group_keys(self):
return [group.key for group in self.contact_store.list_groups()]

def test_groups_creation(self):
response = self.client.post(reverse('contacts:groups'), {
'name': 'a new group',
@@ -911,10 +922,11 @@ def test_multiple_group_deletion(self):

# Delete the groups
groups_url = reverse('contacts:groups')
self.client.post(groups_url, {
response = self.client.post(groups_url, {
'group': [group_1.key, group_2.key],
'_delete': True,
})
self.assertRedirects(response, groups_url)
self.assertEqual(self.contact_store.list_groups(), [])

def test_removing_contacts_from_group(self):
@@ -923,15 +935,26 @@ def test_removing_contacts_from_group(self):
c2 = self.mkcontact(groups=[group])

group_url = reverse('contacts:group', kwargs={'group_key': group.key})
self.client.post(group_url, {
response = self.client.post(group_url, {
'_remove': True,
'contact': [c1.key]
})
self.assertRedirects(response, group_url)

self.assertEqual(
[c2.key],
self.contact_store.get_contacts_for_group(group))

def test_group_empty_post(self):
group = self.contact_store.new_group(TEST_GROUP_NAME)

self.assertEqual(self.list_group_keys(), [group.key])
group_url = reverse('contacts:group', kwargs={'group_key': group.key})
response = self.client.post(group_url)
self.assertRedirects(response, group_url)

self.assertEqual(self.list_group_keys(), [group.key])

def test_group_deletion(self):
group = self.contact_store.new_group(TEST_GROUP_NAME)

@@ -1142,6 +1165,9 @@ def mksmart_group(self, query, name='a smart group'):
self.assertRedirects(response, group_url(group.key))
return group

def list_group_keys(self):
return [group.key for group in self.contact_store.list_groups()]

def add_to_group(self, contact, group):
contact.add_to_group(group)
contact.save()
@@ -1152,14 +1178,21 @@ def test_smart_groups_creation(self):
self.assertEqual(u'a smart group', group.name)
self.assertEqual(u'msisdn:\+12*', group.query)

def test_smart_group_empty_post(self):
group = self.mksmart_group('msisdn:\+12*')
group_url = reverse('contacts:group', kwargs={'group_key': group.key})
response = self.client.post(group_url)
self.assertRedirects(response, group_url)
self.assertEqual(self.list_group_keys(), [group.key])

def test_smart_group_deletion(self):
group = self.mksmart_group('msisdn:\+12*')
response = self.client.post(
reverse('contacts:group', kwargs={'group_key': group.key}),
{'_delete_group': 1})
self.assertEqual(self.list_group_keys(), [group.key])
group_url = reverse('contacts:group', kwargs={'group_key': group.key})
response = self.client.post(group_url, {'_delete_group': 1})
self.assertRedirects(response, reverse('contacts:index'),
target_status_code=302)
self.assertTrue(group not in self.contact_store.list_groups())
self.assertEqual(self.list_group_keys(), [])

def test_smart_group_clearing(self):
contact = self.mkcontact()

