OpenConceptLab/ocl_issues#615 | inline import can understand delete action from import file for org/source/collection
snyaggarwal committed Mar 2, 2021
1 parent 1fa6344 commit a05c523
Showing 2 changed files with 95 additions and 22 deletions.
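For context: the new "__action" key lets a line in a bulk-import file request deletion instead of creation. Below is a minimal usage sketch (not part of this commit), mirroring the payloads exercised in the tests further down; the org id is illustrative and assumed to exist.

import json

from core.importers.models import BulkImportInline

# Each line of an import file is one JSON object; "__action": "DELETE"
# asks the importer to delete the resource instead of creating it.
data = json.dumps({"type": "Organization", "__action": "DELETE", "id": "DATIM-MOH-BI-FY19"})

importer = BulkImportInline(data, 'ocladmin', True)  # (content, username, update_if_exists)
importer.run()

# Deleted and missing resources are reported in their own buckets, e.g.
# "Processed: 1/1 | Created: 0 | Updated: 0 | Deleted: 1 | Existing: 0 | Time: 0.2secs"
print(importer.detailed_summary)
print(importer.json_result['deleted'], importer.json_result['not_found'])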
75 changes: 64 additions & 11 deletions core/importers/models.py
@@ -12,7 +12,7 @@
from core.collections.models import Collection
from core.common.constants import HEAD
from core.common.services import RedisService
from core.common.tasks import bulk_import_parts_inline
from core.common.tasks import bulk_import_parts_inline, delete_organization
from core.common.utils import drop_version
from core.concepts.models import Concept
from core.mappings.models import Mapping
@@ -91,6 +91,8 @@ def run(self):
CREATED = 1
UPDATED = 2
FAILED = 3
DELETED = 4
NOT_FOUND = 5


class BaseResourceImporter:
@@ -168,7 +170,10 @@ class OrganizationImporter(BaseResourceImporter):
allowed_fields = ["id", "company", "extras", "location", "name", "public_access", "website"]

def exists(self):
return Organization.objects.filter(mnemonic=self.get('id')).exists()
return self.get_queryset().exists()

def get_queryset(self):
return Organization.objects.filter(mnemonic=self.get('id'))

def parse(self):
super().parse()
@@ -180,6 +185,12 @@ def process(self):
return CREATED
return FAILED

def delete(self):
if self.exists():
delete_organization(self.get_queryset().first().id)
return DELETED
return NOT_FOUND


class SourceImporter(BaseResourceImporter):
mandatory_fields = {'id', 'short_code', 'name', 'full_name', 'owner_type', 'owner', 'source_type'}
@@ -189,9 +200,12 @@
]

def exists(self):
return self.get_queryset().exists()

def get_queryset(self):
return Source.objects.filter(
**{self.get_owner_type_filter(): self.get('owner'), 'mnemonic': self.get('id')}
).exists()
)

def parse(self):
owner_type = self.get('owner_type').lower()
@@ -214,6 +228,17 @@ def process(self):
errors = Source.persist_new(source, self.user)
return errors or CREATED

def delete(self):
if self.exists():
source = self.get_queryset().first()
try:
source.delete()
return DELETED
except Exception as ex:
return dict(errors=ex.args)

return NOT_FOUND


class SourceVersionImporter(BaseResourceImporter):
mandatory_fields = {"id"}
@@ -250,9 +275,12 @@ class CollectionImporter(BaseResourceImporter):
]

def exists(self):
return self.get_queryset().exists()

def get_queryset(self):
return Collection.objects.filter(
**{self.get_owner_type_filter(): self.get('owner'), 'mnemonic': self.get('id')}
).exists()
)

def parse(self):
owner_type = self.get('owner_type').lower()
@@ -275,6 +303,17 @@ def process(self):
errors = Collection.persist_new(coll, self.user)
return errors or CREATED

def delete(self):
if self.exists():
collection = self.get_queryset().first()
try:
collection.delete()
return DELETED
except Exception as ex:
return dict(errors=ex.args)

return NOT_FOUND


class CollectionVersionImporter(BaseResourceImporter):
mandatory_fields = {"id"}
@@ -476,6 +515,8 @@ def __init__( # pylint: disable=too-many-arguments
self.exists = []
self.created = []
self.updated = []
self.deleted = []
self.not_found = []
self.failed = []
self.exception = []
self.others = []
@@ -484,7 +525,7 @@ def __init__( # pylint: disable=too-many-arguments
self.start_time = time.time()
self.elapsed_seconds = 0

def handle_item_import_result(self, result, item):
def handle_item_import_result(self, result, item): # pylint: disable=too-many-return-statements
if result is None:
self.exists.append(item)
return
@@ -494,6 +535,12 @@ def handle_item_import_result(self, result, item):
if result == FAILED:
self.failed.append(item)
return
if result == DELETED:
self.deleted.append(item)
return
if result == NOT_FOUND:
self.not_found.append(item)
return
if isinstance(result, dict):
item['errors'] = result
self.failed.append(item)
@@ -524,16 +571,19 @@ def run(self):
self.notify_progress()
item = original_item.copy()
item_type = item.pop('type', '').lower()
action = item.pop('__action', '').lower()
if not item_type:
self.unknown.append(original_item)
if item_type == 'organization':
org_importer = OrganizationImporter(item, self.user, self.update_if_exists)
self.handle_item_import_result(
OrganizationImporter(item, self.user, self.update_if_exists).run(), original_item
org_importer.delete() if action == 'delete' else org_importer.run(), original_item
)
continue
if item_type == 'source':
source_importer = SourceImporter(item, self.user, self.update_if_exists)
self.handle_item_import_result(
SourceImporter(item, self.user, self.update_if_exists).run(), original_item
source_importer.delete() if action == 'delete' else source_importer.run(), original_item
)
continue
if item_type == 'source version':
@@ -542,8 +592,9 @@
)
continue
if item_type == 'collection':
collection_importer = CollectionImporter(item, self.user, self.update_if_exists)
self.handle_item_import_result(
CollectionImporter(item, self.user, self.update_if_exists).run(), original_item
collection_importer.delete() if action == 'delete' else collection_importer.run(), original_item
)
continue
if item_type == 'collection version':
@@ -575,15 +626,17 @@ def run(self):

@property
def detailed_summary(self):
return "Processed: {}/{} | Created: {} | Updated: {} | Existing: {} | Time: {}secs".format(
self.processed, self.total, len(self.created), len(self.updated), len(self.exists), self.elapsed_seconds
return "Processed: {}/{} | Created: {} | Updated: {} | DELETED: {} | Existing: {} | Time: {}secs".format(
self.processed, self.total, len(self.created), len(self.updated), len(self.deleted),
len(self.exists), self.elapsed_seconds
)

@property
def json_result(self):
return dict(
total=self.total, processed=self.processed, created=self.created, updated=self.updated,
invalid=self.invalid, exists=self.exists, failed=self.failed, exception=self.exception,
invalid=self.invalid, exists=self.exists, failed=self.failed, deleted=self.deleted,
not_found=self.not_found, exception=self.exception,
others=self.others, unknown=self.unknown, elapsed_seconds=self.elapsed_seconds
)

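A note on the models.py hunks above: organizations are removed via the delete_organization task, while sources and collections call the model's delete() inside a try/except and return the exception args as an errors dict. handle_item_import_result then files each item under deleted, not_found, or failed. A self-contained sketch of that routing contract (the buckets dict stands in for the importer's lists):

DELETED, NOT_FOUND = 4, 5  # result codes introduced in this commit

def route_delete_result(result, item, buckets):
    # Mirrors handle_item_import_result for the delete-related outcomes.
    if result == DELETED:
        buckets['deleted'].append(item)
    elif result == NOT_FOUND:
        buckets['not_found'].append(item)
    elif isinstance(result, dict):  # delete() caught an exception and returned dict(errors=...)
        item['errors'] = result
        buckets['failed'].append(item)

buckets = {'deleted': [], 'not_found': [], 'failed': []}
route_delete_result(NOT_FOUND, {'type': 'Source', 'id': 'MissingSource'}, buckets)
assert buckets['not_found'] == [{'type': 'Source', 'id': 'MissingSource'}]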
42 changes: 31 additions & 11 deletions core/importers/tests.py
@@ -1,5 +1,6 @@
import json
import os
import unittest
import uuid

from celery_once import AlreadyQueued
@@ -52,35 +53,53 @@ def test_run(self, flex_importer_mock):

class BulkImportInlineTest(OCLTestCase):
def test_org_import(self):
self.assertFalse(Organization.objects.filter(mnemonic='DemoOrg').exists())
self.assertFalse(Organization.objects.filter(mnemonic='DATIM-MOH-BI-FY19').exists())

data = {
"type": "Organization", "id": "DemoOrg", "website": "", "name": "OCL Demo Organization", "company": "",
"public_access": "View"
}
importer = BulkImportInline(json.dumps(data), 'ocladmin', True)
OrganizationFactory(mnemonic='DATIM-MOH-BI-FY19', location='blah')
self.assertTrue(Organization.objects.filter(mnemonic='DATIM-MOH-BI-FY19').exists())

data = '{"type": "Organization", "__action": "DELETE", "id": "DATIM-MOH-BI-FY19"}\n' \
'{"name": "DATIM MOH Burundi", "extras": {"datim_moh_country_code": "BI", "datim_moh_period": "FY19",' \
' "datim_moh_object": true}, "location": "Burundi", "public_access": "None", "type": "Organization",' \
' "id": "DATIM-MOH-BI-FY19"}'
importer = BulkImportInline(data, 'ocladmin', True)
importer.run()

self.assertTrue(Organization.objects.filter(mnemonic='DemoOrg').exists())
self.assertEqual(importer.processed, 1)
self.assertTrue(Organization.objects.filter(mnemonic='DATIM-MOH-BI-FY19').exists())
self.assertEqual(importer.processed, 2)
self.assertEqual(len(importer.created), 1)
self.assertEqual(importer.created[0], data)
self.assertEqual(len(importer.deleted), 1)
self.assertTrue(importer.elapsed_seconds > 0)

data = {
"type": "Organization", "id": "DemoOrg", "website": "", "name": "OCL Demo Organization", "company": "",
"public_access": "View"
"name": "DATIM MOH Burundi", "extras": {
"datim_moh_country_code": "BI", "datim_moh_period": "FY19", "datim_moh_object": True
}, "location": "Burundi", "public_access": "None", "type": "Organization", "id": "DATIM-MOH-BI-FY19"
}
importer = BulkImportInline(json.dumps(data), 'ocladmin', True)
importer.run()

self.assertEqual(importer.processed, 1)
self.assertEqual(len(importer.created), 0)
self.assertEqual(len(importer.failed), 0)
self.assertEqual(len(importer.deleted), 0)
self.assertEqual(len(importer.exists), 1)
self.assertEqual(importer.exists[0], data)
self.assertTrue(importer.elapsed_seconds > 0)

data = {"type": "Organization", "__action": "DELETE", "id": "FOOBAR"}
importer = BulkImportInline(json.dumps(data), 'ocladmin', True)
importer.run()

self.assertEqual(importer.processed, 1)
self.assertEqual(len(importer.created), 0)
self.assertEqual(len(importer.failed), 0)
self.assertEqual(len(importer.deleted), 0)
self.assertEqual(len(importer.exists), 0)
self.assertEqual(len(importer.not_found), 1)
self.assertEqual(importer.not_found[0], data)
self.assertTrue(importer.elapsed_seconds > 0)

def test_source_import_success(self):
OrganizationFactory(mnemonic='DemoOrg')
self.assertFalse(Source.objects.filter(mnemonic='DemoSource').exists())
@@ -365,6 +384,7 @@ def test_sample_import(self):
self.assertEqual(len(importer.invalid), 0)
self.assertEqual(len(importer.others), 0)

@unittest.skip('[Skipped] PEPFAR (small) Import Sample')
def test_pepfar_import(self):
importer = BulkImportInline(
open(os.path.join(os.path.dirname(__file__), '..', 'samples/pepfar_datim_moh_fy19.json'), 'r').read(),
