
Commit

OpenConceptLab/ocl_issues#957 | importers | indexing current latest and version objects for concept/mapping
snyaggarwal committed Dec 31, 2023
1 parent 10e916e commit a8d36f8
Showing 4 changed files with 78 additions and 15 deletions.
4 changes: 4 additions & 0 deletions core/concepts/models.py
@@ -749,6 +749,8 @@ def persist_new(cls, data, user=None, create_initial_version=True, create_parent
         if create_initial_version:
             initial_version = cls.create_initial_version(concept)
             if initial_version.id:
+                if not concept._index:
+                    concept.latest_version_id = initial_version.id
                 initial_version.set_locales(names, ConceptName)
                 initial_version.set_locales(descriptions, ConceptDescription)
                 initial_version.sources.set([parent])
@@ -831,6 +833,8 @@ def persist_clone(
         obj.clean()  # clean here to validate locales that can only be saved after obj is saved
         obj.update_versioned_object()
         if prev_latest_version:
+            if not obj._index:  # pylint: disable=protected-access
+                obj.prev_latest_version_id = prev_latest_version.id
             prev_latest_version._index = obj._index  # pylint: disable=protected-access
             prev_latest_version.is_latest_version = False
             prev_latest_version.save(update_fields=['is_latest_version', '_index'])
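The two hunks above follow one pattern: when `_index` is falsy (bulk imports defer search indexing), the head concept records which version rows were just created or superseded so they can be indexed later. A rough sketch of that idea — not the OCL implementation; `VersionedResource` and `schedule_index` are illustrative names only:

class VersionedResource:
    """Sketch of a resource that can defer search indexing to a later batch step."""

    def __init__(self, index_immediately=True):
        self._index = index_immediately      # same role as the _index flag in the diff
        self.latest_version_id = None        # only filled in when indexing is deferred
        self.prev_latest_version_id = None

    def record_new_version(self, new_version_id, prev_latest_version_id=None):
        if self._index:
            schedule_index([new_version_id])                 # index immediately
        else:
            # deferred: remember the ids so an importer can index them in bulk later
            self.latest_version_id = new_version_id
            self.prev_latest_version_id = prev_latest_version_id


def schedule_index(ids):
    print(f'indexing ids: {ids}')            # stand-in for a real indexing task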
26 changes: 20 additions & 6 deletions core/importers/models.py
@@ -751,14 +751,28 @@ def run(self): # pylint: disable=too-many-branches,too-many-statements,too-many
                 concept_importer = ConceptImporter(item, self.user, self.update_if_exists)
                 _result = concept_importer.delete() if action == 'delete' else concept_importer.run()
                 if get(concept_importer.instance, 'id'):
-                    new_concept_ids.add(concept_importer.instance.versioned_object_id)
+                    new_concept_ids.update(set(compact(
+                        [
+                            concept_importer.instance.versioned_object_id,
+                            get(concept_importer.instance, 'prev_latest_version_id'),
+                            get(concept_importer.instance, 'latest_version_id'),
+                            concept_importer.instance.id,
+                        ]
+                    )))
                 self.handle_item_import_result(_result, original_item)
                 continue
             if item_type == 'mapping':
                 mapping_importer = MappingImporter(item, self.user, self.update_if_exists)
                 _result = mapping_importer.delete() if action == 'delete' else mapping_importer.run()
                 if get(mapping_importer.instance, 'id'):
-                    new_mapping_ids.add(mapping_importer.instance.versioned_object_id)
+                    new_mapping_ids.update(set(compact(
+                        [
+                            mapping_importer.instance.versioned_object_id,
+                            get(mapping_importer.instance, 'prev_latest_version_id'),
+                            get(mapping_importer.instance, 'latest_version_id'),
+                            mapping_importer.instance.id,
+                        ]
+                    )))
                 self.handle_item_import_result(_result, original_item)
                 continue
             if item_type == 'reference':
@@ -768,13 +782,13 @@ def run(self): # pylint: disable=too-many-branches,too-many-statements,too-many
                 continue
 
         if new_concept_ids:
-            for chunk in chunks(list(new_concept_ids), 1000):
+            for chunk in chunks(list(set(new_concept_ids)), 1000):
                 batch_index_resources.apply_async(
-                    ('concept', {'versioned_object_id__in': chunk}, True), queue='indexing')
+                    ('concept', {'id__in': chunk}, True), queue='indexing')
         if new_mapping_ids:
-            for chunk in chunks(list(new_mapping_ids), 1000):
+            for chunk in chunks(list(set(new_mapping_ids)), 1000):
                 batch_index_resources.apply_async(
-                    ('mapping', {'versioned_object_id__in': chunk}, True), queue='indexing')
+                    ('mapping', {'id__in': chunk}, True), queue='indexing')
 
         self.elapsed_seconds = time.time() - self.start_time
 
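With those ids available, the importer now indexes the head record, the new version, and the previous latest version by primary key (`id__in`) rather than only the head objects (`versioned_object_id__in`), still dispatching in chunks of 1000. A minimal sketch of that dispatch step, assuming a local `chunks` helper and passing the Celery task in explicitly (OCL has its own utilities for both; `dispatch_indexing` is a hypothetical name):

from itertools import islice

def chunks(iterable, size):
    """Yield lists of at most `size` items — stand-in for OCL's own chunking utility."""
    it = iter(iterable)
    while batch := list(islice(it, size)):
        yield batch

def dispatch_indexing(resource_type, ids, batch_index_resources, chunk_size=1000):
    # de-duplicate, then queue one indexing task per chunk, filtering by primary key
    for chunk in chunks(sorted(set(ids)), chunk_size):
        batch_index_resources.apply_async(
            (resource_type, {'id__in': chunk}, True), queue='indexing')

Filtering on `id__in` matters here because the collected set mixes head ids and version ids, and the primary key is the only column they share.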
59 changes: 50 additions & 9 deletions core/importers/tests.py
@@ -269,10 +269,14 @@ def test_concept_import(self, batch_index_resources_mock):
 
         self.assertEqual(source.concepts_set.count(), 2)
         self.assertEqual(Concept.objects.filter(mnemonic='Food').count(), 2)
+        concept = Concept.objects.filter(mnemonic='Food', id=F('versioned_object_id')).first()
+        self.assertEqual(concept.versions.count(), 1)
+        self.assertTrue(Concept.objects.filter(mnemonic='Food', is_latest_version=True).exists())
+        batch_index_resources_mock.apply_async.assert_called_with(('concept', {'id__in': ANY}, True), queue='indexing')
         self.assertEqual(
-            Concept.objects.filter(mnemonic='Food', id=F('versioned_object_id')).first().versions.count(), 1
+            sorted(batch_index_resources_mock.apply_async.mock_calls[0][1][0][1]['id__in']),
+            sorted([concept.id, concept.get_latest_version().id])
         )
-        self.assertTrue(Concept.objects.filter(mnemonic='Food', is_latest_version=True).exists())
 
         data = {
             "type": "Concept", "id": "Food", "concept_class": "Root",
@@ -290,11 +294,39 @@ def test_concept_import(self, batch_index_resources_mock):
         self.assertEqual(importer.failed, [])
         self.assertTrue(importer.elapsed_seconds > 0)
         self.assertEqual(source.concepts_set.count(), 3)
+        concept = Concept.objects.filter(mnemonic='Food', id=F('versioned_object_id')).first()
+        self.assertEqual(concept.versions.count(), 2)
+        self.assertTrue(Concept.objects.filter(mnemonic='Food', is_latest_version=True, datatype='Rule').exists())
+        batch_index_resources_mock.apply_async.assert_called_with(('concept', {'id__in': ANY}, True), queue='indexing')
         self.assertEqual(
-            Concept.objects.filter(mnemonic='Food', id=F('versioned_object_id')).first().versions.count(), 2
+            sorted(batch_index_resources_mock.apply_async.mock_calls[1][1][0][1]['id__in']),
+            sorted([concept.id, concept.get_latest_version().prev_version.id, concept.get_latest_version().id])
         )
+
+        data = {
+            "type": "Concept", "id": "Food", "concept_class": "Root",
+            "datatype": "Foo", "source": "DemoSource", "owner": "DemoOrg", "owner_type": "Organization",
+            "names": [{"name": "Food", "locale": "en", "locale_preferred": "True", "name_type": "Fully Specified"}],
+            "descriptions": [],
+        }
+
+        importer = BulkImportInline(json.dumps(data), 'ocladmin', True)
+        importer.run()
+
+        self.assertEqual(importer.processed, 1)
+        self.assertEqual(len(importer.created), 0)
+        self.assertEqual(len(importer.updated), 1)
+        self.assertEqual(importer.failed, [])
+        self.assertTrue(importer.elapsed_seconds > 0)
+        self.assertEqual(source.concepts_set.count(), 4)
+        concept = Concept.objects.filter(mnemonic='Food', id=F('versioned_object_id')).first()
+        self.assertEqual(concept.versions.count(), 3)
+        self.assertTrue(Concept.objects.filter(mnemonic='Food', is_latest_version=True, datatype='Foo').exists())
+        batch_index_resources_mock.apply_async.assert_called_with(('concept', {'id__in': ANY}, True), queue='indexing')
+        self.assertEqual(
+            sorted(batch_index_resources_mock.apply_async.mock_calls[2][1][0][1]['id__in']),
+            sorted([concept.id, concept.get_latest_version().prev_version.id, concept.get_latest_version().id])
+        )
-        self.assertTrue(Concept.objects.filter(mnemonic='Food', is_latest_version=True, datatype='Rule').exists())
-        batch_index_resources_mock.apply_async.assert_called()
 
     @patch('core.importers.models.batch_index_resources')
     def test_concept_import_with_auto_assignment_mnemonic(self, batch_index_resources_mock):
@@ -402,10 +434,16 @@ def test_mapping_import(self, batch_index_resources_mock):
         importer.run()
 
         self.assertEqual(Mapping.objects.filter(map_type='Has Child').count(), 2)
+        mapping = Mapping.objects.filter(map_type='Has Child', id=F('versioned_object_id')).first()
+        self.assertEqual(mapping.versions.count(), 1)
+        self.assertTrue(Mapping.objects.filter(map_type='Has Child', is_latest_version=True).exists())
+        batch_index_resources_mock.apply_async.assert_called_with(('mapping', {'id__in': ANY}, True), queue='indexing')
         self.assertEqual(
-            Mapping.objects.filter(map_type='Has Child', id=F('versioned_object_id')).first().versions.count(), 1
+            sorted(batch_index_resources_mock.apply_async.mock_calls[0][1][0][1]['id__in']),
+            sorted([mapping.id, mapping.get_latest_version().id])
         )
-        self.assertTrue(Mapping.objects.filter(map_type='Has Child', is_latest_version=True).exists())
+
+
         self.assertEqual(importer.processed, 1)
         self.assertEqual(len(importer.created), 1)
         self.assertEqual(importer.failed, [])
@@ -422,15 +460,18 @@ def test_mapping_import(self, batch_index_resources_mock):
         importer = BulkImportInline(json.dumps(data), 'ocladmin', True)
         importer.run()
 
+        mapping = Mapping.objects.filter(map_type='Has Child', id=F('versioned_object_id')).first()
+        self.assertEqual(mapping.versions.count(), 2)
+        batch_index_resources_mock.apply_async.assert_called_with(('mapping', {'id__in': ANY}, True), queue='indexing')
         self.assertEqual(
-            Mapping.objects.filter(map_type='Has Child', id=F('versioned_object_id')).first().versions.count(), 2
+            sorted(batch_index_resources_mock.apply_async.mock_calls[1][1][0][1]['id__in']),
+            sorted([mapping.id, mapping.get_latest_version().prev_version.id, mapping.get_latest_version().id])
         )
         self.assertEqual(importer.processed, 1)
         self.assertEqual(len(importer.created), 0)
         self.assertEqual(len(importer.updated), 1)
         self.assertEqual(importer.failed, [])
         self.assertTrue(importer.elapsed_seconds > 0)
-        batch_index_resources_mock.apply_async.assert_called()
 
     @patch('core.importers.models.batch_index_resources')
     def test_mapping_import_with_autoid_assignment(self, batch_index_resources_mock):
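The updated tests reach into `mock_calls` to recover the exact `id__in` list handed to the queued indexing task. The chain `mock_calls[0][1][0][1]['id__in']` is easier to read once each index is spelled out; a small self-contained example using only `unittest.mock`:

from unittest.mock import MagicMock

task = MagicMock()
task.apply_async(('concept', {'id__in': [3, 1, 2]}, True), queue='indexing')

name, args, kwargs = task.mock_calls[0]      # each recorded call is a (name, args, kwargs) triple
assert name == 'apply_async'
assert kwargs == {'queue': 'indexing'}
assert args[0] == ('concept', {'id__in': [3, 1, 2]}, True)
# so mock_calls[0][1][0][1]['id__in'] is the id list passed to the indexing task
assert sorted(task.mock_calls[0][1][0][1]['id__in']) == [1, 2, 3]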
4 changes: 4 additions & 0 deletions core/mappings/models.py
@@ -516,6 +516,8 @@ def persist_new(cls, data, user):
         mapping.public_access = parent.public_access
         mapping.save()
         initial_version = cls.create_initial_version(mapping)
+        if initial_version.id and not mapping._index:
+            mapping.latest_version_id = initial_version.id
         initial_version.sources.set([parent])
         mapping.sources.set([parent])
         mapping.set_checksums()
@@ -578,6 +580,8 @@ def persist_clone(cls, obj, user=None, **kwargs): # pylint: disable=too-many-st
         obj.save()
         obj.update_versioned_object()
         if prev_latest_version:
+            if not obj._index:  # pylint: disable=protected-access
+                obj.prev_latest_version_id = prev_latest_version.id
             prev_latest_version.is_latest_version = False
             prev_latest_version._index = obj._index  # pylint: disable=protected-access
             prev_latest_version.save(update_fields=['is_latest_version', '_index'])
