Skip to content

Commit

Permalink
Merge branch 'master' into dev
Browse files Browse the repository at this point in the history
  • Loading branch information
snyaggarwal committed Oct 6, 2022
2 parents 9bac802 + 012d628 commit 7f2a5d3
Show file tree
Hide file tree
Showing 14 changed files with 248 additions and 32 deletions.
2 changes: 1 addition & 1 deletion core/__init__.py
@@ -1,4 +1,4 @@
API_VERSION = '2.2.41'
API_VERSION = '2.2.42'
API_BUILD = 'dev'
VERSION = API_VERSION + '-' + API_BUILD
__version__ = VERSION
9 changes: 9 additions & 0 deletions core/collections/serializers.py
@@ -1,3 +1,4 @@
import json
from datetime import datetime

from django.core.validators import RegexValidator
Expand Down Expand Up @@ -96,6 +97,14 @@ def prepare_object(self, validated_data, instance=None):
]:
setattr(collection, attr, validated_data.get(attr, get(collection, attr)))

for attr in ['jurisdiction', 'identifier', 'contact', 'meta']:
value = validated_data.get(attr, get(collection, attr))
try:
value = json.loads(value) if isinstance(value, str) else value
except: # pylint: disable=bare-except
pass
setattr(collection, attr, value)

collection.full_name = validated_data.get('full_name', collection.full_name) or collection.name
collection.autoexpand_head = validated_data.get('autoexpand_head', collection.autoexpand_head)
collection.autoexpand = validated_data.get('autoexpand', collection.autoexpand)
Expand Down
14 changes: 13 additions & 1 deletion core/collections/tests/tests.py
Expand Up @@ -2298,9 +2298,10 @@ def test_parse_string_expression_generic(self): # pylint: disable=too-many-stat
"/users/Me/sources/MySource/concepts/?q=foobar",
"/users/Me/sources/MySource/v1/concepts/?q=foobar&datatype=rule",
"/users/Me/collections/MyColl/v1/mappings/?mapType=Q-AND-A",
"/orgs/MyOrg/sources/MySource/concepts/foo%252Fbar/",
])
)
self.assertEqual(len(references), 5)
self.assertEqual(len(references), 6)

reference = references[0]
self.assertEqual(reference.expression, "/orgs/MyOrg/sources/MySource/concepts/c-1234/")
Expand Down Expand Up @@ -2367,6 +2368,17 @@ def test_parse_string_expression_generic(self): # pylint: disable=too-many-stat
'Include mappings from version "v1" of Me/MyColl having mapType equal to "Q-AND-A"'
)

reference = references[5]
self.assertEqual(reference.expression, "/orgs/MyOrg/sources/MySource/concepts/foo%252Fbar/")
self.assertEqual(reference.system, "/orgs/MyOrg/sources/MySource/")
self.assertEqual(reference.code, "foo%252Fbar")
self.assertEqual(reference.reference_type, 'concepts')
self.assertIsNone(reference.version)
self.assertIsNone(reference.valueset)
self.assertIsNone(reference.filter)
self.assertIsNone(reference.cascade)
self.assertEqual(reference.translation, 'Include latest concept "foo/bar" from MyOrg/MySource')

def test_parse_string_expression_concepts_mappings_explicit(self): # pylint: disable=too-many-statements
references = self.get_expanded_references(
expression=dict(concepts=[
Expand Down
9 changes: 6 additions & 3 deletions core/collections/translators.py
@@ -1,6 +1,7 @@
from pydash import get

from core.collections.constants import SOURCE_TO_CONCEPTS, SOURCE_MAPPINGS
from core.common.utils import is_url_encoded_string, decode_string


class CollectionReferenceTranslator:
Expand Down Expand Up @@ -46,17 +47,19 @@ def __get_cascade_translation(self):
english += 'PLUS its mappings '
return english

def translate(self): # pylint: disable=too-many-branches
def translate(self): # pylint: disable=too-many-branches,too-many-statements
english = f'{self.reference_effect} '
if not self.__has_any_repo_version() and not self.reference.resource_version:
english += 'latest '
entity = self.ref_entity
if self.reference.code:
code = self.reference.code
if code:
code = decode_string(decode_string(code)) if is_url_encoded_string(code) else code
if self.reference.resource_version:
english += f'version "{self.reference.resource_version}" of '
elif self.reference.transform:
english += 'latest version of '
english += f'{entity} "{self.reference.code}" from '
english += f'{entity} "{code}" from '
else:
english += f'{entity}s '
if self.reference.system or self.reference.valueset:
Expand Down
7 changes: 5 additions & 2 deletions core/collections/views.py
Expand Up @@ -261,8 +261,11 @@ def delete(self, request, *args, **kwargs): # pylint: disable=unused-argument
collection = self.get_object()

if not self.is_inline_requested():
task = delete_collection.delay(collection.id)
return Response(dict(task=task.id), status=status.HTTP_202_ACCEPTED)
try:
task = delete_collection.delay(collection.id)
return Response(dict(task=task.id), status=status.HTTP_202_ACCEPTED)
except AlreadyQueued:
return Response(dict(detail='Already Queued'), status=status.HTTP_409_CONFLICT)

result = delete_collection(collection.id)

Expand Down
38 changes: 36 additions & 2 deletions core/importers/models.py
Expand Up @@ -437,6 +437,23 @@ def process(self):

return PERMISSION_DENIED

def delete(self):
    """Retire the concept described by ``self.data``.

    Returns ``DELETED`` on success, ``PERMISSION_DENIED`` when the user
    lacks edit access on the parent source, ``NOT_FOUND`` when no matching
    concept exists, the falsy ``clean()`` result when validation fails, or
    an ``errors`` dict when the retire operation raises.
    """
    validation = self.clean()
    if not validation:
        return validation
    if not self.exists():
        return NOT_FOUND

    source = self.data.get('parent')
    try:
        if not source.has_edit_access(self.user):
            return PERMISSION_DENIED
        # "delete" via the importer is a soft delete: retire the concept.
        self.get_queryset().first().retire(self.user)
        return DELETED
    except Exception as ex:  # pylint: disable=broad-except
        return dict(errors=ex.args)


class MappingImporter(BaseResourceImporter):
mandatory_fields = {"map_type", "from_concept_url"}
Expand Down Expand Up @@ -553,6 +570,23 @@ def process(self):

return PERMISSION_DENIED

def delete(self):
    """Retire the mapping described by ``self.data``.

    Returns ``DELETED`` on success, ``PERMISSION_DENIED`` when the user
    lacks edit access on the parent source, ``NOT_FOUND`` when no matching
    mapping exists, the falsy ``clean()`` result when validation fails, or
    an ``errors`` dict when the retire operation raises.
    """
    cleaned = self.clean()
    if not cleaned:
        return cleaned

    if self.exists():
        owner_source = self.data.get('parent')
        try:
            if owner_source.has_edit_access(self.user):
                # Soft delete: retiring keeps history, it does not hard-remove.
                target = self.get_queryset().first()
                target.retire(self.user)
                return DELETED
            return PERMISSION_DENIED
        except Exception as ex:  # pylint: disable=broad-except
            return dict(errors=ex.args)

    return NOT_FOUND


class ReferenceImporter(BaseResourceImporter):
mandatory_fields = {"data"}
Expand Down Expand Up @@ -708,7 +742,7 @@ def run(self): # pylint: disable=too-many-branches,too-many-statements,too-many
continue
if item_type == 'concept':
concept_importer = ConceptImporter(item, self.user, self.update_if_exists)
_result = concept_importer.run()
_result = concept_importer.delete() if action == 'delete' else concept_importer.run()
if get(concept_importer.instance, 'id'):
parent_url = concept_importer.instance.parent.uri
if parent_url not in new_concept_ids:
Expand All @@ -718,7 +752,7 @@ def run(self): # pylint: disable=too-many-branches,too-many-statements,too-many
continue
if item_type == 'mapping':
mapping_importer = MappingImporter(item, self.user, self.update_if_exists)
_result = mapping_importer.run()
_result = mapping_importer.delete() if action == 'delete' else mapping_importer.run()
if get(mapping_importer.instance, 'id'):
parent_url = mapping_importer.instance.parent.uri
if parent_url not in new_mapping_ids:
Expand Down
120 changes: 117 additions & 3 deletions core/importers/tests.py
Expand Up @@ -425,11 +425,66 @@ def test_sample_import(self, batch_index_resources_mock):
self.assertEqual(len(importer.created), 49)
self.assertEqual(len(importer.exists), 3)
self.assertEqual(len(importer.updated), 12)
self.assertEqual(len(importer.deleted), 0)
self.assertEqual(len(importer.failed), 0)
self.assertEqual(len(importer.invalid), 0)
self.assertEqual(len(importer.others), 0)
self.assertEqual(len(importer.permission_denied), 0)
batch_index_resources_mock.apply_async.assert_called()
self.assertEqual(batch_index_resources_mock.apply_async.call_count, 2)

data = {
"type": "Concept", "id": "Corn", "concept_class": "Root",
"datatype": "None", "source": "DemoSource", "owner": "DemoOrg", "owner_type": "Organization",
"names": [{"name": "Food", "locale": "en", "locale_preferred": "True", "name_type": "Fully Specified"}],
"descriptions": [], '__action': 'delete'
}

importer = BulkImportInline(json.dumps(data), 'ocladmin', True)
importer.run()

self.assertEqual(importer.processed, 1)
self.assertEqual(len(importer.created), 0)
self.assertEqual(len(importer.exists), 0)
self.assertEqual(len(importer.updated), 0)
self.assertEqual(len(importer.deleted), 1)
self.assertEqual(len(importer.failed), 0)
self.assertEqual(len(importer.invalid), 0)
self.assertEqual(len(importer.others), 0)
self.assertEqual(len(importer.permission_denied), 0)
self.assertEqual(batch_index_resources_mock.apply_async.call_count, 2) # no new indexing call
concept = Concept.objects.filter(mnemonic='Corn').first()
self.assertTrue(concept.get_latest_version().retired)
self.assertTrue(concept.versioned_object.retired)
self.assertFalse(concept.get_latest_version().prev_version.retired)

data = {
"to_concept_url": "/orgs/DemoOrg/sources/DemoSource/concepts/Corn/",
"from_concept_url": "/orgs/DemoOrg/sources/DemoSource/concepts/Vegetable/",
"type": "Mapping", "source": "DemoSource",
"extras": None, "owner": "DemoOrg", "map_type": "Has Child", "owner_type": "Organization",
"external_id": None, '__action': 'delete'
}

importer = BulkImportInline(json.dumps(data), 'ocladmin', True)
importer.run()

self.assertEqual(importer.processed, 1)
self.assertEqual(len(importer.created), 0)
self.assertEqual(len(importer.exists), 0)
self.assertEqual(len(importer.updated), 0)
self.assertEqual(len(importer.deleted), 1)
self.assertEqual(len(importer.failed), 0)
self.assertEqual(len(importer.invalid), 0)
self.assertEqual(len(importer.others), 0)
self.assertEqual(len(importer.permission_denied), 0)
self.assertEqual(batch_index_resources_mock.apply_async.call_count, 2) # no new indexing call
mapping = Mapping.objects.filter(
to_concept__uri="/orgs/DemoOrg/sources/DemoSource/concepts/Corn/",
from_concept__uri="/orgs/DemoOrg/sources/DemoSource/concepts/Vegetable/",
).first()
self.assertTrue(mapping.get_latest_version().retired)
self.assertTrue(mapping.versioned_object.retired)
self.assertFalse(mapping.get_latest_version().prev_version.retired)

@patch('core.importers.models.batch_index_resources')
def test_csv_import_with_retired_concepts(self, batch_index_resources_mock):
Expand All @@ -440,8 +495,8 @@ def test_csv_import_with_retired_concepts(self, batch_index_resources_mock):
importer = BulkImportInline(data, 'ocladmin', True)
importer.run()

self.assertEqual(importer.processed, 10)
self.assertEqual(len(importer.created), 10)
self.assertEqual(importer.processed, 11)
self.assertEqual(len(importer.created), 11)
self.assertEqual(len(importer.failed), 0)
self.assertEqual(len(importer.exists), 0)
self.assertEqual(len(importer.updated), 0)
Expand All @@ -455,6 +510,51 @@ def test_csv_import_with_retired_concepts(self, batch_index_resources_mock):
Concept.objects.filter(parent__mnemonic='MyDemoSource', is_latest_version=True, retired=True).count(), 1)
self.assertEqual(
Concept.objects.filter(parent__mnemonic='MyDemoSource', is_latest_version=True, retired=False).count(), 3)
self.assertEqual(
Mapping.objects.filter(
map_type="Parent-child", parent__mnemonic='MyDemoSource', is_latest_version=True, retired=False
).count(), 1)
self.assertEqual(
Mapping.objects.filter(
map_type="Parent-child-retired", parent__mnemonic='MyDemoSource', is_latest_version=True, retired=True
).count(), 1)

@patch('core.importers.models.batch_index_resources')
def test_csv_import_with_retired_concepts_and_mappings(self, batch_index_resources_mock):
    """Importing the retired-sample CSV creates active and retired concepts/mappings as flagged."""
    # Fix: close the sample file deterministically instead of leaking the handle
    # (the original `open(...).read()` relied on GC to close it).
    with open(
            os.path.join(os.path.dirname(__file__), '..', 'samples/ocl_csv_import_example_test_retired.csv'), 'r'
    ) as csv_file:
        file_content = csv_file.read()

    data = OclStandardCsvToJsonConverter(
        input_list=csv_file_data_to_input_list(file_content), allow_special_characters=True).process()
    importer = BulkImportInline(data, 'ocladmin', True)
    importer.run()

    # All 12 rows in the sample must import cleanly as new resources.
    self.assertEqual(importer.processed, 12)
    self.assertEqual(len(importer.created), 12)
    self.assertEqual(len(importer.failed), 0)
    self.assertEqual(len(importer.exists), 0)
    self.assertEqual(len(importer.updated), 0)
    self.assertEqual(len(importer.invalid), 0)
    self.assertEqual(len(importer.others), 0)
    self.assertEqual(len(importer.permission_denied), 0)
    batch_index_resources_mock.apply_async.assert_called()

    # Each concept's retired flag should match the CSV's retired column.
    for mnemonic, retired in [
        ('Act', False),
        ('Child', False),
        ('Child_of_child', False),
        ('Ret', True),
        ('Ret-with-mappings', True),
    ]:
        self.assertTrue(
            Concept.objects.filter(mnemonic=mnemonic, is_latest_version=True, retired=retired).exists())

    # Each mapping's retired flag should match the CSV, keyed by map type.
    for map_type, retired in [
        ('Child-Parent', False),
        ('SAME-AS', True),
        ('Parent-child', False),
    ]:
        self.assertTrue(
            Mapping.objects.filter(map_type=map_type, is_latest_version=True, retired=retired).exists())

@unittest.skip('[Skipped] Gets hung sometimes')
@patch('core.importers.models.batch_index_resources')
Expand Down Expand Up @@ -995,6 +1095,20 @@ def test_post_file_url_400(self):
self.assertEqual(response.status_code, 400)
self.assertEqual(response.data, dict(exception='No content to import'))

def test_post_invalid_csv_400(self):
    """A CSV that yields no importable content must be rejected with HTTP 400."""
    # Fix: use a context manager so the sample file is closed even if the
    # request raises (the original left the handle open — ResourceWarning).
    with open(
            os.path.join(os.path.dirname(__file__), '..', 'samples/invalid_import_csv.csv'), 'r'
    ) as file:
        response = self.client.post(
            "/importers/bulk-import-inline/?update_if_exists=true",
            {'file': file},
            HTTP_AUTHORIZATION='Token ' + self.token,
        )

    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.data, {'exception': 'No content to import'})

@patch('core.common.tasks.bulk_import_parallel_inline')
def test_post_inline_parallel_202(self, bulk_import_mock):
task_id = 'ace5abf4-3b7f-4e4a-b16f-d1c041088c3e-ocladmin~priority'
Expand Down
40 changes: 26 additions & 14 deletions core/importers/views.py
Expand Up @@ -68,10 +68,13 @@ def post(self, request, import_queue=None):
return Response(dict(exception=NO_CONTENT_TO_IMPORT), status=status.HTTP_400_BAD_REQUEST)

if is_csv_file(name=file.name):
data = OclStandardCsvToJsonConverter(
input_list=csv_file_data_to_input_list(file.read().decode('utf-8')),
allow_special_characters=True
).process()
try:
data = OclStandardCsvToJsonConverter(
input_list=csv_file_data_to_input_list(file.read().decode('utf-8')),
allow_special_characters=True
).process()
except Exception as ex: # pylint: disable=broad-except
return Response(dict(exception=f'Bad CSV ({str(ex)})'), status=status.HTTP_400_BAD_REQUEST)
else:
data = file.read()

Expand Down Expand Up @@ -101,8 +104,11 @@ def post(self, request, import_queue=None):
return Response(dict(exception=NO_CONTENT_TO_IMPORT), status=status.HTTP_400_BAD_REQUEST)

if is_csv_file(name=file_url):
data = OclStandardCsvToJsonConverter(
input_list=csv_file_data_to_input_list(file.text), allow_special_characters=True).process()
try:
data = OclStandardCsvToJsonConverter(
input_list=csv_file_data_to_input_list(file.text), allow_special_characters=True).process()
except Exception as ex: # pylint: disable=broad-except
return Response(dict(exception=f'Bad CSV ({str(ex)})'), status=status.HTTP_400_BAD_REQUEST)
else:
data = file.text

Expand Down Expand Up @@ -272,10 +278,13 @@ def post(self, request, import_queue=None):
return Response(dict(exception=NO_CONTENT_TO_IMPORT), status=status.HTTP_400_BAD_REQUEST)

if file_name and is_csv_file(name=file_name):
data = OclStandardCsvToJsonConverter(
input_list=csv_file_data_to_input_list(file_content),
allow_special_characters=True
).process()
try:
data = OclStandardCsvToJsonConverter(
input_list=csv_file_data_to_input_list(file_content),
allow_special_characters=True
).process()
except Exception as ex: # pylint: disable=broad-except
return Response(dict(exception=f'Bad CSV ({str(ex)})'), status=status.HTTP_400_BAD_REQUEST)
elif file:
data = file_content
else:
Expand Down Expand Up @@ -317,10 +326,13 @@ def post(self, request, import_queue=None):
return Response(dict(exception=NO_CONTENT_TO_IMPORT), status=status.HTTP_400_BAD_REQUEST)

if file_name and is_csv_file(name=file_name):
data = OclStandardCsvToJsonConverter(
input_list=csv_file_data_to_input_list(file_content),
allow_special_characters=True
).process()
try:
data = OclStandardCsvToJsonConverter(
input_list=csv_file_data_to_input_list(file_content),
allow_special_characters=True
).process()
except Exception as ex: # pylint: disable=broad-except
return Response(dict(exception=f'Bad CSV ({str(ex)})'), status=status.HTTP_400_BAD_REQUEST)
elif file:
data = file_content
else:
Expand Down
2 changes: 2 additions & 0 deletions core/samples/invalid_import_csv.csv
@@ -0,0 +1,2 @@
resource_type,id,name,company,website,location,public_access,logo_url,description,text,attr:Ex_Num,attr:ex_name,full_name,owner_id,owner_type,source_type,default_locale,supported_locales,custom_validation_schema,external_id,canonical_url,hierarchy_meaning,hierarchy_root_url,internal_reference_id,meta,collection_reference,publisher,purpose,copyright,revision_date,experimental,jurisdiction,content_type,case_sensitive,compositional,version_needed,external_id ,retired,datatype,concept_class,source,description[1],description[2],name[1],name_type[1],parent_concept_urls[0],map_type[0],map_from_concept_id[0],map_to_concept_id[0],map_type,to_concept_url,from_concept_url,attr:extra_names,collection_type,immutable,jurisdiction[1],jurisdiction[2],collection_url,data:expressions
Foobar,DemoOrg,My Demo Organization,DemoLand Inc.,https://www.demoland.fake,DemoLand,View,https://thumbs.dreamstime.com/b/demo-icon-demo-147077326.jpg,Generic Demo description text,This organization is demo-tastic!,6,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,

0 comments on commit 7f2a5d3

Please sign in to comment.