Commit 68e31a6

reverting file read encoding

snyaggarwal committed Sep 14, 2021
1 parent a067032 commit 68e31a6

Showing 4 changed files with 21 additions and 24 deletions.
.pylintrc (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 [MESSAGES CONTROL]
-disable=missing-docstring,too-few-public-methods,no-member,too-many-ancestors,attribute-defined-outside-init,fixme,broad-except,import-outside-toplevel,too-many-instance-attributes,unsupported-membership-test,too-many-public-methods,too-many-lines,unused-private-member,consider-using-with,duplicate-code
+disable=missing-docstring,too-few-public-methods,no-member,too-many-ancestors,attribute-defined-outside-init,fixme,broad-except,import-outside-toplevel,too-many-instance-attributes,unsupported-membership-test,too-many-public-methods,too-many-lines,unused-private-member,consider-using-with,duplicate-code,unspecified-encoding
 [MASTER]
 ignore=migrations,fixtures,scripts,commands,documents.py,password_validation.py,v1_dump
 [FORMAT]
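For context: the change above swaps per-call suppressions for a repository-wide one. Pylint's unspecified-encoding check (W1514, added in pylint 2.10) flags any text-mode open() without an explicit encoding=, because the default comes from the platform locale. A minimal sketch of what the checker does and does not flag; the file path here is illustrative, not from the repository:

import locale
import os
import tempfile

# Text mode without encoding= falls back to the locale's preferred encoding,
# which is platform-dependent; that is exactly what W1514 warns about.
print(locale.getpreferredencoding(False))

path = os.path.join(tempfile.mkdtemp(), 'sample.txt')
with open(path, 'w') as f:                 # flagged: text mode, no encoding
    f.write('hello')

with open(path, 'rb') as f:                # not flagged: binary mode takes no encoding
    raw = f.read()

with open(path, encoding='utf-8') as f:    # not flagged: encoding given explicitly
    assert f.read() == 'hello'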
core/common/services.py (2 changes: 1 addition & 1 deletion)
@@ -69,7 +69,7 @@ def upload_file(
     ): # pylint: disable=too-many-arguments
         read_directive = 'rb' if binary else 'r'
         file_path = file_path if file_path else key
-        return cls.upload(key, open(file_path, read_directive).read(), headers, metadata) # pylint: disable=unspecified-encoding
+        return cls.upload(key, open(file_path, read_directive).read(), headers, metadata)

     @classmethod
     def upload_public(cls, file_path, file_content):
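Read standalone, the method above amounts to the sketch below; read_for_upload is a hypothetical extraction for illustration, not code from the repository. Once the check is disabled globally in .pylintrc, the inline disable comment becomes redundant, and with encoding= dropped, text-mode reads use the platform default while binary reads are unaffected:

def read_for_upload(file_path, binary=False):
    # Mirrors upload_file: pick the mode, read the whole file, hand it to storage.
    read_directive = 'rb' if binary else 'r'
    with open(file_path, read_directive) as f:  # no encoding= -> locale default in text mode
        return f.read()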
core/common/utils.py (18 changes: 9 additions & 9 deletions)
@@ -223,7 +223,7 @@ def write_export_file(
     if version.is_head:
         filters['is_latest_version'] = True

-    with open('export.json', 'w', encoding='utf-8') as out:
+    with open('export.json', 'w') as out:
         out.write('%s, "concepts": [' % resource_string[:-1])

     resource_name = resource_type.title()
@@ -241,14 +241,14 @@ def write_export_file(
                 id__in=batch_queryset.values_list('concept_id')).filter(**filters).order_by('-id')
             if queryset.exists():
                 if start > 0:
-                    with open('export.json', 'a', encoding='utf-8') as out:
+                    with open('export.json', 'a') as out:
                         out.write(', ')
                 concept_versions = queryset.prefetch_related('names', 'descriptions')
                 data = concept_serializer_class(concept_versions, many=True).data
                 concept_string = json.dumps(data, cls=encoders.JSONEncoder)
                 concept_string = concept_string[1:-1]

-                with open('export.json', 'a', encoding='utf-8') as out:
+                with open('export.json', 'a') as out:
                     out.write(concept_string)

                 start += batch_size
@@ -263,7 +263,7 @@ def write_export_file(
     references_qs = version.references
     total_references = references_qs.count()

-    with open('export.json', 'a', encoding='utf-8') as out:
+    with open('export.json', 'a') as out:
         out.write('], "references": [')
     if total_references:
         logger.info(
@@ -278,15 +278,15 @@ def write_export_file(
             reference_serializer = reference_serializer_class(references, many=True)
             reference_string = json.dumps(reference_serializer.data, cls=encoders.JSONEncoder)
             reference_string = reference_string[1:-1]
-            with open('export.json', 'a', encoding='utf-8') as out:
+            with open('export.json', 'a') as out:
                 out.write(reference_string)
                 if end != total_references:
                     out.write(', ')
         logger.info('Done serializing references.')
     else:
         logger.info('%s has no references to serialize.' % resource_name)

-    with open('export.json', 'a', encoding='utf-8') as out:
+    with open('export.json', 'a') as out:
         out.write('], "mappings": [')

     if mappings_qs.exists():
@@ -302,13 +302,13 @@ def write_export_file(
                 id__in=batch_queryset.values_list('mapping_id')).filter(**filters).order_by('-id')
             if queryset.exists():
                 if start > 0:
-                    with open('export.json', 'a', encoding='utf-8') as out:
+                    with open('export.json', 'a') as out:
                         out.write(', ')

                 data = mapping_serializer_class(queryset, many=True).data
                 mapping_string = json.dumps(data, cls=encoders.JSONEncoder)
                 mapping_string = mapping_string[1:-1]
-                with open('export.json', 'a', encoding='utf-8') as out:
+                with open('export.json', 'a') as out:
                     out.write(mapping_string)

                 start += batch_size
@@ -319,7 +319,7 @@ def write_export_file(
     else:
         logger.info('%s has no mappings to serialize.' % resource_name)

-    with open('export.json', 'a', encoding='utf-8') as out:
+    with open('export.json', 'a') as out:
         out.write(']}')

     with zipfile.ZipFile('export.zip', 'w', zipfile.ZIP_DEFLATED) as _zip:
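The file being edited builds one large JSON document incrementally: each batch is serialized with json.dumps, the outer '[' and ']' are sliced off with [1:-1], and the fragment is appended to export.json with ', ' written between batches. A minimal self-contained sketch of that pattern, assuming plain lists in place of the Django querysets and serializers (append_array is a hypothetical name):

import json

def append_array(path, key, batches):
    # Open the array for this key, e.g. '"concepts": ['.
    with open(path, 'a') as out:               # platform-default encoding, as in the diff
        out.write('"%s": [' % key)
    for index, batch in enumerate(batches):
        fragment = json.dumps(batch)[1:-1]     # strip the outer brackets
        with open(path, 'a') as out:
            if index > 0:
                out.write(', ')                # comma between batches, not after the last
            out.write(fragment)
    with open(path, 'a') as out:
        out.write(']')

# Usage: two batches end up as one valid JSON array inside one JSON object.
with open('export.json', 'w') as out:
    out.write('{')
append_array('export.json', 'concepts', [[{'id': 1}, {'id': 2}], [{'id': 3}]])
with open('export.json', 'a') as out:
    out.write('}')
print(json.load(open('export.json')))          # {'concepts': [{'id': 1}, {'id': 2}, {'id': 3}]}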
core/importers/tests.py (23 changes: 10 additions & 13 deletions)
@@ -336,8 +336,7 @@ def test_mapping_import(self):
     def test_reference_import(self):
         importer = BulkImportInline(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_collection_references.json'), 'r',
-                encoding='utf-8'
+                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_collection_references.json'), 'r'
             ).read(),
             'ocladmin', True
         )
@@ -357,8 +356,7 @@ def test_reference_import(self):
         # duplicate run
         importer = BulkImportInline(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_collection_references.json'), 'r',
-                encoding='utf-8'
+                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_collection_references.json'), 'r'
             ).read(),
             'ocladmin', True
         )
@@ -378,7 +376,7 @@ def test_reference_import(self):
     def test_sample_import(self):
         importer = BulkImportInline(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r', encoding='utf-8'
+                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r'
             ).read(),
             'ocladmin', True
         )
@@ -399,7 +397,7 @@ def test_openmrs_schema_csv_import(self):
         OrganizationSourceFactory(
             mnemonic='Implementationtest', organization=org, custom_validation_schema=CUSTOM_VALIDATION_SCHEMA_OPENMRS)
         file_content = open(
-            os.path.join(os.path.dirname(__file__), '..', 'samples/msfocp_concepts.csv'), 'r', encoding='utf-8').read()
+            os.path.join(os.path.dirname(__file__), '..', 'samples/msfocp_concepts.csv'), 'r').read()
         data = OclStandardCsvToJsonConverter(
             input_list=csv_file_data_to_input_list(file_content),
             allow_special_characters=True
@@ -417,8 +415,7 @@ def test_pepfar_import(self):
     def test_pepfar_import(self):
         importer = BulkImportInline(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/pepfar_datim_moh_fy19.json'), 'r',
-                encoding='utf-8').read(),
+                os.path.join(os.path.dirname(__file__), '..', 'samples/pepfar_datim_moh_fy19.json'), 'r').read(),
             'ocladmin', True
         )
         importer.run()
@@ -439,7 +436,7 @@ def test_make_parts(self, redis_service_mock):

         importer = BulkImportParallelRunner(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r', encoding='utf-8'
+                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r'
             ).read(),
             'ocladmin', True
         )
@@ -465,7 +462,7 @@ def test_is_any_process_alive(self, redis_service_mock):
         redis_service_mock.return_value = Mock()
         importer = BulkImportParallelRunner(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r', encoding='utf-8'
+                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r'
             ).read(),
             'ocladmin', True
         )
@@ -506,7 +503,7 @@ def test_get_overall_tasks_progress(self, redis_service_mock):
         redis_service_mock.return_value = redis_instance_mock
         importer = BulkImportParallelRunner(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r', encoding='utf-8'
+                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r'
             ).read(),
             'ocladmin', True
         )
@@ -520,7 +517,7 @@ def test_update_elapsed_seconds(self, redis_service_mock):

         importer = BulkImportParallelRunner(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r', encoding='utf-8'
+                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r'
             ).read(),
             'ocladmin', True
         )
@@ -535,7 +532,7 @@ def test_notify_progress(self, redis_service_mock): # pylint: disable=no-self-use
         redis_service_mock.return_value = redis_instance_mock
         importer = BulkImportParallelRunner(
             open(
-                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r', encoding='utf-8'
+                os.path.join(os.path.dirname(__file__), '..', 'samples/sample_ocldev.json'), 'r'
             ).read(),
             'ocladmin', True, None, 'task-id'
         )
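All of these test edits share one pattern: a sample fixture is resolved relative to the test module and read whole in text mode, now with the platform default encoding. A hypothetical helper showing the shape of that pattern (read_sample is not in the repository; BulkImportInline is the class from the diff):

import os

def read_sample(name):
    # Resolve '../samples/<name>' next to this test module and read it whole.
    path = os.path.join(os.path.dirname(__file__), '..', 'samples', name)
    with open(path, 'r') as f:    # encoding= removed by this commit -> locale default
        return f.read()

# e.g. importer = BulkImportInline(read_sample('sample_ocldev.json'), 'ocladmin', True)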
