Commit 02d314b: Unit tests
chandrashekar-s committed Mar 25, 2024 (1 parent: 070438e)
Showing 17 changed files with 18,237 additions and 106 deletions.
File 1 (converter implementation; file name not shown in this capture):
@@ -25,7 +25,7 @@
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -83,7 +83,7 @@ public HapiConverter merge(HapiConverter other) throws HapiMergeException {
"Cannot merge Boolean FHIR Type PrimitiveConverter with %s ",
other != null
? String.format(
"%s FHIR Type %s", other.getElementType(), other.getClass().getName())
"%s FHIR Type %s", other.getElementType(), other.getClass().getName())
: null));
}
};
@@ -110,7 +110,7 @@ public HapiConverter merge(HapiConverter other) throws HapiMergeException {
"Cannot merge Integer FHIR Type PrimitiveConverter with %s ",
other != null
? String.format(
"%s FHIR Type %s", other.getElementType(), other.getClass().getName())
"%s FHIR Type %s", other.getElementType(), other.getClass().getName())
: null));
}
};
@@ -164,7 +164,7 @@ public HapiConverter merge(HapiConverter other) throws HapiMergeException {
"Cannot merge Double FHIR Type PrimitiveConverter with %s ",
other != null
? String.format(
"%s FHIR Type %s", other.getElementType(), other.getClass().getName())
"%s FHIR Type %s", other.getElementType(), other.getClass().getName())
: null));
}
};
@@ -376,7 +376,8 @@ public HapiConverter merge(HapiConverter other) throws HapiMergeException {
Map<String, HapiConverter<Schema>> currentChoiceTypes = this.getElements();
Map<String, HapiConverter<Schema>> otherChoiceTypes = otherConverter.getElements();

Map<String, HapiConverter<Schema>> mergedChoiceTypes = new HashMap<>();
// Use a linked hash map to preserve the order of the fields in the merged converter
Map<String, HapiConverter<Schema>> mergedChoiceTypes = new LinkedHashMap<>();
for (String key : currentChoiceTypes.keySet()) {
if (!otherChoiceTypes.containsKey(key)) {
mergedChoiceTypes.put(key, currentChoiceTypes.get(key));
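The HashMap to LinkedHashMap switch above is what the in-code comment alludes to: the merged element map determines the field order of the converter that is built from it, and the new tests in this commit compare schemas by their JSON string form, so iteration order must be stable. A standalone sketch of the ordering difference (illustrative only, not code from this commit):

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative only: LinkedHashMap iterates in insertion order, while HashMap
// iterates in an order determined by key hashing.
public class FieldOrderDemo {

  public static void main(String[] args) {
    Map<String, String> hashed = new HashMap<>();
    Map<String, String> linked = new LinkedHashMap<>();
    for (String field : new String[] {"valueQuantity", "valueCodeableConcept", "valueString"}) {
      hashed.put(field, "converter");
      linked.put(field, "converter");
    }
    // Order depends on hashing, e.g. [valueCodeableConcept, valueQuantity, valueString].
    System.out.println(hashed.keySet());
    // Always insertion order: [valueQuantity, valueCodeableConcept, valueString].
    System.out.println(linked.keySet());
  }
}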
@@ -932,9 +933,11 @@ private static HapiCompositeConverter createCompositeConverter(
.map(
(StructureField<HapiConverter<Schema>> field) -> {
String desc =
field.extensionUrl() != null
? "Extension field for " + field.extensionUrl()
: "Field for FHIR property " + field.propertyName();
recordName.endsWith("Reference")
? "Reference field"
: field.extensionUrl() != null
? "Extension field for " + field.extensionUrl()
: "Field for FHIR property " + field.propertyName();

return new Field(
field.fieldName(),
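The nested conditional in the hunk above picks the documentation string for each generated Avro field, with Reference records now getting a fixed description. Unwound into a small helper (a readability sketch with illustrative names, not code from this commit), the logic is:

// Readability sketch only; parameter names are illustrative.
private static String fieldDescription(String recordName, String extensionUrl, String propertyName) {
  if (recordName.endsWith("Reference")) {
    return "Reference field";
  }
  if (extensionUrl != null) {
    return "Extension field for " + extensionUrl;
  }
  return "Field for FHIR property " + propertyName;
}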
File 2 (new file): AvroConverterMergeTest.java
@@ -0,0 +1,93 @@
package com.cerner.bunsen.avro;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import com.cerner.bunsen.ProfileMapperFhirContexts;
import com.cerner.bunsen.common.R4UsCoreProfileData;
import com.cerner.bunsen.common.Stu3UsCoreProfileData;
import com.cerner.bunsen.exception.HapiMergeException;
import com.cerner.bunsen.exception.ProfileMapperException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Parser;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class AvroConverterMergeTest {

@Before
public void setUp() throws URISyntaxException, ProfileMapperException {
ProfileMapperFhirContexts.getInstance().deRegisterFhirContexts(FhirVersionEnum.R4);
}

@Test
public void validateMergedTestPatientSchema()
throws ProfileMapperException, HapiMergeException, IOException {
FhirContext fhirContext =
ProfileMapperFhirContexts.getInstance()
.contextFromClasspathFor(FhirVersionEnum.R4, "/other-profile-definitions");

List<String> patientProfiles =
Arrays.asList(
"http://hl7.org/fhir/StructureDefinition/Patient",
"http://hl7.org/fhir/bunsen/test/StructureDefinition/bunsen-test-patient");

AvroConverter mergedConverter = AvroConverter.forResources(fhirContext, patientProfiles);

InputStream inputStream =
this.getClass().getResourceAsStream("/other-schemas/bunsen-test-patient-schema.json");
Schema expectedSchema = new Parser().parse(inputStream);

Assert.assertEquals(expectedSchema.toString(), mergedConverter.getSchema().toString());
}

@Test
public void validateMergedR4UsCoreSchemas()
throws ProfileMapperException, HapiMergeException, IOException {
FhirContext fhirContext =
ProfileMapperFhirContexts.getInstance()
.contextFromClasspathFor(FhirVersionEnum.R4, "/r4-us-core-definitions");
validateSchema(
"/r4-us-core-schemas/us-core-patient-schema.json",
R4UsCoreProfileData.US_CORE_PATIENT_PROFILES,
fhirContext);
validateSchema(
"/r4-us-core-schemas/us-core-observation-schema.json",
R4UsCoreProfileData.US_CORE_OBSERVATION_PROFILES,
fhirContext);
validateSchema(
"/r4-us-core-schemas/us-core-condition-schema.json",
R4UsCoreProfileData.US_CORE_CONDITION_PROFILES,
fhirContext);
}

@Test
public void validateMergedStu3UsCoreSchemas()
throws ProfileMapperException, HapiMergeException, IOException {
FhirContext fhirContext =
ProfileMapperFhirContexts.getInstance()
.contextFromClasspathFor(FhirVersionEnum.DSTU3, "/stu3-us-core-definitions");
validateSchema(
"/stu3-us-core-schemas/us-core-patient-schema.json",
Stu3UsCoreProfileData.US_CORE_PATIENT_PROFILES,
fhirContext);
validateSchema(
"/stu3-us-core-schemas/us-core-observation-schema.json",
Stu3UsCoreProfileData.US_CORE_OBSERVATION_PROFILES,
fhirContext);
}

private void validateSchema(
String expectedSchemaFile, List<String> profileResourceTypeUrls, FhirContext fhirContext)
throws HapiMergeException, IOException {
AvroConverter converter = AvroConverter.forResources(fhirContext, profileResourceTypeUrls);
InputStream inputStream = this.getClass().getResourceAsStream(expectedSchemaFile);
Schema expectedSchema = new Parser().parse(inputStream);
Assert.assertEquals(expectedSchema.toString(), converter.getSchema().toString());
}
}
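AvroConverterMergeTest compares each merged converter's schema against JSON files checked into the test resources, which presumably account for most of the 18,237 added lines in this commit. A hedged sketch of how such an expected-schema file could be regenerated using the same forResources API shown above (the class name and output path are illustrative, not part of this commit):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import com.cerner.bunsen.ProfileMapperFhirContexts;
import com.cerner.bunsen.avro.AvroConverter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;

// Sketch only: write the merged Patient schema as pretty-printed JSON so it can be
// used as an expected-schema test resource. Output path is illustrative.
public class GenerateExpectedSchema {

  public static void main(String[] args) throws Exception {
    FhirContext fhirContext =
        ProfileMapperFhirContexts.getInstance()
            .contextFromClasspathFor(FhirVersionEnum.R4, "/other-profile-definitions");
    AvroConverter merged =
        AvroConverter.forResources(
            fhirContext,
            Arrays.asList(
                "http://hl7.org/fhir/StructureDefinition/Patient",
                "http://hl7.org/fhir/bunsen/test/StructureDefinition/bunsen-test-patient"));
    // Schema.toString(true) pretty-prints the Avro schema as JSON.
    Files.write(
        Paths.get("bunsen-test-patient-schema.json"),
        merged.getSchema().toString(true).getBytes(StandardCharsets.UTF_8));
  }
}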
File 3: R4AvroConverterCustomProfileTest.java
@@ -3,9 +3,11 @@
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import com.cerner.bunsen.ProfileMapperFhirContexts;
import com.cerner.bunsen.exception.HapiMergeException;
import com.cerner.bunsen.exception.ProfileMapperException;
import com.cerner.bunsen.r4.TestData;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;
import org.apache.avro.generic.GenericData.Record;
import org.hl7.fhir.r4.model.CodeableConcept;
@@ -30,13 +32,17 @@ public class R4AvroConverterCustomProfileTest {
private static Patient testBunsenTestProfilePatientDecoded;

@BeforeClass
public static void setUp() throws URISyntaxException, ProfileMapperException {
public static void setUp() throws URISyntaxException, ProfileMapperException, HapiMergeException {
ProfileMapperFhirContexts.getInstance().deRegisterFhirContexts(FhirVersionEnum.R4);
FhirContext fhirContext =
ProfileMapperFhirContexts.getInstance()
.contextFromClasspathFor(FhirVersionEnum.R4, "/other-profile-definitions");
List<String> patientProfiles =
Arrays.asList(
"http://hl7.org/fhir/StructureDefinition/Patient",
"http://hl7.org/fhir/bunsen/test/StructureDefinition/bunsen-test-patient");
AvroConverter converterBunsenTestProfilePatient =
AvroConverter.forResource(fhirContext, TestData.BUNSEN_TEST_PATIENT);
AvroConverter.forResources(fhirContext, patientProfiles);

avroBunsenTestProfilePatient =
(Record) converterBunsenTestProfilePatient.resourceToAvro(testBunsenTestProfilePatient);
File 4: R4AvroConverterUsCoreTest.java
@@ -3,14 +3,15 @@
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import com.cerner.bunsen.ProfileMapperFhirContexts;
import com.cerner.bunsen.common.R4UsCoreProfileData;
import com.cerner.bunsen.exception.HapiMergeException;
import com.cerner.bunsen.exception.ProfileMapperException;
import com.cerner.bunsen.r4.TestData;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
@@ -105,8 +106,7 @@ public class R4AvroConverterUsCoreTest {

/** Initialize test data. */
@BeforeClass
public static void convertTestData()
throws IOException, URISyntaxException, ProfileMapperException {
public static void convertTestData() throws ProfileMapperException, HapiMergeException {

// TODO update these conversions to actually use the wire/binary format, i.e., create
// the serialized format from the Avro object then re-read/convert that format back to an
@@ -118,7 +118,7 @@ public static void convertTestData()
.contextFromClasspathFor(FhirVersionEnum.R4, "/r4-us-core-definitions");

AvroConverter observationConverter =
AvroConverter.forResource(fhirContext, TestData.US_CORE_OBSERVATION_VITALS_SIGNS);
AvroConverter.forResources(fhirContext, R4UsCoreProfileData.US_CORE_OBSERVATION_PROFILES);

avroObservation = (Record) observationConverter.resourceToAvro(testObservation);

@@ -137,26 +137,27 @@ public static void convertTestData()
testTaskDecoded = (Task) taskConverter.avroToResource(avroTask);

AvroConverter patientConverter =
AvroConverter.forResource(fhirContext, TestData.US_CORE_PATIENT);
AvroConverter.forResources(fhirContext, R4UsCoreProfileData.US_CORE_PATIENT_PROFILES);

avroPatient = (Record) patientConverter.resourceToAvro(testPatient);

testPatientDecoded = (Patient) patientConverter.avroToResource(avroPatient);

AvroConverter conditionConverter =
AvroConverter.forResource(fhirContext, TestData.US_CORE_CONDITION_PROBLEMS_HEALTH_CONCERNS);
AvroConverter.forResources(fhirContext, R4UsCoreProfileData.US_CORE_CONDITION_PROFILES);

avroCondition = (Record) conditionConverter.resourceToAvro(testCondition);

testConditionDecoded = (Condition) conditionConverter.avroToResource(avroCondition);

AvroConverter medicationConverter =
AvroConverter.forResource(fhirContext, TestData.US_CORE_MEDICATION);
AvroConverter.forResources(fhirContext, R4UsCoreProfileData.US_CORE_MEDICATION_PROFILES);

Record avroMedication = (Record) medicationConverter.resourceToAvro(testMedicationOne);

testMedicationDecoded = (Medication) medicationConverter.avroToResource(avroMedication);

// TODO: Contained resources are not supported yet for multiple profiles
AvroConverter medicationRequestConverter =
AvroConverter.forResource(
fhirContext,
@@ -170,7 +171,7 @@ public static void convertTestData()
(MedicationRequest) medicationRequestConverter.avroToResource(avroMedicationRequest);

AvroConverter encounterConverter =
AvroConverter.forResource(fhirContext, TestData.US_CORE_ENCOUNTER);
AvroConverter.forResources(fhirContext, R4UsCoreProfileData.US_CORE_ENCOUNTER_PROFILES);
avroEncounter = (Record) encounterConverter.resourceToAvro(testEncounter);
testEncounterDecoded = (Encounter) encounterConverter.avroToResource(avroEncounter);
}
File 5: Stu3AvroConverterCustomProfileTest.java
@@ -3,9 +3,11 @@
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import com.cerner.bunsen.ProfileMapperFhirContexts;
import com.cerner.bunsen.exception.HapiMergeException;
import com.cerner.bunsen.exception.ProfileMapperException;
import com.cerner.bunsen.stu3.TestData;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;
import org.apache.avro.generic.GenericData.Record;
import org.hl7.fhir.dstu3.model.CodeableConcept;
@@ -32,13 +34,17 @@ public class Stu3AvroConverterCustomProfileTest {
private static Patient testBunsenTestProfilePatientDecoded;

@BeforeClass
public static void setUp() throws URISyntaxException, ProfileMapperException {
public static void setUp() throws URISyntaxException, ProfileMapperException, HapiMergeException {
ProfileMapperFhirContexts.getInstance().deRegisterFhirContexts(FhirVersionEnum.DSTU3);
FhirContext fhirContext =
ProfileMapperFhirContexts.getInstance()
.contextFromClasspathFor(FhirVersionEnum.DSTU3, "/other-profile-definitions");
List<String> patientProfiles =
Arrays.asList(
"http://hl7.org/fhir/StructureDefinition/Patient",
"http://hl7.org/fhir/bunsen/test/StructureDefinition/bunsen-test-patient");
AvroConverter converterBunsenTestProfilePatient =
AvroConverter.forResource(fhirContext, TestData.BUNSEN_TEST_PATIENT);
AvroConverter.forResources(fhirContext, patientProfiles);

avroBunsenTestProfilePatient =
(Record) converterBunsenTestProfilePatient.resourceToAvro(testBunsenTestProfilePatient);
File 6: Stu3AvroConverterUsCoreTest.java
@@ -3,14 +3,15 @@
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import com.cerner.bunsen.ProfileMapperFhirContexts;
import com.cerner.bunsen.common.Stu3UsCoreProfileData;
import com.cerner.bunsen.exception.HapiMergeException;
import com.cerner.bunsen.exception.ProfileMapperException;
import com.cerner.bunsen.stu3.TestData;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
@@ -90,13 +91,13 @@ public class Stu3AvroConverterUsCoreTest {

/** Initialize test data. */
@BeforeClass
public static void convertTestData()
throws IOException, URISyntaxException, ProfileMapperException {
public static void convertTestData() throws ProfileMapperException, HapiMergeException {
ProfileMapperFhirContexts.getInstance().deRegisterFhirContexts(FhirVersionEnum.DSTU3);
fhirContext =
ProfileMapperFhirContexts.getInstance()
.contextFromClasspathFor(FhirVersionEnum.DSTU3, "/stu3-us-core-definitions");
AvroConverter observationConverter = AvroConverter.forResource(fhirContext, "Observation");
AvroConverter observationConverter =
AvroConverter.forResources(fhirContext, Stu3UsCoreProfileData.US_CORE_OBSERVATION_PROFILES);

avroObservation = (Record) observationConverter.resourceToAvro(testObservation);

@@ -109,26 +110,27 @@ public static void convertTestData()
(Observation) observationConverter.avroToResource(avroObservationNullStatus);

AvroConverter patientConverter =
AvroConverter.forResource(fhirContext, TestData.US_CORE_PATIENT);
AvroConverter.forResources(fhirContext, Stu3UsCoreProfileData.US_CORE_PATIENT_PROFILES);

avroPatient = (Record) patientConverter.resourceToAvro(testPatient);

testPatientDecoded = (Patient) patientConverter.avroToResource(avroPatient);

AvroConverter conditionConverter =
AvroConverter.forResource(fhirContext, TestData.US_CORE_CONDITION);
AvroConverter.forResources(fhirContext, Stu3UsCoreProfileData.US_CORE_CONDITION_PROFILES);

avroCondition = (Record) conditionConverter.resourceToAvro(testCondition);

testConditionDecoded = (Condition) conditionConverter.avroToResource(avroCondition);

AvroConverter medicationConverter =
AvroConverter.forResource(fhirContext, TestData.US_CORE_MEDICATION);
AvroConverter.forResources(fhirContext, Stu3UsCoreProfileData.US_CORE_MEDICATION_PROFILES);

Record avroMedication = (Record) medicationConverter.resourceToAvro(testMedicationOne);

testMedicationDecoded = (Medication) medicationConverter.avroToResource(avroMedication);

// TODO: Contained resources are not supported yet for multiple profiles
AvroConverter medicationRequestConverter =
AvroConverter.forResource(
fhirContext,
(The remaining changed files were not loaded in this view.)