Resolve "Need to implement the ancestor and descendant filters for LOINC." #1505

Merged
Changes from all commits (19 commits)
@@ -9,7 +9,6 @@
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.dstu3.TransactionProcessorVersionAdapterDstu3;
import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu3;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryDstu3;
@@ -21,6 +20,7 @@
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.dstu3.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
@@ -99,6 +99,11 @@ public IValidatorModule instanceValidatorDstu3() {
return val;
}

@Bean
public DefaultProfileValidationSupport defaultProfileValidationSupport() {
return new DefaultProfileValidationSupport();
}

@Bean
public JpaValidationSupportChainDstu3 jpaValidationSupportChain() {
return new JpaValidationSupportChainDstu3();
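
For context, the same DefaultProfileValidationSupport bean is added to the R4 and R5 configs below. A minimal sketch of how such a bean is typically combined with the other validation classes imported in this file (the class name ValidationChainSketch is illustrative only, and the actual wiring inside JpaValidationSupportChainDstu3 may differ):

import org.hl7.fhir.dstu3.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain;

public class ValidationChainSketch {
   public FhirInstanceValidator buildValidator(DefaultProfileValidationSupport theDefaultSupport) {
      // Chain the built-in profile support with any other IValidationSupport modules,
      // wrap the chain in a cache, and hand it to the instance validator.
      ValidationSupportChain chain = new ValidationSupportChain(theDefaultSupport);
      return new FhirInstanceValidator(new CachingValidationSupport(chain));
   }
}
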
@@ -8,7 +8,7 @@
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
import ca.uhn.fhir.jpa.graphql.JpaStorageServices;
import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorR4;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryR4;
@@ -20,14 +20,12 @@
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainR4;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
import ca.uhn.fhir.jpa.provider.GraphQLProvider;
import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.utils.GraphQLEngine;
import org.hl7.fhir.r5.utils.IResourceValidator;
import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -102,6 +100,11 @@ public IValidatorModule instanceValidatorR4() {
return val;
}

@Bean
public DefaultProfileValidationSupport defaultProfileValidationSupport() {
return new DefaultProfileValidationSupport();
}

@Bean
public JpaValidationSupportChainR4 jpaValidationSupportChain() {
return new JpaValidationSupportChainR4();
@@ -20,6 +20,7 @@
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainR5;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.r5.hapi.ctx.DefaultProfileValidationSupport;
import org.hl7.fhir.r5.hapi.ctx.IValidationSupport;
import org.hl7.fhir.r5.hapi.validation.CachingValidationSupport;
import org.hl7.fhir.r5.hapi.validation.FhirInstanceValidator;
@@ -99,6 +100,11 @@ public IValidatorModule instanceValidatorR5() {
return val;
}

@Bean
public DefaultProfileValidationSupport defaultProfileValidationSupport() {
return new DefaultProfileValidationSupport();
}

@Bean
public JpaValidationSupportChainR5 jpaValidationSupportChain() {
return new JpaValidationSupportChainR5();
@@ -112,7 +112,7 @@

@SuppressWarnings("WeakerAccess")
@Repository
public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao, ApplicationContextAware {
public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao, IJpaDao<T>, ApplicationContextAware {

public static final long INDEX_STATUS_INDEXED = 1L;
public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
@@ -1017,7 +1017,8 @@ protected ResourceTable updateEntityForDelete(RequestDetails theRequest, Resourc
}

@SuppressWarnings("unchecked")
protected ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, ResourceTable
@Override
public ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, ResourceTable
theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
Validate.notNull(theEntity);
@@ -1256,6 +1257,7 @@ protected ResourceTable updateEntity(RequestDetails theRequest, final IBaseResou
return theEntity;
}

@Override
public ResourceTable updateInternal(RequestDetails theRequestDetails, T theResource, boolean thePerformIndexing, boolean theForceUpdateVersion,
ResourceTable theEntity, IIdType theResourceId, IBaseResource theOldResource) {

@@ -149,21 +149,29 @@ public class DaoConfig {
private boolean myFilterParameterEnabled = false;
private StoreMetaSourceInformationEnum myStoreMetaSourceInformation = StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID;
/**
* EXPERIMENTAL - Do not use in production! Do not change default of {@code false}!
* Do not change default of {@code true}!
*
* @since 4.1.0
*/
private boolean myPreExpandValueSetsExperimental = false;
private boolean myPreExpandValueSets = true;
/**
* EXPERIMENTAL - Do not use in production! Do not change default of {@code 0}!
* Do not change default of {@code 0}!
*
* @since 4.1.0
*/
private int myPreExpandValueSetsDefaultOffsetExperimental = 0;
private int myPreExpandValueSetsDefaultOffset = 0;
/**
* EXPERIMENTAL - Do not use in production! Do not change default of {@code 1000}!
* Do not change default of {@code 1000}!
*
* @since 4.1.0
*/
private int myPreExpandValueSetsDefaultCountExperimental = 1000;
private int myPreExpandValueSetsDefaultCount = 1000;
/**
* EXPERIMENTAL - Do not use in production! Do not change default of {@code 1000}!
* Do not change default of {@code 1000}!
*
* @since 4.1.0
*/
private int myPreExpandValueSetsMaxCountExperimental = 1000;
private int myPreExpandValueSetsMaxCount = 1000;

/**
* Constructor
@@ -920,7 +928,7 @@ public boolean isAllowInlineMatchUrlReferences() {
* <p>
* Default is {@literal true} beginning in HAPI FHIR 2.4, since this
* feature is now specified in the FHIR specification. (Previously it
* was an experimental/rpposed feature)
* was an experimental/proposed feature)
* </p>
*
* @since 1.5
@@ -1621,34 +1629,6 @@ public void setWebsocketContextPath(String theWebsocketContextPath) {
myModelConfig.setWebsocketContextPath(theWebsocketContextPath);
}

/**
* EXPERIMENTAL - Do not use in production!
* <p>
* If set to {@code true}, ValueSets and expansions are stored in terminology tables. This is to facilitate
* future optimization of the $expand operation on large ValueSets.
* </p>
* <p>
* The default value for this setting is {@code false}.
* </p>
*/
public boolean isPreExpandValueSetsExperimental() {
return myPreExpandValueSetsExperimental;
}

/**
* EXPERIMENTAL - Do not use in production!
* <p>
* If set to {@code true}, ValueSets and expansions are stored in terminology tables. This is to facilitate
* future optimization of the $expand operation on large ValueSets.
* </p>
* <p>
* The default value for this setting is {@code false}.
* </p>
*/
public void setPreExpandValueSetsExperimental(boolean thePreExpandValueSetsExperimental) {
myPreExpandValueSetsExperimental = thePreExpandValueSetsExperimental;
}

/**
* If set to <code>true</code> the _filter search parameter will be enabled on this server. Note that _filter
* is very powerful, but also potentially dangerous as it can allow a user to create a query for which there
@@ -1720,83 +1700,118 @@ public boolean isStoreRequestId() {
}

/**
* EXPERIMENTAL - Do not use in production!
* <p>
* If set to {@code true}, ValueSets and expansions are stored in terminology tables. This is to facilitate
* optimization of the $expand operation on large ValueSets.
* </p>
* <p>
* The default value for this setting is {@code true}.
* </p>
*
* @since 4.1.0
*/
public boolean isPreExpandValueSets() {
return myPreExpandValueSets;
}

/**
* <p>
* If set to {@code true}, ValueSets and expansions are stored in terminology tables. This is to facilitate
* optimization of the $expand operation on large ValueSets.
* </p>
* <p>
* The default value for this setting is {@code true}.
* </p>
*
* @since 4.1.0
*/
public void setPreExpandValueSets(boolean thePreExpandValueSets) {
myPreExpandValueSets = thePreExpandValueSets;
}

/**
* <p>
* This is the default value of {@code offset} parameter for the ValueSet {@code $expand} operation when
* {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
* {@link DaoConfig#isPreExpandValueSets()} returns {@code true}.
* </p>
* <p>
* The default value for this setting is {@code 0}.
* </p>
*
* @since 4.1.0
*/
public int getPreExpandValueSetsDefaultOffsetExperimental() {
return myPreExpandValueSetsDefaultOffsetExperimental;
public int getPreExpandValueSetsDefaultOffset() {
return myPreExpandValueSetsDefaultOffset;
}

/**
* EXPERIMENTAL - Do not use in production!
* <p>
* This is the default value of {@code count} parameter for the ValueSet {@code $expand} operation when
* {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
* {@link DaoConfig#isPreExpandValueSets()} returns {@code true}.
* </p>
* <p>
* The default value for this setting is {@code 1000}.
* </p>
*
* @since 4.1.0
*/
public int getPreExpandValueSetsDefaultCountExperimental() {
return myPreExpandValueSetsDefaultCountExperimental;
public int getPreExpandValueSetsDefaultCount() {
return myPreExpandValueSetsDefaultCount;
}

/**
* EXPERIMENTAL - Do not use in production!
* <p>
* This is the default value of {@code count} parameter for the ValueSet {@code $expand} operation when
* {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
* {@link DaoConfig#isPreExpandValueSets()} returns {@code true}.
* </p>
* <p>
* If {@code thePreExpandValueSetsDefaultCountExperimental} is greater than
* {@link DaoConfig#getPreExpandValueSetsMaxCountExperimental()}, the lesser value is used.
* If {@code thePreExpandValueSetsDefaultCount} is greater than
* {@link DaoConfig#getPreExpandValueSetsMaxCount()}, the lesser value is used.
* </p>
* <p>
* The default value for this setting is {@code 1000}.
* </p>
*
* @since 4.1.0
*/
public void setPreExpandValueSetsDefaultCountExperimental(int thePreExpandValueSetsDefaultCountExperimental) {
myPreExpandValueSetsDefaultCountExperimental = Math.min(thePreExpandValueSetsDefaultCountExperimental, getPreExpandValueSetsMaxCountExperimental());
public void setPreExpandValueSetsDefaultCount(int thePreExpandValueSetsDefaultCount) {
myPreExpandValueSetsDefaultCount = Math.min(thePreExpandValueSetsDefaultCount, getPreExpandValueSetsMaxCount());
}

/**
* EXPERIMENTAL - Do not use in production!
* <p>
* This is the max value of {@code count} parameter for the ValueSet {@code $expand} operation when
* {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
* {@link DaoConfig#isPreExpandValueSets()} returns {@code true}.
* </p>
* <p>
* The default value for this setting is {@code 1000}.
* </p>
*
* @since 4.1.0
*/
public int getPreExpandValueSetsMaxCountExperimental() {
return myPreExpandValueSetsMaxCountExperimental;
public int getPreExpandValueSetsMaxCount() {
return myPreExpandValueSetsMaxCount;
}

/**
* EXPERIMENTAL - Do not use in production!
* <p>
* This is the max value of {@code count} parameter for the ValueSet {@code $expand} operation when
* {@link DaoConfig#isPreExpandValueSetsExperimental()} returns {@code true}.
* {@link DaoConfig#isPreExpandValueSets()} returns {@code true}.
* </p>
* <p>
* If {@code thePreExpandValueSetsMaxCountExperimental} is lesser than
* {@link DaoConfig#getPreExpandValueSetsDefaultCountExperimental()}, the default {@code count} is lowered to the
* If {@code thePreExpandValueSetsMaxCount} is lesser than
* {@link DaoConfig#getPreExpandValueSetsDefaultCount()}, the default {@code count} is lowered to the
* new max {@code count}.
* </p>
* <p>
* The default value for this setting is {@code 1000}.
* </p>
*
* @since 4.1.0
*/
public void setPreExpandValueSetsMaxCountExperimental(int thePreExpandValueSetsMaxCountExperimental) {
myPreExpandValueSetsMaxCountExperimental = thePreExpandValueSetsMaxCountExperimental;
setPreExpandValueSetsDefaultCountExperimental(Math.min(getPreExpandValueSetsDefaultCountExperimental(), getPreExpandValueSetsMaxCountExperimental()));
public void setPreExpandValueSetsMaxCount(int thePreExpandValueSetsMaxCount) {
myPreExpandValueSetsMaxCount = thePreExpandValueSetsMaxCount;
setPreExpandValueSetsDefaultCount(Math.min(getPreExpandValueSetsDefaultCount(), getPreExpandValueSetsMaxCount()));
}

public enum IndexEnabledEnum {
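
For context, the renamed pre-expansion settings above are ordinary DaoConfig properties, and the clamping rule in the Javadoc means the default $expand page size can never exceed the configured maximum. A short usage sketch, based only on the getters and setters shown in this diff (the surrounding class is illustrative, and DaoConfig's existing package, ca.uhn.fhir.jpa.dao, is assumed):

import ca.uhn.fhir.jpa.dao.DaoConfig;

public class PreExpansionSettingsSketch {
   public static void main(String[] args) {
      DaoConfig daoConfig = new DaoConfig();

      // ValueSet pre-expansion is now enabled by default; turn it off explicitly if undesired.
      daoConfig.setPreExpandValueSets(false);

      // The default page size is clamped to the current maximum (1000), so 2000 is stored as 1000.
      daoConfig.setPreExpandValueSetsDefaultCount(2000);

      // Lowering the maximum also lowers the default count to the new maximum.
      daoConfig.setPreExpandValueSetsMaxCount(500);
      System.out.println(daoConfig.getPreExpandValueSetsDefaultCount()); // prints 500
   }
}
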
@@ -73,8 +73,8 @@ protected void postPersist(ResourceTable theEntity, Subscription theSubscription


@Override
protected ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion,
Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
public ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion,
Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime, theForceUpdate, theCreateNewHistoryEntry);

if (theDeletedTimestampOrNull != null) {
@@ -0,0 +1,18 @@
package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

import java.util.Date;

public interface IJpaDao<T extends IBaseResource> {
@SuppressWarnings("unchecked")
ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, ResourceTable
theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry);

ResourceTable updateInternal(RequestDetails theRequestDetails, T theResource, boolean thePerformIndexing, boolean theForceUpdateVersion,
ResourceTable theEntity, IIdType theResourceId, IBaseResource theOldResource);
}
@@ -909,10 +909,13 @@ private Map<BUNDLEENTRY, ResourceTable> doTransactionWriteOperations(final Servl
IPrimitiveType<Date> deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource);
Date deletedTimestampOrNull = deletedInstantOrNull != null ? deletedInstantOrNull.getValue() : null;

IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(nextResource.getClass());
IJpaDao jpaDao = (IJpaDao) dao;

if (updatedEntities.contains(nextOutcome.getEntity())) {
myDao.updateInternal(theRequest, nextResource, true, false, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource());
jpaDao.updateInternal(theRequest, nextResource, true, false, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource());
} else if (!nonUpdatedEntities.contains(nextOutcome.getEntity())) {
myDao.updateEntity(theRequest, nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, true, false, theUpdateTime, false, true);
jpaDao.updateEntity(theRequest, nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, true, false, theUpdateTime, false, true);
}
}

@@ -125,8 +125,8 @@ protected void preDelete(CodeSystem theResourceToDelete, ResourceTable theEntity
}

@Override
protected ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
public ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime, theForceUpdate, theCreateNewHistoryEntry);

CodeSystem csDstu3 = (CodeSystem) theResource;