Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
Expand All @@ -34,21 +35,22 @@
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.util.InterceptorUtil;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.SimplePreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseResource;
Expand All @@ -70,6 +72,7 @@
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;

public class PersistedJpaBundleProvider implements IBundleProvider {

Expand All @@ -78,7 +81,9 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
/*
* Autowired fields
*/

private final RequestDetails myRequest;
@Autowired
protected PlatformTransactionManager myTxManager;
@PersistenceContext
private EntityManager myEntityManager;
@Autowired
Expand All @@ -90,22 +95,22 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
protected PlatformTransactionManager myTxManager;
@Autowired
private FhirContext myContext;
@Autowired
private ISearchCoordinatorSvc mySearchCoordinatorSvc;
@Autowired
private ISearchCacheSvc mySearchCacheSvc;
@Autowired
private RequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Autowired
private DaoConfig myDaoConfig;

/*
* Non autowired fields (will be different for every instance
* of this class, since it's a prototype
*/

private final RequestDetails myRequest;
@Autowired
private MemoryCacheService myMemoryCacheService;
private Search mySearchEntity;
private String myUuid;
private SearchCacheStatusEnum myCacheStatus;
Expand Down Expand Up @@ -241,17 +246,57 @@ public boolean ensureSearchEntityLoaded() {

if (mySearchEntity.getSearchType() == SearchTypeEnum.HISTORY) {
if (mySearchEntity.getTotalCount() == null) {
new TransactionTemplate(myTxManager).executeWithoutResult(t->{
HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
Long count = historyBuilder.fetchCount(getRequestPartitionId());
mySearchEntity.setTotalCount(count.intValue());
});
calculateHistoryCount();
}
}

return true;
}

/**
 * Resolves the total count for a history search and stores it on the search
 * entity. This runs outside of any DB transaction; in the default
 * {@literal COUNT_CACHED} mode the count is served through a loading cache,
 * which throttles database access by collapsing concurrent requests for the
 * same (expensive) count query into a single DB call.
 */
private void calculateHistoryCount() {
	// Build the most specific cache key available: instance > type > system
	MemoryCacheService.HistoryCountKey cacheKey;
	if (mySearchEntity.getResourceId() != null) {
		cacheKey = MemoryCacheService.HistoryCountKey.forInstance(mySearchEntity.getResourceId());
	} else if (mySearchEntity.getResourceType() != null) {
		cacheKey = MemoryCacheService.HistoryCountKey.forType(mySearchEntity.getResourceType());
	} else {
		cacheKey = MemoryCacheService.HistoryCountKey.forSystem();
	}

	// Each invocation performs the count inside its own short-lived transaction
	Function<MemoryCacheService.HistoryCountKey, Integer> countLoader = unusedKey -> new TransactionTemplate(myTxManager).execute(t -> {
		HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
		return historyBuilder.fetchCount(getRequestPartitionId()).intValue();
	});

	// A date-bounded history can't reuse a cached unbounded count
	boolean hasDateBounds = mySearchEntity.getLastUpdatedLow() != null || mySearchEntity.getLastUpdatedHigh() != null;

	switch (myDaoConfig.getHistoryCountMode()) {
		case COUNT_ACCURATE: {
			// Always hit the database for an exact total
			int accurateCount = countLoader.apply(cacheKey);
			mySearchEntity.setTotalCount(accurateCount);
			break;
		}
		case CACHED_ONLY_WITHOUT_OFFSET: {
			if (!hasDateBounds) {
				int cachedCount = myMemoryCacheService.get(MemoryCacheService.CacheEnum.HISTORY_COUNT, cacheKey, countLoader);
				mySearchEntity.setTotalCount(cachedCount);
			}
			break;
		}
		case COUNT_DISABLED: {
			// Leave the total unset - callers treat it as unknown
			break;
		}
	}

}

@Override
public InstantDt getPublished() {
ensureSearchEntityLoaded();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
Expand Down Expand Up @@ -192,6 +193,7 @@ public String logUpdateQueriesForCurrentThread() {

/**
* Log all captured SELECT queries
*
* @return a formatted, loggable rendering of the captured SELECT queries
*/
public String logSelectQueriesForCurrentThread(int... theIndexes) {
Expand All @@ -217,15 +219,23 @@ public String logSelectQueriesForCurrentThread(int... theIndexes) {
* Log all captured SELECT queries
*/
public List<SqlQuery> logSelectQueries() {
	// Delegates to the two-arg variant with the defaults: inline the bind
	// parameters into the SQL and pretty-print it
	return logSelectQueries(true, true);
}

/**
 * Logs every captured SELECT query as a single INFO message, then returns
 * the captured queries.
 *
 * @param theInlineParams if {@code true}, bind parameters are substituted into the logged SQL
 * @param theFormatSql    if {@code true}, the logged SQL is pretty-printed
 */
public List<SqlQuery> logSelectQueries(boolean theInlineParams, boolean theFormatSql) {
	List<SqlQuery> capturedQueries = getSelectQueries();
	List<String> renderedQueries = new ArrayList<>();
	for (SqlQuery nextQuery : capturedQueries) {
		renderedQueries.add(CircularQueueCaptureQueriesListener.formatQueryAsSql(nextQuery, theInlineParams, theFormatSql));
	}
	ourLog.info("Select Queries:\n{}", String.join("\n", renderedQueries));
	return capturedQueries;
}


/**
* Log first captured SELECT query
*/
Expand Down Expand Up @@ -353,8 +363,16 @@ public int countDeleteQueriesForCurrentThread() {
}


@Nonnull
static String formatQueryAsSql(SqlQuery theQuery) {
	// Default rendering: inline the bind parameters and pretty-print the SQL.
	// Note: a leftover "String formattedSql = theQuery.getSql(true, true)" was
	// removed here - it formatted the query a second time and was never read.
	boolean inlineParams = true;
	boolean formatSql = true;
	return formatQueryAsSql(theQuery, inlineParams, formatSql);
}

@Nonnull
static String formatQueryAsSql(SqlQuery theQuery, boolean inlineParams, boolean formatSql) {
String formattedSql = theQuery.getSql(inlineParams, formatSql);
StringBuilder b = new StringBuilder();
b.append("SqlQuery at ");
b.append(new InstantType(new Date(theQuery.getQueryTimestamp())).getValueAsString());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,16 +27,18 @@
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import java.util.EnumMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
* This class acts as a central spot for all of the many Caffeine caches we use in HAPI FHIR.
* <p>
Expand Down Expand Up @@ -120,7 +122,8 @@ public enum CacheEnum {
CONCEPT_TRANSLATION(TranslationQuery.class),
MATCH_URL(String.class),
CONCEPT_TRANSLATION_REVERSE(TranslationQuery.class),
RESOURCE_CONDITIONAL_CREATE_VERSION(IIdType.class);
RESOURCE_CONDITIONAL_CREATE_VERSION(IIdType.class),
HISTORY_COUNT(HistoryCountKey.class);

private final Class<?> myKeyType;

Expand All @@ -137,6 +140,17 @@ public static class TagDefinitionCacheKey {
private final String myCode;
private final int myHashCode;

// Builds an immutable cache key from the tag's type/system/code triple.
// The hash is precomputed once here because the key is immutable and is
// used heavily as a map/cache key.
public TagDefinitionCacheKey(TagTypeEnum theType, String theSystem, String theCode) {
	myType = theType;
	mySystem = theSystem;
	myCode = theCode;
	myHashCode = new HashCodeBuilder(17, 37)
		.append(myType)
		.append(mySystem)
		.append(myCode)
		.toHashCode();
}

@Override
public boolean equals(Object theO) {
boolean retVal = false;
Expand All @@ -156,17 +170,49 @@ public boolean equals(Object theO) {
public int hashCode() {
return myHashCode;
}
}

public TagDefinitionCacheKey(TagTypeEnum theType, String theSystem, String theCode) {
myType = theType;
mySystem = theSystem;
myCode = theCode;
myHashCode = new HashCodeBuilder(17, 37)
.append(myType)
.append(mySystem)
.append(myCode)
.toHashCode();

/**
 * Cache key for history count lookups. A key identifies one of three scopes:
 * a single resource instance, all resources of one type, or the whole system.
 * Instances are immutable; obtain them via the {@code forXxx} factory methods.
 */
public static class HistoryCountKey {
	// Non-null only for type-level keys
	private final String myTypeName;
	// Non-null only for instance-level keys
	private final Long myInstanceId;
	// Precomputed since the key is immutable and used as a cache key
	private final int myHashCode;

	private HistoryCountKey(String theTypeName, Long theInstanceId) {
		myTypeName = theTypeName;
		myInstanceId = theInstanceId;
		myHashCode = new HashCodeBuilder().append(myTypeName).append(myInstanceId).toHashCode();
	}

	/** Key covering history across the entire system */
	public static HistoryCountKey forSystem() {
		return new HistoryCountKey(null, null);
	}

	/** Key covering history for one resource type */
	public static HistoryCountKey forType(@Nonnull String theType) {
		assert isNotBlank(theType);
		return new HistoryCountKey(theType, null);
	}

	/** Key covering history for one resource instance */
	public static HistoryCountKey forInstance(@Nonnull Long theInstanceId) {
		assert theInstanceId != null;
		return new HistoryCountKey(null, theInstanceId);
	}

	@Override
	public boolean equals(Object theO) {
		if (!(theO instanceof HistoryCountKey)) {
			return false;
		}
		HistoryCountKey other = (HistoryCountKey) theO;
		return new EqualsBuilder().append(myTypeName, other.myTypeName).append(myInstanceId, other.myInstanceId).isEquals();
	}

	@Override
	public int hashCode() {
		return myHashCode;
	}

}

}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.dao.dstu2;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
Expand Down Expand Up @@ -120,6 +121,7 @@ public final void after() {
myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
myDaoConfig.setTreatReferencesAsLogical(new DaoConfig().getTreatReferencesAsLogical());
myDaoConfig.setEnforceReferentialIntegrityOnDelete(new DaoConfig().isEnforceReferentialIntegrityOnDelete());
myDaoConfig.setHistoryCountMode(DaoConfig.DEFAULT_HISTORY_COUNT_MODE);
}

private void assertGone(IIdType theId) {
Expand Down Expand Up @@ -651,6 +653,8 @@ public void testDeleteFailsIfIncomingLinks() {

@Test
public void testDeleteResource() {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

int initialHistory = myPatientDao.history(null, null, mySrd).size();

IIdType id1;
Expand Down Expand Up @@ -694,7 +698,7 @@ public void testDeleteResource() {
}

IBundleProvider history = myPatientDao.history(null, null, mySrd);
assertEquals(4 + initialHistory, history.size().intValue());
assertEquals(4 + initialHistory, history.sizeOrThrowNpe());
List<IBaseResource> resources = history.getResources(0, 4);
assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) resources.get(0)));

Expand Down Expand Up @@ -1036,6 +1040,7 @@ public void testHistoryByForcedId() {

@Test
public void testHistoryOverMultiplePages() throws Exception {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);
String methodName = "testHistoryOverMultiplePages";

/*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
Expand Down Expand Up @@ -126,6 +127,7 @@ public final void after() {
myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
myDaoConfig.setTreatReferencesAsLogical(new DaoConfig().getTreatReferencesAsLogical());
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
myDaoConfig.setHistoryCountMode(DaoConfig.DEFAULT_HISTORY_COUNT_MODE);
}

private void assertGone(IIdType theId) {
Expand Down Expand Up @@ -895,6 +897,8 @@ public void testDeleteFailsIfIncomingLinks() {

@Test
public void testDeleteResource() {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

int initialHistory = myPatientDao.history(null, null, mySrd).size();

IIdType id1;
Expand Down Expand Up @@ -1282,6 +1286,7 @@ public void testHistoryByForcedId() {
@Test
public void testHistoryOverMultiplePages() throws Exception {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

String methodName = "testHistoryOverMultiplePages";

Expand Down Expand Up @@ -1432,7 +1437,9 @@ public void testHistoryOverMultiplePages() throws Exception {
}

@Test
public void testHistoryReflectsMetaOperations() throws Exception {
public void testHistoryReflectsMetaOperations() {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

Patient inPatient = new Patient();
inPatient.addName().setFamily("version1");
inPatient.getMeta().addProfile("http://example.com/1");
Expand Down Expand Up @@ -1517,6 +1524,8 @@ public void testHistoryWithDeletedResource() throws Exception {

@Test
public void testHistoryWithFromAndTo() throws Exception {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

String methodName = "testHistoryWithFromAndTo";

Patient patient = new Patient();
Expand Down Expand Up @@ -1548,6 +1557,7 @@ public void testHistoryWithFromAndTo() throws Exception {

@Test
public void testHistoryWithFutureSinceDate() throws Exception {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

Date before = new Date();
Thread.sleep(10);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.SqlQuery;
Expand Down Expand Up @@ -46,6 +47,7 @@ public void afterResetDao() {
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
myDaoConfig.setDeleteEnabled(new DaoConfig().isDeleteEnabled());
myDaoConfig.setMatchUrlCache(new DaoConfig().getMatchUrlCache());
myDaoConfig.setHistoryCountMode(DaoConfig.DEFAULT_HISTORY_COUNT_MODE);
}

@BeforeEach
Expand Down Expand Up @@ -401,6 +403,8 @@ public void assertNoPartitionSelectors() {

@Test
public void testHistory_Server() {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

runInTransaction(() -> {
Patient p = new Patient();
p.setId("A");
Expand Down Expand Up @@ -457,6 +461,8 @@ public void testHistory_Server() {
*/
@Test
public void testHistory_Server_WithTags() {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

runInTransaction(() -> {
Patient p = new Patient();
p.getMeta().addTag("system", "code1", "displaY1");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.JpaResourceDao;
Expand All @@ -14,6 +15,7 @@
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
Expand Down Expand Up @@ -43,8 +45,6 @@
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;

import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import org.apache.commons.io.IOUtils;
Expand Down Expand Up @@ -131,7 +131,6 @@
import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
Expand Down Expand Up @@ -163,8 +162,9 @@ public final void after() {
myDaoConfig.setEnforceReferentialIntegrityOnDelete(new DaoConfig().isEnforceReferentialIntegrityOnDelete());
myDaoConfig.setEnforceReferenceTargetTypes(new DaoConfig().isEnforceReferenceTargetTypes());
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
}
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
myDaoConfig.setHistoryCountMode(DaoConfig.DEFAULT_HISTORY_COUNT_MODE);
}

@BeforeEach
public void before() {
Expand Down Expand Up @@ -673,9 +673,9 @@ public void testChoiceParamQuantityWithNormalizedQuantitySearchSupported() {
IBundleProvider found = myObservationDao.search(new SearchParameterMap(Observation.SP_VALUE_QUANTITY, new QuantityParam("ne123", UcumServiceUtil.UCUM_CODESYSTEM_URL, "cm")).setLoadSynchronous(true));
assertEquals(0, found.size().intValue());
}

}

@Test
public void testChoiceParamQuantityPrecision() {
Observation o3 = new Observation();
Expand Down Expand Up @@ -754,9 +754,9 @@ public void testChoiceParamQuantityPrecisionWithNormalizedQuantitySearchSupporte
List<IIdType> list = toUnqualifiedVersionlessIds(found);
assertThat(list, Matchers.empty());
}

}

@Test
public void testChoiceParamString() {

Expand Down Expand Up @@ -1370,6 +1370,8 @@ public void testDeleteFailsIfIncomingLinks() {

@Test
public void testDeleteResource() {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

int initialHistory = myPatientDao.history(null, null, mySrd).size();

IIdType id1;
Expand Down Expand Up @@ -1559,7 +1561,7 @@ public void testDeleteWithHas() {
myObservationDao.read(obs1id);
myObservationDao.read(obs2id);
}

@Test
public void testDeleteWithMatchUrl() {
String methodName = "testDeleteWithMatchUrl";
Expand Down Expand Up @@ -1845,6 +1847,8 @@ public void testHistoryByForcedId() {

@Test
public void testHistoryOverMultiplePages() throws Exception {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

String methodName = "testHistoryOverMultiplePages";

Patient patient = new Patient();
Expand Down Expand Up @@ -1995,6 +1999,8 @@ public void testHistoryOverMultiplePages() throws Exception {

@Test
public void testHistoryReflectsMetaOperations() {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

Patient inPatient = new Patient();
inPatient.addName().setFamily("version1");
inPatient.getMeta().addProfile("http://example.com/1");
Expand Down Expand Up @@ -2079,6 +2085,8 @@ public void testHistoryWithDeletedResource() {

@Test
public void testHistoryWithFromAndTo() throws Exception {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

String methodName = "testHistoryWithFromAndTo";

Patient patient = new Patient();
Expand Down Expand Up @@ -2110,6 +2118,7 @@ public void testHistoryWithFromAndTo() throws Exception {

@Test
public void testHistoryWithFutureSinceDate() throws Exception {
myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);

Date before = new Date();
Thread.sleep(10);
Expand Down Expand Up @@ -3158,10 +3167,10 @@ public void testSaveAndReturnCollectionBundle() throws IOException {
assertTrue(next.getResource().getIdElement().hasIdPart());
}
}

@Test()
public void testSortByComposite() {

IIdType pid0;
IIdType oid1;
IIdType oid2;
Expand All @@ -3179,55 +3188,55 @@ public void testSortByComposite() {
obs.getSubject().setReferenceElement(pid0);
obs.getCode().addCoding().setCode("2345-7").setSystem("http://loinc.org");
obs.setValue(new StringType("200"));

oid1 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();

ourLog.info("Observation: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
}

{
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("FOO");
obs.getSubject().setReferenceElement(pid0);
obs.getCode().addCoding().setCode("2345-7").setSystem("http://loinc.org");
obs.setValue(new StringType("300"));

oid2 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();

ourLog.info("Observation: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
}

{
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("FOO");
obs.getSubject().setReferenceElement(pid0);
obs.getCode().addCoding().setCode("2345-7").setSystem("http://loinc.org");
obs.setValue(new StringType("150"));

oid3 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();

ourLog.info("Observation: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
}

{
Observation obs = new Observation();
obs.addIdentifier().setSystem("urn:system").setValue("FOO");
obs.getSubject().setReferenceElement(pid0);
obs.getCode().addCoding().setCode("2345-7").setSystem("http://loinc.org");
obs.setValue(new StringType("250"));

oid4 = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();

ourLog.info("Observation: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(obs));
}


SearchParameterMap pm = new SearchParameterMap();
pm.setSort(new SortSpec(Observation.SP_CODE_VALUE_STRING));


IBundleProvider found = myObservationDao.search(pm);

List<IIdType> list = toUnqualifiedVersionlessIds(found);
assertEquals(4, list.size());
assertEquals(oid3, list.get(0));
Expand Down Expand Up @@ -3350,20 +3359,20 @@ public void testSortById() {
IIdType id2 = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();

p = new Patient();
p.setId(methodName+"1");
p.setId(methodName + "1");
p.addIdentifier().setSystem("urn:system").setValue(methodName);
IIdType idMethodName1 = myPatientDao.update(p, mySrd).getId().toUnqualifiedVersionless();
assertEquals(methodName+"1", idMethodName1.getIdPart());
assertEquals(methodName + "1", idMethodName1.getIdPart());

p = new Patient();
p.addIdentifier().setSystem("urn:system").setValue(methodName);
IIdType id3 = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();

p = new Patient();
p.setId(methodName+"2");
p.setId(methodName + "2");
p.addIdentifier().setSystem("urn:system").setValue(methodName);
IIdType idMethodName2 = myPatientDao.update(p, mySrd).getId().toUnqualifiedVersionless();
assertEquals(methodName+"2", idMethodName2.getIdPart());
assertEquals(methodName + "2", idMethodName2.getIdPart());

p = new Patient();
p.addIdentifier().setSystem("urn:system").setValue(methodName);
Expand Down Expand Up @@ -3533,7 +3542,7 @@ public void testSortByQuantity() {
assertThat(actual, contains(id4, id3, id2, id1));

}

@Test
@Disabled
public void testSortByQuantityWithNormalizedQuantitySearchFullSupported() {
Expand Down Expand Up @@ -4015,15 +4024,15 @@ public void testTagsAndProfilesAndSecurityLabelsWithCreateAndReadAndSearch() {
published = (ArrayList<Coding>) retrieved.getMeta().getTag();
sort(published);
assertEquals(3, published.size());
assertEquals( "Dog", published.get(0).getCode());
assertEquals( "Puppies", published.get(0).getDisplay());
assertEquals( null, published.get(0).getSystem());
assertEquals( "Cat", published.get(1).getCode());
assertEquals( "Kittens", published.get(1).getDisplay());
assertEquals( "http://foo", published.get(1).getSystem());
assertEquals( "Cow", published.get(2).getCode());
assertEquals( "Calves", published.get(2).getDisplay());
assertEquals( "http://foo", published.get(2).getSystem());
assertEquals("Dog", published.get(0).getCode());
assertEquals("Puppies", published.get(0).getDisplay());
assertEquals(null, published.get(0).getSystem());
assertEquals("Cat", published.get(1).getCode());
assertEquals("Kittens", published.get(1).getDisplay());
assertEquals("http://foo", published.get(1).getSystem());
assertEquals("Cow", published.get(2).getCode());
assertEquals("Calves", published.get(2).getDisplay());
assertEquals("http://foo", published.get(2).getSystem());

secLabels = retrieved.getMeta().getSecurity();
sortCodings(secLabels);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
Expand All @@ -26,6 +27,7 @@
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.ClasspathUtil;
import org.apache.commons.io.IOUtils;
import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IAnyResource;
Expand Down Expand Up @@ -464,7 +466,7 @@ public void testCircularCreateAndDelete() {
*/
@Test
public void testContainedArePreservedForBug410() throws IOException {
String input = IOUtils.toString(getClass().getResourceAsStream("/r4/bug-410-bundle.xml"), StandardCharsets.UTF_8);
String input = ClasspathUtil.loadResource("/r4/bug-410-bundle.xml");
Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, input);

Bundle output = mySystemDao.transaction(mySrd, bundle);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,286 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;

public class JpaHistoryR4Test extends BaseJpaR4SystemTest {

private static final Logger ourLog = LoggerFactory.getLogger(JpaHistoryR4Test.class);

@AfterEach
public void after() {
	// Restore the default history count mode so later tests are not affected
	// by the modes set within individual test methods here
	myDaoConfig.setHistoryCountMode(DaoConfig.DEFAULT_HISTORY_COUNT_MODE);
}

// Verifies that with COUNT_DISABLED a type-level history search returns no
// total and never issues a "select count" query against the database.
@Test
public void testTypeHistory_TotalDisabled() {
	myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_DISABLED);
	create20Patients();

	/*
	 * Perform initial history
	 */

	myCaptureQueriesListener.clear();
	IBundleProvider history = myPatientDao.history(null, null, new SystemRequestDetails());

	// Simulate the server requesting the Bundle.total value
	// (must be null: counting is disabled)
	assertEquals(null, history.size());

	// Simulate the server actually loading the resources
	history.getResources(0, 10);

	assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
	assertEquals(0, myCaptureQueriesListener.countInsertQueries());
	assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
	// Resource query happens but not count query
	assertEquals(1, myCaptureQueriesListener.countSelectQueries());
	assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false).toLowerCase(Locale.ROOT), not(startsWith("select count")));

}

// Verifies that with COUNT_ACCURATE every type-level history search performs
// a fresh "select count" (2 selects: count + resource page), even on repeat.
@Test
public void testTypeHistory_CountAccurate() {
	myDaoConfig.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);
	create20Patients();

	/*
	 * Perform initial history
	 */

	myCaptureQueriesListener.clear();
	IBundleProvider history = myPatientDao.history(null, null, new SystemRequestDetails());

	// Simulate the server requesting the Bundle.total value
	assertEquals(20, history.sizeOrThrowNpe());

	// Simulate the server actually loading the resources
	history.getResources(0, 10);

	assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
	assertEquals(0, myCaptureQueriesListener.countInsertQueries());
	assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
	// One count query plus one resource-version page query
	assertEquals(2, myCaptureQueriesListener.countSelectQueries());
	assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false).toLowerCase(Locale.ROOT), startsWith("select count"));
	assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false).toLowerCase(Locale.ROOT), containsString(" from hfj_res_ver "));

	/*
	 * Subsequent history should also perform count
	 */

	myCaptureQueriesListener.clear();
	history = myPatientDao.history(null, null, new SystemRequestDetails());

	// Simulate the server requesting the Bundle.total value
	assertEquals(20, history.sizeOrThrowNpe());

	// Simulate the server actually loading the resources
	history.getResources(0, 10);

	assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
	assertEquals(0, myCaptureQueriesListener.countInsertQueries());
	assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
	// The count is re-executed rather than served from any cache
	assertEquals(2, myCaptureQueriesListener.countSelectQueries());
	assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false).toLowerCase(Locale.ROOT), startsWith("select count"));
	assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false).toLowerCase(Locale.ROOT), containsString(" from hfj_res_ver "));

}

// Verifies the default (cached) mode: the first type-level history search
// performs a count query, and a second identical search serves the total
// from the in-memory cache (only the resource page query runs).
@Test
public void testTypeHistory_CountCacheEnabled() {
	create20Patients();

	/*
	 * Perform initial history
	 */

	myCaptureQueriesListener.clear();
	IBundleProvider history = myPatientDao.history(null, null, new SystemRequestDetails());

	// Simulate the server requesting the Bundle.total value
	assertEquals(20, history.sizeOrThrowNpe());

	// Simulate the server actually loading the resources
	history.getResources(0, 10);

	assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
	assertEquals(0, myCaptureQueriesListener.countInsertQueries());
	assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
	// First pass: count query plus resource-version page query
	assertEquals(2, myCaptureQueriesListener.countSelectQueries());
	myCaptureQueriesListener.logSelectQueries(false, false);
	assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false).toLowerCase(Locale.ROOT), startsWith("select count"));
	assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false).toLowerCase(Locale.ROOT), containsString(" from hfj_res_ver "));
	// History searches should not create persisted Search entities
	runInTransaction(() -> assertEquals(0, mySearchEntityDao.count()));

	/*
	 * Perform history a second time (no count should be performed)
	 */

	myCaptureQueriesListener.clear();
	history = myPatientDao.history(null, null, new SystemRequestDetails());

	// Simulate the server requesting the Bundle.total value
	assertEquals(20, history.sizeOrThrowNpe());

	// Simulate the server actually loading the resources
	history.getResources(0, 10);

	assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
	assertEquals(0, myCaptureQueriesListener.countInsertQueries());
	assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
	// Second pass: the cached total is reused, so only the page query runs
	assertEquals(1, myCaptureQueriesListener.countSelectQueries());
	myCaptureQueriesListener.logSelectQueries(false, false);
	assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false).toLowerCase(Locale.ROOT), containsString(" from hfj_res_ver "));
	runInTransaction(() -> assertEquals(0, mySearchEntityDao.count()));

}

@Test
public void testTypeHistory_CountCacheEnabled_WithOffset() {
create20Patients();
sleepAtLeast(10);

/*
* Perform initial history
*/

myCaptureQueriesListener.clear();
IBundleProvider history = myPatientDao.history(null, new Date(), new SystemRequestDetails());

// No count since there is an offset
assertEquals(null, history.size());

// Simulate the server actually loading the resources
assertEquals(20, history.getResources(0, 999).size());

assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false).toLowerCase(Locale.ROOT), not(startsWith("select count")));

}

@Test
public void testSystemHistory_CountCacheEnabled() {
create20Patients();

/*
* Perform initial history
*/

myCaptureQueriesListener.clear();
IBundleProvider history = mySystemDao.history(null, null, new SystemRequestDetails());

// Simulate the server requesting the Bundle.total value
assertEquals(20, history.sizeOrThrowNpe());

// Simulate the server actually loading the resources
history.getResources(0, 10);

assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logSelectQueries(false, false);
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false).toLowerCase(Locale.ROOT), startsWith("select count"));
assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false).toLowerCase(Locale.ROOT), containsString(" from hfj_res_ver "));
runInTransaction(() -> assertEquals(0, mySearchEntityDao.count()));

/*
* Perform history a second time (no count should be performed)
*/

myCaptureQueriesListener.clear();
history = mySystemDao.history(null, null, new SystemRequestDetails());

// Simulate the server requesting the Bundle.total value
assertEquals(20, history.sizeOrThrowNpe());

// Simulate the server actually loading the resources
history.getResources(0, 10);

assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logSelectQueries(false, false);
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false).toLowerCase(Locale.ROOT), containsString(" from hfj_res_ver "));
runInTransaction(() -> assertEquals(0, mySearchEntityDao.count()));

}

@Test
public void testSystemHistory_CountCacheEnabled_Concurrent() throws ExecutionException, InterruptedException {
create20Patients();
myCaptureQueriesListener.clear();

ExecutorService threadPool = Executors.newFixedThreadPool(20);
try {
Runnable task = () -> {
IBundleProvider history = mySystemDao.history(null, null, new SystemRequestDetails());
assertEquals(20, history.sizeOrThrowNpe());
assertEquals(20, history.getResources(0, 999).size());
};
List<Future<?>> futures = new ArrayList<>();
for (int i = 0; i < 20; i++) {
futures.add(threadPool.submit(task));
}

for (Future<?> next : futures) {
next.get();
}

} finally {
threadPool.shutdown();
}

assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());

// We fetch the history resources 20 times, but should only fetch the
// count(*) once, for a total of 21
assertEquals(20 + 1, myCaptureQueriesListener.countSelectQueries());

}

private void create20Patients() {
BundleBuilder bb = new BundleBuilder(myFhirCtx);
int count = 20;
for (int i = 0; i < count; i++) {
Patient p = new Patient();
p.setActive(true);
bb.addTransactionCreateEntry(p);
}
StopWatch sw = new StopWatch();
mySystemDao.transaction(new SystemRequestDetails(), (Bundle) bb.getBundle());
ourLog.info("Created {} patients in {}", count, sw);
}
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ca.uhn.fhirtest.config;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;
Expand Down Expand Up @@ -80,6 +81,7 @@ public DaoConfig daoConfig() {
retVal.setFilterParameterEnabled(true);
retVal.setDefaultSearchParamsCanBeOverridden(false);
retVal.getModelConfig().setIndexOnContainedResources(true);
// retVal.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE);
return retVal;
}

Expand Down