ISPN-12186 Update to Hibernate Search 6.0.0.Beta9 #8612

Merged: 1 commit, Aug 12, 2020
4 changes: 2 additions & 2 deletions build-configuration/pom.xml
@@ -141,7 +141,7 @@
<version.hibernate_dep.hibernate-commons-annotations>5.0.5.Final</version.hibernate_dep.hibernate-commons-annotations>
<version.hibernate_dep.jandex>2.0.5.Final</version.hibernate_dep.jandex>
<version.hibernate_dep.javassist>3.23.2-GA</version.hibernate_dep.javassist>
<version.hibernate.search>6.0.0.Beta8</version.hibernate.search>
<version.hibernate.search>6.0.0.Beta9</version.hibernate.search>
<version.infinispan>12.0.0-SNAPSHOT</version.infinispan>
<version.infinispan.arquillian>1.2.0.Beta3</version.infinispan.arquillian>
<version.infinispan.doclets>1.3.0</version.infinispan.doclets>
@@ -167,7 +167,7 @@
<version.junit>4.13</version.junit>
<version.junit5>5.6.2</version.junit5>
<version.log4j>2.13.2</version.log4j>
<version.lucene>8.5.2</version.lucene>
<version.lucene>8.6.0</version.lucene>
<version.lucene.module.slot>5.5.5</version.lucene.module.slot>
<version.metainf-services>1.7</version.metainf-services>
<version.mockito>2.27.0</version.mockito>
@@ -56,46 +56,45 @@ public KeyTransformationHandler(ClassLoader classLoader) {
*/
public Object stringToKey(String s) {
char type = s.charAt(0);
int idx = s.lastIndexOf(":");
switch (type) {
case 'S':
// this is a String, NOT a Short. For Short see case 'X'.
return s.substring(2, idx);
return s.substring(2);
case 'I':
// This is an Integer
return Integer.valueOf(s.substring(2, idx));
return Integer.valueOf(s.substring(2));
case 'Y':
// This is a BYTE
return Byte.valueOf(s.substring(2, idx));
return Byte.valueOf(s.substring(2));
case 'L':
// This is a Long
return Long.valueOf(s.substring(2, idx));
return Long.valueOf(s.substring(2));
case 'X':
// This is a SHORT
return Short.valueOf(s.substring(2, idx));
return Short.valueOf(s.substring(2));
case 'D':
// This is a Double
return Double.valueOf(s.substring(2, idx));
return Double.valueOf(s.substring(2));
case 'F':
// This is a Float
return Float.valueOf(s.substring(2, idx));
return Float.valueOf(s.substring(2));
case 'B':
// This is a Boolean, NOT a Byte. For Byte see case 'Y'.
return Boolean.valueOf(s.substring(2, idx));
return Boolean.valueOf(s.substring(2));
case 'C':
// This is a Character
return Character.valueOf(s.charAt(2));
case 'U':
// This is a java.util.UUID
return UUID.fromString(s.substring(2, idx));
return UUID.fromString(s.substring(2));
case 'A':
// This is an array of bytes encoded as a Base64 string
return Base64.getDecoder().decode(s.substring(2, idx));
return Base64.getDecoder().decode(s.substring(2));
case 'T':
// this is a custom Transformable or a type with a registered Transformer
int indexOfSecondDelimiter = s.indexOf(':', 2);
String keyClassName = s.substring(2, indexOfSecondDelimiter);
String keyAsString = s.substring(indexOfSecondDelimiter + 1, idx);
String keyAsString = s.substring(indexOfSecondDelimiter + 1);
Transformer t = getTransformer(keyClassName);
if (t != null) {
return t.fromString(keyAsString);
@@ -118,51 +117,51 @@ private Transformer getTransformer(String keyClassName) {
}

/**
* Stringify a key to encode the id and the infinispan segment field.
* Stringify a key so Lucene can use it as document id.
*
* @param key the key
* @return a string form of the key
*/
public String keyToString(Object key, int segmentId) {
public String keyToString(Object key) {
// This string should be in the format of:
// "<TYPE>:<KEY>:<SEGMENT>" for internally supported types or "T:<KEY_CLASS>:<KEY>:<SEGMENT>" for custom types
// "<TYPE>:<KEY>" for internally supported types or "T:<KEY_CLASS>:<KEY>" for custom types
// e.g.:
// "S:my string key:1"
// "I:75:123"
// "D:5.34:12"
// "B:f:12"
// "T:com.myorg.MyType:STRING_GENERATED_BY_TRANSFORMER_FOR_MY_TYPE:20"
// "S:my string key"
// "I:75"
// "D:5.34"
// "B:f"
// "T:com.myorg.MyType:STRING_GENERATED_BY_TRANSFORMER_FOR_MY_TYPE"

// First going to check if the key is a primitive or a String. Otherwise, check if it's a transformable.
// If none of those conditions are satisfied, we'll throw a CacheException.

// Using 'X' for Shorts and 'Y' for Bytes because 'S' is used for Strings and 'B' is being used for Booleans.
if (key instanceof byte[])
return "A:" + Base64.getEncoder().encodeToString((byte[]) key) + ":" + segmentId; //todo [anistor] need to profile Base64 versus simple hex encoding of the raw bytes
return "A:" + Base64.getEncoder().encodeToString((byte[]) key); //todo [anistor] need to profile Base64 versus simple hex encoding of the raw bytes
if (key instanceof String)
return "S:" + key + ":" + segmentId;
return "S:" + key;
else if (key instanceof Integer)
return "I:" + key + ":" + segmentId;
return "I:" + key;
else if (key instanceof Boolean)
return "B:" + key + ":" + segmentId;
return "B:" + key;
else if (key instanceof Long)
return "L:" + key + ":" + segmentId;
return "L:" + key;
else if (key instanceof Float)
return "F:" + key + ":" + segmentId;
return "F:" + key;
else if (key instanceof Double)
return "D:" + key + ":" + segmentId;
return "D:" + key;
else if (key instanceof Short)
return "X:" + key + ":" + segmentId;
return "X:" + key;
else if (key instanceof Byte)
return "Y:" + key + ":" + segmentId;
return "Y:" + key;
else if (key instanceof Character)
return "C:" + key + ":" + segmentId;
return "C:" + key;
else if (key instanceof UUID)
return "U:" + key + ":" + segmentId;
return "U:" + key;
else {
Transformer t = getTransformer(key.getClass());
if (t != null) {
return "T:" + key.getClass().getName() + ":" + t.toString(key) + ":" + segmentId;
return "T:" + key.getClass().getName() + ":" + t.toString(key);
} else {
throw CONTAINER.noTransformerForKey(key.getClass().getName());
}
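
For context (not part of the diff): a minimal round-trip sketch of the new key encoding, using the KeyTransformationHandler shown above. The package in the import is an assumption about where the class lives, and KeyEncodingSketch is a hypothetical example class; the literal ids in the comments follow the format documented in keyToString().

import java.util.UUID;

import org.infinispan.query.backend.KeyTransformationHandler; // assumed package

public class KeyEncodingSketch {
   public static void main(String[] args) {
      KeyTransformationHandler handler =
            new KeyTransformationHandler(KeyEncodingSketch.class.getClassLoader());

      // The segment is no longer appended to the stringified key:
      String intId = handler.keyToString(42);                 // "I:42"   (was "I:42:<segment>")
      String uuidId = handler.keyToString(UUID.randomUUID()); // "U:<random uuid>"

      // stringToKey() no longer has to strip a trailing ":<segment>" before decoding.
      Object restored = handler.stringToKey(intId);           // Integer 42
      assert restored.equals(42) && uuidId.startsWith("U:");
   }
}
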
@@ -340,21 +340,21 @@ void purgeIndex(IntSet segments) {
* Remove entries from all indexes by key.
*/
void removeFromIndexes(Object key, int segment) {
Futures.unwrappedExceptionJoin(getSearchIndexer().purge(keyToString(key, segment), segment+""));
Futures.unwrappedExceptionJoin(getSearchIndexer().purge(keyToString(key), String.valueOf(segment)));
}

// Method that will be called when data needs to be removed from Lucene.
private void removeFromIndexes(Object value, Object key, int segment) {
Futures.unwrappedExceptionJoin(getSearchIndexer().delete(keyToString(key, segment), value));
Futures.unwrappedExceptionJoin(getSearchIndexer().delete(keyToString(key), String.valueOf(segment), value));
}

private void updateIndexes(boolean usingSkipIndexCleanupFlag, Object value, Object key, int segment) {
// Note: it's generally unsafe to assume there is no previous entry to cleanup: always use UPDATE
// unless the specific flag is allowing this.
if (usingSkipIndexCleanupFlag) {
Futures.unwrappedExceptionJoin(getSearchIndexer().add(keyToString(key, segment), value));
Futures.unwrappedExceptionJoin(getSearchIndexer().add(keyToString(key), String.valueOf(segment), value));
} else {
Futures.unwrappedExceptionJoin(getSearchIndexer().addOrUpdate(keyToString(key, segment), value));
Futures.unwrappedExceptionJoin(getSearchIndexer().addOrUpdate(keyToString(key), String.valueOf(segment), value));
}
}

@@ -380,8 +380,8 @@ private Object extractKey(Object storedKey) {
return keyDataConversion.extractIndexable(storedKey);
}

private String keyToString(Object key, int segment) {
return keyTransformationHandler.keyToString(key, segment);
private String keyToString(Object key) {
return keyTransformationHandler.keyToString(key);
}

public KeyTransformationHandler getKeyTransformationHandler() {
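
For context (not part of the diff): the net effect of the changes in this file is that the cache segment moves out of the document id and into Hibernate Search's routing key. A condensed sketch, reusing the getSearchIndexer(), keyTransformationHandler and Futures helpers visible above (not a drop-in method):

   private void indexEntry(Object key, Object value, int segment) {
      // The document id now encodes only the key...
      String id = keyTransformationHandler.keyToString(key);
      // ...while the segment travels separately, as the routing key string.
      String route = String.valueOf(segment);
      Futures.unwrappedExceptionJoin(getSearchIndexer().addOrUpdate(id, route, value));
   }
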
@@ -682,7 +682,7 @@ public SearchQueryBuilder transformParsingResult(IckleParsingResult<TypeMetadata
}

private SearchQueryParsingResult transformToSearchQueryParsingResult(IckleParsingResult<TypeMetadata> parsingResult, Map<String, Object> namedParameters) {
SearchQueryMaker<TypeMetadata> queryMaker = new SearchQueryMaker<>(getSearchMapping());
SearchQueryMaker<TypeMetadata> queryMaker = new SearchQueryMaker<>(getSearchMapping(), propertyHelper);
return queryMaker
.transform(parsingResult, namedParameters, getTargetedClass(parsingResult), getTargetedNamedType(parsingResult));
}
@@ -19,6 +19,11 @@ static SearchProjectionInfo field(SearchProjectionFactory<EntityReference, ?> fa
return new SearchProjectionInfo(factory.field(absoluteFieldPath, type).toProjection(), false);
}

static SearchProjectionInfo multiField(SearchProjectionFactory<EntityReference, ?> factory,
String absoluteFieldPath, Class<?> type) {
return new SearchProjectionInfo(factory.field(absoluteFieldPath, type).multi().toProjection(), false);
}

static SearchProjectionInfo composite(SearchProjectionFactory<EntityReference, ?> factory,
SearchProjection<?>[] children) {
return new SearchProjectionInfo(factory.composite(children).toProjection(), false);
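
For context (not part of the diff): multiField() differs from field() only in calling .multi() on the Hibernate Search projection step, so each hit projects a List of all values of a repeated field instead of a single value. A usage sketch, where factory is the SearchProjectionFactory obtained from the query scope (as in SearchQueryMaker) and the field paths are hypothetical:

   // Single-valued property: one projected value per hit.
   SearchProjectionInfo title = SearchProjectionInfo.field(factory, "title", String.class);

   // Repeated (multi-valued) property: each hit projects a List of values.
   SearchProjectionInfo authors = SearchProjectionInfo.multiField(factory, "authors.name", String.class);
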
@@ -19,6 +19,7 @@
import org.hibernate.search.backend.lucene.LuceneBackend;
import org.hibernate.search.backend.lucene.LuceneExtension;
import org.hibernate.search.backend.lucene.search.predicate.dsl.LuceneSearchPredicateFactory;
import org.hibernate.search.engine.backend.common.spi.FieldPaths;
import org.hibernate.search.engine.backend.metamodel.IndexFieldDescriptor;
import org.hibernate.search.engine.backend.metamodel.IndexValueFieldDescriptor;
import org.hibernate.search.engine.search.predicate.SearchPredicate;
@@ -31,6 +32,7 @@
import org.hibernate.search.engine.search.predicate.dsl.RangePredicateFieldMoreStep;
import org.hibernate.search.engine.search.predicate.dsl.SimpleQueryFlag;
import org.hibernate.search.engine.search.projection.SearchProjection;
import org.hibernate.search.engine.search.projection.dsl.FieldProjectionValueStep;
import org.hibernate.search.engine.search.projection.dsl.SearchProjectionFactory;
import org.hibernate.search.engine.search.sort.SearchSort;
import org.hibernate.search.engine.search.sort.dsl.CompositeSortComponentsStep;
@@ -58,6 +60,7 @@
import org.infinispan.objectfilter.impl.syntax.PropertyValueExpr;
import org.infinispan.objectfilter.impl.syntax.Visitor;
import org.infinispan.objectfilter.impl.syntax.parser.IckleParsingResult;
import org.infinispan.objectfilter.impl.syntax.parser.ObjectPropertyHelper;
import org.infinispan.query.logging.Log;
import org.infinispan.search.mapper.common.EntityReference;
import org.infinispan.search.mapper.mapping.SearchIndexedEntity;
@@ -84,13 +87,15 @@ public final class SearchQueryMaker<TypeMetadata> implements Visitor<PredicateFi
private static final char LUCENE_WILDCARD_ESCAPE_CHARACTER = '\\';

private final SearchMapping searchMapping;
private final ObjectPropertyHelper<TypeMetadata> propertyHelper;

private Map<String, Object> namedParameters;
private LuceneSearchPredicateFactory predicateFactory;
private SearchIndexedEntity indexedEntity;

SearchQueryMaker(SearchMapping searchMapping) {
SearchQueryMaker(SearchMapping searchMapping, ObjectPropertyHelper<TypeMetadata> propertyHelper) {
this.searchMapping = searchMapping;
this.propertyHelper = propertyHelper;
}

public SearchQueryParsingResult transform(IckleParsingResult<TypeMetadata> parsingResult, Map<String, Object> namedParameters,
@@ -111,14 +116,14 @@ public SearchQueryParsingResult transform(IckleParsingResult<TypeMetadata> parsi
searchMapping.indexedEntity(targetedNamedType);

SearchPredicate predicate = makePredicate(parsingResult.getWhereClause()).toPredicate();
SearchProjectionInfo projection = makeProjection(scope.projection(), parsingResult.getProjections(),
SearchProjectionInfo projection = makeProjection(parsingResult.getTargetEntityMetadata(), scope.projection(), parsingResult.getProjections(),
parsingResult.getProjectedTypes());
SearchSort sort = makeSort(scope.sort(), parsingResult.getSortFields());

return new SearchQueryParsingResult(targetedType, targetedNamedType, projection, predicate, sort);
}

private SearchProjectionInfo makeProjection(SearchProjectionFactory<EntityReference, ?> projectionFactory,
private SearchProjectionInfo makeProjection(TypeMetadata typeMetadata, SearchProjectionFactory<EntityReference, ?> projectionFactory,
String[] projections, Class<?>[] projectedTypes) {
if (projections == null || projections.length == 0) {
return SearchProjectionInfo.entity(projectionFactory);
@@ -131,7 +136,12 @@ private SearchProjectionInfo makeProjection(SearchProjectionFactory<EntityRefere
if (KEY.equals(projections[0])) {
return SearchProjectionInfo.entityReference(projectionFactory);
}
return SearchProjectionInfo.field(projectionFactory, projections[0], projectedTypes[0]);
boolean isRepeatedProperty = propertyHelper.isRepeatedProperty(typeMetadata, FieldPaths.split(projections[0]));
if (isRepeatedProperty) {
return SearchProjectionInfo.multiField(projectionFactory, projections[0], projectedTypes[0]);
} else {
return SearchProjectionInfo.field(projectionFactory, projections[0], projectedTypes[0]);
}
}

SearchProjection<?>[] searchProjections = new SearchProjection<?>[projections.length];
@@ -141,7 +151,9 @@ private SearchProjectionInfo makeProjection(SearchProjectionFactory<EntityRefere
} else if (KEY.equals(projections[i])) {
searchProjections[i] = projectionFactory.entityReference().toProjection();
} else {
searchProjections[i] = projectionFactory.field(projections[i], projectedTypes[i]).toProjection();
boolean isMultiField = propertyHelper.isRepeatedProperty(typeMetadata, FieldPaths.split(projections[i]));
FieldProjectionValueStep<?, ?> projectionStep = projectionFactory.field(projections[i], projectedTypes[i]);
searchProjections[i] = isMultiField ? projectionStep.multi().toProjection() : projectionStep.toProjection();
}
}
return SearchProjectionInfo.composite(projectionFactory, searchProjections);
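
For context (not part of the diff): the new branch mirrors the single-projection case above: the Ickle metadata decides whether a path is repeated, and only then is .multi() applied. Condensed, with path standing in for a hypothetical projection path:

   boolean repeated = propertyHelper.isRepeatedProperty(typeMetadata, FieldPaths.split(path));
   FieldProjectionValueStep<?, ?> step = projectionFactory.field(path, String.class);
   SearchProjection<?> projection = repeated ? step.multi().toProjection() : step.toProjection();
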

This file was deleted.

@@ -17,7 +17,6 @@

import org.apache.lucene.search.BooleanQuery;
import org.hibernate.search.backend.lucene.analysis.LuceneAnalysisConfigurer;
import org.hibernate.search.mapper.pojo.mapping.definition.programmatic.ProgrammaticMappingConfigurationContext;
import org.infinispan.AdvancedCache;
import org.infinispan.Cache;
import org.infinispan.commons.CacheConfigurationException;
@@ -337,10 +336,6 @@ private SearchMappingHolder createSearchMapping(IndexingConfiguration indexingCo
searchMappingHolder.setEntityLoader(new EntityLoader(cache, keyTransformationHandler));
SearchMappingBuilder builder = searchMappingHolder.builder(SearchMappingBuilder.introspector(MethodHandles.lookup()));
builder.addEntityTypes(types);
ProgrammaticMappingConfigurationContext programmaticMapping = builder.programmaticMapping();
for (Class<?> type : types) {
programmaticMapping.type(type).routingKeyBinder(new CacheRoutingKeyBridge.Binder());
}
searchMappingHolder.build();
}

@@ -62,7 +62,7 @@ public CompletableFuture<?> updateIndex(Object key, Object value, int segment) {
return CompletableFuture.completedFuture(null);
}

final String idInString = keyTransformationHandler.keyToString(key, segment);
return searchMappingHolder.getSearchMapping().getSearchIndexer().addOrUpdate(idInString, value);
final String idInString = keyTransformationHandler.keyToString(key);
return searchMappingHolder.getSearchMapping().getSearchIndexer().addOrUpdate(idInString, String.valueOf(segment), value);
}
}