diff --git a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/RangeAggregationSpecificsIT.java b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/RangeAggregationSpecificsIT.java
index 489dfda53f3..dfd72c4bc5a 100644
--- a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/RangeAggregationSpecificsIT.java
+++ b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/RangeAggregationSpecificsIT.java
@@ -14,23 +14,24 @@
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
+import org.hibernate.search.engine.backend.common.DocumentReference;
import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement;
import org.hibernate.search.engine.backend.types.Aggregable;
import org.hibernate.search.engine.backend.types.Searchable;
-import org.hibernate.search.engine.backend.types.dsl.StandardIndexFieldTypeOptionsStep;
import org.hibernate.search.engine.backend.work.execution.spi.IndexIndexingPlan;
-import org.hibernate.search.engine.backend.common.DocumentReference;
import org.hibernate.search.engine.search.aggregation.AggregationKey;
import org.hibernate.search.engine.search.query.dsl.SearchQueryOptionsStep;
import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.AggregationDescriptor;
import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.RangeAggregationDescriptor;
import org.hibernate.search.integrationtest.backend.tck.testsupport.types.FieldTypeDescriptor;
-import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModel;
+import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModelsByType;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.TckConfiguration;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.ValueWrapper;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.rule.SearchSetupHelper;
@@ -39,60 +40,69 @@
import org.hibernate.search.util.common.data.RangeBoundInclusion;
import org.hibernate.search.util.impl.integrationtest.common.assertion.SearchResultAssert;
import org.hibernate.search.util.impl.integrationtest.mapper.stub.SimpleMappedIndex;
-
-import org.assertj.core.api.Assertions;
import org.hibernate.search.util.impl.test.annotation.PortedFromSearch5;
-import org.hibernate.search.util.impl.test.singleinstance.BeforeAll;
-import org.hibernate.search.util.impl.test.singleinstance.InstanceRule;
-import org.hibernate.search.util.impl.test.singleinstance.SingleInstanceRunnerWithParameters;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.assertj.core.api.Assertions;
+
/**
* Tests behavior specific to the range aggregation on supported field types.
*
* Behavior common to all single-field aggregations is tested in {@link SingleFieldAggregationBaseIT}.
*/
@RunWith(Parameterized.class)
-@Parameterized.UseParametersRunnerFactory(SingleInstanceRunnerWithParameters.Factory.class)
public class RangeAggregationSpecificsIT<F> {
private static final String AGGREGATION_NAME = "aggregationName";
+ private static Set<FieldTypeDescriptor<?>> supportedFieldTypes;
+ private static List<DataSet<?>> dataSets;
+
@Parameterized.Parameters(name = "{0}")
- public static Object[][] supportedTypes() {
- List<Object[]> combinations = new ArrayList<>();
+ public static Object[][] parameters() {
+ supportedFieldTypes = new LinkedHashSet<>();
+ dataSets = new ArrayList<>();
+ List<Object[]> parameters = new ArrayList<>();
AggregationDescriptor aggregationDescriptor = new RangeAggregationDescriptor();
- for ( FieldTypeDescriptor<?> fieldTypeDescriptor : FieldTypeDescriptor.getAll() ) {
- if ( aggregationDescriptor.getSingleFieldAggregationExpectations( fieldTypeDescriptor ).isSupported() ) {
- combinations.add( new Object[] {
- fieldTypeDescriptor
- } );
+ for ( FieldTypeDescriptor<?> fieldType : FieldTypeDescriptor.getAll() ) {
+ if ( aggregationDescriptor.getSingleFieldAggregationExpectations( fieldType ).isSupported() ) {
+ supportedFieldTypes.add( fieldType );
+ DataSet<?> dataSet = new DataSet<>( fieldType );
+ dataSets.add( dataSet );
+ parameters.add( new Object[] { fieldType, dataSet } );
}
}
- return combinations.toArray( new Object[0][] );
+ return parameters.toArray( new Object[0][] );
}
- @InstanceRule
- public SearchSetupHelper setupHelper = new SearchSetupHelper();
+ @ClassRule
+ public static final SearchSetupHelper setupHelper = new SearchSetupHelper();
- private final FieldTypeDescriptor<F> typeDescriptor;
- private final List<F> ascendingValues;
+ private static final SimpleMappedIndex<IndexBinding> index =
+ SimpleMappedIndex.of( "Main", IndexBinding::new );
- private SimpleMappedIndex<IndexBinding> index = SimpleMappedIndex.of( "Main", IndexBinding::new );
+ @BeforeClass
+ public static void setup() {
+ setupHelper.start().withIndex( index ).setup();
- public RangeAggregationSpecificsIT(FieldTypeDescriptor<F> typeDescriptor) {
- this.typeDescriptor = typeDescriptor;
- this.ascendingValues = typeDescriptor.getAscendingUniqueTermValues().getSingle();
+ for ( DataSet<?> dataSet : dataSets ) {
+ dataSet.init();
+ }
}
- @BeforeAll
- public void setup() {
- setupHelper.start().withIndex( index ).setup();
+ private final FieldTypeDescriptor<F> fieldType;
+ private final DataSet<F> dataSet;
+ private final List<F> ascendingValues;
- initData();
+ public RangeAggregationSpecificsIT(FieldTypeDescriptor<F> fieldType, DataSet<F> dataSet) {
+ this.fieldType = fieldType;
+ this.dataSet = dataSet;
+ this.ascendingValues = dataSet.ascendingValues;
}
@Test
@@ -100,15 +110,16 @@ public void setup() {
public void rangeAtMost() {
assumeNonCanonicalRangesSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.range( Range.atMost( ascendingValues.get( 2 ) ) )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -122,15 +133,16 @@ public void rangeAtMost() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.RangeFacetingTest.testRangeBelowExcludeLimit")
public void rangeLessThan() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.range( Range.lessThan( ascendingValues.get( 2 ) ) )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -144,15 +156,16 @@ public void rangeLessThan() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.RangeFacetingTest.testRangeAbove")
public void rangeAtLeast() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.range( Range.atLeast( ascendingValues.get( 3 ) ) )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -168,15 +181,16 @@ public void rangeAtLeast() {
public void rangeGreaterThan() {
assumeNonCanonicalRangesSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.range( Range.greaterThan( ascendingValues.get( 3 ) ) )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -190,19 +204,20 @@ public void rangeGreaterThan() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.RangeFacetingTest.testRangeWithExcludeLimitsAtEachLevel")
public void rangesCanonical() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.ranges( Arrays.asList(
Range.canonical( null, ascendingValues.get( 3 ) ),
Range.canonical( ascendingValues.get( 3 ), ascendingValues.get( 5 ) ),
Range.canonical( ascendingValues.get( 5 ), null )
) )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -220,13 +235,13 @@ public void rangesCanonical() {
public void rangesBetweenIncludingAllBounds() {
assumeNonCanonicalRangesSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.ranges( Arrays.asList(
Range.between( null, RangeBoundInclusion.INCLUDED,
ascendingValues.get( 2 ), RangeBoundInclusion.INCLUDED ),
@@ -236,6 +251,7 @@ public void rangesBetweenIncludingAllBounds() {
null, RangeBoundInclusion.INCLUDED )
) )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -264,19 +280,20 @@ public void rangesBetweenIncludingAllBounds() {
@Test
public void rangesOverlap() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.ranges( Arrays.asList(
Range.canonical( null, ascendingValues.get( 3 ) ),
Range.canonical( ascendingValues.get( 1 ), ascendingValues.get( 5 ) ),
Range.canonical( ascendingValues.get( 2 ), null )
) )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -292,11 +309,11 @@ public void rangesOverlap() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.RangeFacetingTest.testRangeQueryWithNullToAndFrom")
public void rangeNull() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
Assertions.assertThatThrownBy( () ->
index.createScope().aggregation().range()
- .field( fieldPath, typeDescriptor.getJavaType() )
+ .field( fieldPath, fieldType.getJavaType() )
.range( null )
)
.isInstanceOf( IllegalArgumentException.class )
@@ -306,11 +323,11 @@ public void rangeNull() {
@Test
public void rangesNull() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
Assertions.assertThatThrownBy( () ->
index.createScope().aggregation().range()
- .field( fieldPath, typeDescriptor.getJavaType() )
+ .field( fieldPath, fieldType.getJavaType() )
.ranges( null )
)
.isInstanceOf( IllegalArgumentException.class )
@@ -320,11 +337,11 @@ public void rangesNull() {
@Test
public void rangesContainingNull() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
Assertions.assertThatThrownBy( () ->
index.createScope().aggregation().range()
- .field( fieldPath, typeDescriptor.getJavaType() )
+ .field( fieldPath, fieldType.getJavaType() )
.ranges( Arrays.asList(
Range.canonical( ascendingValues.get( 0 ), ascendingValues.get( 1 ) ),
null
@@ -338,11 +355,11 @@ public void rangesContainingNull() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.RangeFacetingTest.testUnsupportedRangeParameterTypeThrowsException")
public void superClassFieldType() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
Assertions.assertThatThrownBy( () ->
index.createScope().aggregation().range()
- .field( fieldPath, typeDescriptor.getJavaType().getSuperclass() )
+ .field( fieldPath, fieldType.getJavaType().getSuperclass() )
)
.isInstanceOf( SearchException.class )
.hasMessageContaining( "Invalid type" )
@@ -356,18 +373,19 @@ public void superClassFieldType() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.RangeFacetingTest.testRangeQueryForDoubleWithZeroCount")
public void predicate() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
index.createScope().query()
- .where( f -> f.id().matchingAny( Arrays.asList( "document_1", "document_5" ) ) )
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .where( f -> f.id().matchingAny( Arrays.asList( dataSet.name + "_document_1", dataSet.name + "_document_5" ) ) )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.range( null, ascendingValues.get( 2 ) )
.range( ascendingValues.get( 2 ), ascendingValues.get( 5 ) )
.range( ascendingValues.get( 5 ), null )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -387,13 +405,13 @@ public void predicate() {
*/
@Test
public void limitAndOffset() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.range( null, ascendingValues.get( 2 ) )
.range( ascendingValues.get( 2 ), ascendingValues.get( 5 ) )
.range( ascendingValues.get( 5 ), null )
@@ -416,13 +434,13 @@ public void limitAndOffset() {
*/
@Test
public void rangeOverlap() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.range( ascendingValues.get( 0 ), null )
.range( null, ascendingValues.get( 2 ) )
.range( ascendingValues.get( 2 ), ascendingValues.get( 5 ) )
@@ -431,6 +449,7 @@ public void rangeOverlap() {
.range( ascendingValues.get( 5 ), null )
.range( null, ascendingValues.get( 6 ) )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -452,17 +471,18 @@ public void rangeOverlap() {
*/
@Test
public void order_asDefined() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<Map<Range<F>, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.range().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.range().field( fieldPath, fieldType.getJavaType() )
.range( null, ascendingValues.get( 2 ) )
.range( ascendingValues.get( 2 ), ascendingValues.get( 5 ) )
.range( ascendingValues.get( 5 ), null )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -486,35 +506,6 @@ private void assumeNonCanonicalRangesSupported() {
return index.createScope().query().where( f -> f.matchAll() );
}
- private void initData() {
- List<F> documentFieldValues = ascendingValues.subList( 0, 7 );
-
- IndexIndexingPlan<?> plan = index.createIndexingPlan();
- for ( int i = 0; i < documentFieldValues.size(); i++ ) {
- F value = documentFieldValues.get( i );
- plan.add( referenceProvider( "document_" + i ), document -> {
- document.addValue( index.binding().fieldModel.reference, value );
- document.addValue( index.binding().fieldWithConverterModel.reference, value );
- } );
- }
- plan.add( referenceProvider( "document_empty" ), document -> { } );
- plan.execute().join();
-
- // Check that all documents are searchable
- SearchResultAssert.assertThat(
- index.createScope().query()
- .where( f -> f.matchAll() )
- .toQuery()
- )
- .hasTotalHitCount( documentFieldValues.size() + 1 /* +1 for the empty document */ );
- }
-
- private SimpleFieldModel<F> mapField(IndexSchemaElement parent, String prefix,
- Consumer<StandardIndexFieldTypeOptionsStep<?, F>> additionalConfiguration) {
- return SimpleFieldModel.mapper( typeDescriptor, additionalConfiguration )
- .map( parent, prefix + typeDescriptor.getUniqueName() );
- }
-
@SuppressWarnings("unchecked")
private Consumer<Map<Range<F>, Long>> containsExactly(Consumer<List<Map.Entry<Range<F>, Long>>> expectationBuilder) {
List<Map.Entry<Range<F>, Long>> expected = new ArrayList<>();
@@ -523,26 +514,59 @@ private Consumer> containsExactly(Consumer> ex
.containsExactly( normalize( expected ).toArray( new Map.Entry[0] ) );
}
- private class IndexBinding {
- final SimpleFieldModel<F> fieldModel;
- final SimpleFieldModel<F> fieldWithConverterModel;
- final SimpleFieldModel<F> fieldWithAggregationDisabledModel;
+ private static class DataSet<F> {
+ final FieldTypeDescriptor<F> fieldType;
+ final String name;
+ final List<F> ascendingValues;
+ final List<F> documentFieldValues;
+
+ private DataSet(FieldTypeDescriptor<F> fieldType) {
+ this.fieldType = fieldType;
+ this.name = fieldType.getUniqueName();
+ this.ascendingValues = fieldType.getAscendingUniqueTermValues().getSingle();
+ this.documentFieldValues = ascendingValues.subList( 0, 7 );
+ }
+
+ private void init() {
+ IndexIndexingPlan<?> plan = index.createIndexingPlan();
+ for ( int i = 0; i < documentFieldValues.size(); i++ ) {
+ F value = documentFieldValues.get( i );
+ plan.add( referenceProvider( name + "_document_" + i, name ), document -> {
+ document.addValue( index.binding().fieldModels.get( fieldType ).reference, value );
+ document.addValue( index.binding().fieldWithConverterModels.get( fieldType ).reference, value );
+ } );
+ }
+ plan.add( referenceProvider( name + "_document_empty", name ), document -> { } );
+ plan.execute().join();
+
+ // Check that all documents are searchable
+ SearchResultAssert.assertThat(
+ index.createScope().query()
+ .where( f -> f.matchAll() )
+ .routing( name )
+ .toQuery()
+ )
+ .hasTotalHitCount( documentFieldValues.size() + 1 /* +1 for the empty document */ );
+ }
+ }
+
+ private static class IndexBinding {
+ final SimpleFieldModelsByType fieldModels;
+ final SimpleFieldModelsByType fieldWithConverterModels;
+ final SimpleFieldModelsByType fieldWithAggregationDisabledModels;
IndexBinding(IndexSchemaElement root) {
- fieldModel = mapField(
- root, "",
- c -> c.aggregable( Aggregable.YES )
+ fieldModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "", c -> c.aggregable( Aggregable.YES )
.searchable( Searchable.NO ) // Range aggregations should not need this
);
- fieldWithConverterModel = mapField(
- root, "converted_",
- c -> c.aggregable( Aggregable.YES )
+ fieldWithConverterModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "converted_", c -> c.aggregable( Aggregable.YES )
.dslConverter( ValueWrapper.class, ValueWrapper.toIndexFieldConverter() )
.projectionConverter( ValueWrapper.class, ValueWrapper.fromIndexFieldConverter() )
);
- fieldWithAggregationDisabledModel = mapField(
- root, "nonAggregable_",
- c -> c.aggregable( Aggregable.NO )
+ fieldWithAggregationDisabledModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "nonAggregable_", c -> c.aggregable( Aggregable.NO )
);
}
}
diff --git a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/SingleFieldAggregationBaseIT.java b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/SingleFieldAggregationBaseIT.java
index 72ccfbc1a8d..e64f3fb6e8a 100644
--- a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/SingleFieldAggregationBaseIT.java
+++ b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/SingleFieldAggregationBaseIT.java
@@ -10,10 +10,11 @@
import static org.hibernate.search.util.impl.integrationtest.mapper.stub.StubMapperUtils.referenceProvider;
import java.util.ArrayList;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;
+import java.util.Set;
import java.util.function.BiFunction;
-import java.util.function.Consumer;
import java.util.function.Function;
import org.hibernate.search.engine.backend.document.DocumentElement;
@@ -30,16 +31,16 @@
import org.hibernate.search.engine.backend.work.execution.spi.IndexIndexingPlan;
import org.hibernate.search.engine.search.aggregation.AggregationKey;
import org.hibernate.search.engine.search.aggregation.SearchAggregation;
-import org.hibernate.search.engine.search.common.ValueConvert;
import org.hibernate.search.engine.search.aggregation.dsl.AggregationFinalStep;
import org.hibernate.search.engine.search.aggregation.dsl.SearchAggregationFactory;
+import org.hibernate.search.engine.search.common.ValueConvert;
import org.hibernate.search.engine.search.predicate.dsl.PredicateFinalStep;
import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory;
import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.AggregationDescriptor;
import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.expectations.AggregationScenario;
import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.expectations.SupportedSingleFieldAggregationExpectations;
import org.hibernate.search.integrationtest.backend.tck.testsupport.types.FieldTypeDescriptor;
-import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModel;
+import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModelsByType;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.StandardFieldMapper;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.TypeAssertionHelper;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.ValueWrapper;
@@ -48,76 +49,69 @@
import org.hibernate.search.util.impl.integrationtest.common.assertion.SearchResultAssert;
import org.hibernate.search.util.impl.integrationtest.mapper.stub.SimpleMappedIndex;
import org.hibernate.search.util.impl.integrationtest.mapper.stub.StubMappingScope;
-import org.assertj.core.api.Assertions;
import org.hibernate.search.util.impl.test.annotation.PortedFromSearch5;
import org.hibernate.search.util.impl.test.annotation.TestForIssue;
-import org.hibernate.search.util.impl.test.singleinstance.BeforeAll;
-import org.hibernate.search.util.impl.test.singleinstance.InstanceRule;
-import org.hibernate.search.util.impl.test.singleinstance.SingleInstanceRunnerWithParameters;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.assertj.core.api.Assertions;
+
/**
* Tests basic behavior common to all single-field aggregations (range, terms, ...)
* on supported types.
*/
@RunWith(Parameterized.class)
-@Parameterized.UseParametersRunnerFactory(SingleInstanceRunnerWithParameters.Factory.class)
public class SingleFieldAggregationBaseIT<F> {
private static final String AGGREGATION_NAME = "aggregationName";
- @Parameterized.Parameters(name = "{0} - {1}")
- public static Object[][] aggregationTypeCombinations() {
- List<Object[]> combinations = new ArrayList<>();
+ private static Set<FieldTypeDescriptor<?>> supportedFieldTypes;
+ private static List<DataSet<?>> dataSets;
+
+ @Parameterized.Parameters(name = "{0}")
+ public static Object[][] parameters() {
+ supportedFieldTypes = new LinkedHashSet<>();
+ dataSets = new ArrayList<>();
+ List<Object[]> parameters = new ArrayList<>();
for ( AggregationDescriptor aggregationDescriptor : AggregationDescriptor.getAll() ) {
for ( FieldTypeDescriptor<?> fieldTypeDescriptor : FieldTypeDescriptor.getAll() ) {
Optional<? extends SupportedSingleFieldAggregationExpectations<?>> expectations =
aggregationDescriptor.getSingleFieldAggregationExpectations( fieldTypeDescriptor ).getSupported();
if ( expectations.isPresent() ) {
- combinations.add( new Object[] {
- aggregationDescriptor,
- fieldTypeDescriptor,
- expectations.get()
- } );
+ supportedFieldTypes.add( fieldTypeDescriptor );
+ DataSet<?> dataSet = new DataSet<>( expectations.get() );
+ dataSets.add( dataSet );
+ parameters.add( new Object[] { expectations.get(), dataSet } );
}
}
}
- return combinations.toArray( new Object[0][] );
+ return parameters.toArray( new Object[0][] );
}
- @InstanceRule
- public SearchSetupHelper setupHelper = new SearchSetupHelper();
+ @ClassRule
+ public static final SearchSetupHelper setupHelper = new SearchSetupHelper();
- private final FieldTypeDescriptor<F> typeDescriptor;
- private final SupportedSingleFieldAggregationExpectations<F> expectations;
-
- private SimpleMappedIndex<IndexBinding> mainIndex =
+ private static final SimpleMappedIndex<IndexBinding> mainIndex =
SimpleMappedIndex.of( "Main", IndexBinding::new );
- private SimpleMappedIndex<IndexBinding> compatibleIndex =
+ private static final SimpleMappedIndex<IndexBinding> compatibleIndex =
SimpleMappedIndex.of( "Compatible", IndexBinding::new );
- private SimpleMappedIndex<RawFieldCompatibleIndexBinding> rawFieldCompatibleIndex =
+ private static final SimpleMappedIndex<RawFieldCompatibleIndexBinding> rawFieldCompatibleIndex =
SimpleMappedIndex.of( "RawFieldCompatible", RawFieldCompatibleIndexBinding::new );
- private SimpleMappedIndex<IncompatibleIndexBinding> incompatibleIndex =
+ private static final SimpleMappedIndex<IncompatibleIndexBinding> incompatibleIndex =
SimpleMappedIndex.of( "Incompatible", IncompatibleIndexBinding::new );
- private SimpleMappedIndex<IndexBinding> emptyIndex =
+ private static final SimpleMappedIndex<IndexBinding> emptyIndex =
SimpleMappedIndex.of( "Empty", IndexBinding::new );
- private SimpleMappedIndex<IndexBinding> nullOnlyIndex =
+ private static final SimpleMappedIndex<IndexBinding> nullOnlyIndex =
SimpleMappedIndex.of( "NullOnly", IndexBinding::new );
- private SimpleMappedIndex<MultiValuedIndexBinding> multiValuedIndex =
+ private static final SimpleMappedIndex<MultiValuedIndexBinding> multiValuedIndex =
SimpleMappedIndex.of( "MultiValued", MultiValuedIndexBinding::new );
- public SingleFieldAggregationBaseIT(AggregationDescriptor thisIsJustForTestName,
- FieldTypeDescriptor<F> typeDescriptor,
- SupportedSingleFieldAggregationExpectations<F> expectations) {
- this.typeDescriptor = typeDescriptor;
- this.expectations = expectations;
- }
-
- @BeforeAll
- public void setup() {
+ @BeforeClass
+ public static void setup() {
setupHelper.start()
.withIndexes(
mainIndex,
@@ -130,7 +124,20 @@ public void setup() {
)
.setup();
- initData();
+ for ( DataSet<?> dataSet : dataSets ) {
+ dataSet.init();
+ }
+ }
+
+ private final SupportedSingleFieldAggregationExpectations<F> expectations;
+ private final FieldTypeDescriptor<F> fieldType;
+ private final DataSet<F> dataSet;
+
+ public SingleFieldAggregationBaseIT(SupportedSingleFieldAggregationExpectations<F> expectations,
+ DataSet<F> dataSet) {
+ this.expectations = expectations;
+ this.fieldType = expectations.fieldType();
+ this.dataSet = dataSet;
}
@Test
@@ -144,18 +151,19 @@ public void setup() {
})
public void simple() {
// Need a separate method to handle the scenario generics
- doTest_simple( expectations.simple( typeDescriptor ) );
+ doTest_simple( expectations.simple() );
}
private <A> void doTest_simple(AggregationScenario<A> scenario) {
StubMappingScope scope = mainIndex.createScope();
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<A> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
scope.query()
.where( f -> f.matchAll() )
.aggregation( aggregationKey, f -> scenario.setup( f, fieldPath ) )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -167,12 +175,12 @@ private void doTest_simple(AggregationScenario scenario) {
@Test
public void aggregationObject() {
// Need a separate method to handle the scenario generics
- doTest_aggregationObject( expectations.simple( typeDescriptor ) );
+ doTest_aggregationObject( expectations.simple() );
}
private <A> void doTest_aggregationObject(AggregationScenario<A> scenario) {
StubMappingScope scope = mainIndex.createScope();
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<A> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchAggregation<A> aggregation = scenario.setup( scope.aggregation(), fieldPath )
@@ -182,6 +190,7 @@ private void doTest_aggregationObject(AggregationScenario scenario) {
mainIndex.createScope().query()
.where( f -> f.matchAll() )
.aggregation( aggregationKey, aggregation )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -193,12 +202,12 @@ private void doTest_aggregationObject(AggregationScenario scenario) {
@Test
public void aggregationObject_reuse_onScopeTargetingSameIndexes() {
// Need a separate method to handle the scenario generics
- doTest_aggregationObject_reuse_onScopeTargetingSameIndexes( expectations.simple( typeDescriptor ) );
+ doTest_aggregationObject_reuse_onScopeTargetingSameIndexes( expectations.simple() );
}
private <A> void doTest_aggregationObject_reuse_onScopeTargetingSameIndexes(AggregationScenario<A> scenario) {
StubMappingScope scope = mainIndex.createScope();
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey<A> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchAggregation<A> aggregation = scenario.setup( scope.aggregation(), fieldPath )
@@ -208,6 +217,7 @@ private void doTest_aggregationObject_reuse_onScopeTargetingSameIndexes(Aggr
scope.query()
.where( f -> f.matchAll() )
.aggregation( aggregationKey, aggregation )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -220,6 +230,7 @@ private void doTest_aggregationObject_reuse_onScopeTargetingSameIndexes(Aggr
scope.query()
.where( f -> f.matchAll() )
.aggregation( aggregationKey, aggregation )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -233,6 +244,7 @@ private void doTest_aggregationObject_reuse_onScopeTargetingSameIndexes(Aggr
scope.query()
.where( f -> f.matchAll() )
.aggregation( aggregationKey, aggregation )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -244,12 +256,12 @@ private void doTest_aggregationObject_reuse_onScopeTargetingSameIndexes(Aggr
@Test
public void aggregationObject_reuse_onScopeTargetingDifferentIndexes() {
// Need a separate method to handle the scenario generics
- doTest_aggregationObject_reuse_onScopeTargetingDifferentIndexes( expectations.simple( typeDescriptor ) );
+ doTest_aggregationObject_reuse_onScopeTargetingDifferentIndexes( expectations.simple() );
}
private void doTest_aggregationObject_reuse_onScopeTargetingDifferentIndexes(AggregationScenario scenario) {
StubMappingScope scope = mainIndex.createScope();
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchAggregation aggregation = scenario.setup( scope.aggregation(), fieldPath )
@@ -260,6 +272,7 @@ private void doTest_aggregationObject_reuse_onScopeTargetingDifferentIndexes
compatibleIndex.createScope().query()
.where( f -> f.matchAll() )
.aggregation( aggregationKey, aggregation )
+ .routing( dataSet.name )
.toQuery()
)
.isInstanceOf( SearchException.class )
@@ -272,6 +285,7 @@ private void doTest_aggregationObject_reuse_onScopeTargetingDifferentIndexes
mainIndex.createScope( compatibleIndex ).query()
.where( f -> f.matchAll() )
.aggregation( aggregationKey, aggregation )
+ .routing( dataSet.name )
.toQuery()
)
.isInstanceOf( SearchException.class )
@@ -284,9 +298,9 @@ private void doTest_aggregationObject_reuse_onScopeTargetingDifferentIndexes
@TestForIssue(jiraKey = { "HSEARCH-1968" })
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.NoQueryResultsFacetingTest")
public void noMatch() {
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.withoutMatch( typeDescriptor );
+ AggregationScenario> scenario = expectations.withoutMatch();
testValidAggregation(
scenario, mainIndex.createScope(),
f -> f.id().matching( "none" ), // Don't match any document
@@ -305,9 +319,9 @@ public void noMatch() {
"org.hibernate.search.test.query.facet.EdgeCaseFacetTest"
})
public void emptyIndex() {
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.withoutMatch( typeDescriptor );
+ AggregationScenario> scenario = expectations.withoutMatch();
testValidAggregation(
scenario, emptyIndex.createScope(), fieldPath
);
@@ -321,9 +335,9 @@ public void emptyIndex() {
@TestForIssue(jiraKey = "HSEARCH-2955")
@PortedFromSearch5(original = "org.hibernate.search.test.facet.NoIndexedValueFacetingTest")
public void nullOnlyIndex() {
- String fieldPath = nullOnlyIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = nullOnlyIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.withoutMatch( typeDescriptor );
+ AggregationScenario> scenario = expectations.withoutMatch();
testValidAggregation(
scenario, nullOnlyIndex.createScope(), fieldPath
);
@@ -340,9 +354,9 @@ public void nullOnlyIndex() {
"org.hibernate.search.test.query.facet.MultiValuedFacetingTest"
})
public void multiValued() {
- String fieldPath = multiValuedIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = multiValuedIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.onMultiValuedIndex( typeDescriptor );
+ AggregationScenario> scenario = expectations.onMultiValuedIndex();
testValidAggregation(
scenario, multiValuedIndex.createScope(), fieldPath
);
@@ -352,7 +366,7 @@ public void multiValued() {
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testNullFieldNameThrowsException")
public void nullFieldPath() {
// Try to pass a "null" field type
- AggregationScenario> scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
Assertions.assertThatThrownBy( () -> scenario.setup( mainIndex.createScope().aggregation(), null ) )
.isInstanceOf( IllegalArgumentException.class )
@@ -362,7 +376,7 @@ public void nullFieldPath() {
@Test
public void nullFieldType() {
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
// Try to pass a "null" field type
AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.nullType() );
@@ -375,9 +389,9 @@ public void nullFieldType() {
@Test
public void invalidFieldType_conversionEnabled() {
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.wrongType( typeDescriptor ) );
+ AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.wrongType( fieldType ) );
Assertions.assertThatThrownBy( () -> scenario.setup( mainIndex.createScope().aggregation(), fieldPath ) )
.isInstanceOf( SearchException.class )
@@ -388,9 +402,9 @@ public void invalidFieldType_conversionEnabled() {
@Test
public void invalidFieldType_conversionDisabled() {
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.wrongType( typeDescriptor ) );
+ AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.wrongType( fieldType ) );
Assertions.assertThatThrownBy( () -> scenario.setupWithConverterSetting(
mainIndex.createScope().aggregation(), fieldPath, ValueConvert.NO
@@ -406,7 +420,7 @@ public void invalidFieldType_conversionDisabled() {
public void unknownField() {
String fieldPath = "unknownField";
- AggregationScenario> scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
Assertions.assertThatThrownBy( () -> scenario.setup( mainIndex.createScope().aggregation(), fieldPath ) )
.isInstanceOf( SearchException.class )
@@ -419,7 +433,7 @@ public void unknownField() {
public void objectField_nested() {
String fieldPath = mainIndex.binding().nestedObject.relativeFieldName;
- AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.identity( typeDescriptor ) );
+ AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.identity( fieldType ) );
Assertions.assertThatThrownBy( () -> scenario.setup( mainIndex.createScope().aggregation(), fieldPath ) )
.isInstanceOf( SearchException.class )
@@ -432,7 +446,7 @@ public void objectField_nested() {
public void objectField_flattened() {
String fieldPath = mainIndex.binding().flattenedObject.relativeFieldName;
- AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.identity( typeDescriptor ) );
+ AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.identity( fieldType ) );
Assertions.assertThatThrownBy( () -> scenario.setup( mainIndex.createScope().aggregation(), fieldPath ) )
.isInstanceOf( SearchException.class )
@@ -445,9 +459,9 @@ public void objectField_flattened() {
@TestForIssue(jiraKey = "HSEARCH-1748")
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.FacetUnknownFieldFailureTest.testKnownFieldNameNotConfiguredForFacetingThrowsException")
public void aggregationsDisabled() {
- String fieldPath = mainIndex.binding().fieldWithAggregationDisabledModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldWithAggregationDisabledModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.identity( typeDescriptor ) );
+ AggregationScenario> scenario = expectations.withFieldType( TypeAssertionHelper.identity( fieldType ) );
Assertions.assertThatThrownBy( () -> scenario.setup( mainIndex.createScope().aggregation(), fieldPath ) )
.isInstanceOf( SearchException.class )
@@ -457,10 +471,10 @@ public void aggregationsDisabled() {
@Test
public void withConverter_conversionEnabled() {
- String fieldPath = mainIndex.binding().fieldWithConverterModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldWithConverterModels.get( fieldType ).relativeFieldName;
AggregationScenario> scenario = expectations.withFieldType(
- TypeAssertionHelper.wrapper( typeDescriptor )
+ TypeAssertionHelper.wrapper( fieldType )
);
testValidAggregation(
scenario, mainIndex.createScope(), fieldPath
@@ -469,9 +483,9 @@ public void withConverter_conversionEnabled() {
@Test
public void withConverter_conversionDisabled() {
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
testValidAggregationWithConverterSetting(
scenario, mainIndex.createScope(), fieldPath, ValueConvert.NO
);
@@ -479,9 +493,9 @@ public void withConverter_conversionDisabled() {
@Test
public void withConverter_invalidFieldType() {
- String fieldPath = mainIndex.binding().fieldWithConverterModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldWithConverterModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
Assertions.assertThatThrownBy( () -> scenario.setup( mainIndex.createScope().aggregation(), fieldPath ) )
.isInstanceOf( SearchException.class )
@@ -496,9 +510,9 @@ public void withConverter_invalidFieldType() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testMultipleFacets")
public void duplicated_differentKeys() {
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
// A separate method is needed in order to write type-safe code
doTestDuplicatedDifferentKeys( fieldPath, scenario );
@@ -512,6 +526,7 @@ private void doTestDuplicatedDifferentKeys(String fieldPath, AggregationScen
mainIndex.createScope().query().where( f -> f.matchAll() )
.aggregation( key1, f -> scenario.setup( f, fieldPath ) )
.aggregation( key2, f -> scenario.setup( f, fieldPath ) )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -529,9 +544,9 @@ private void doTestDuplicatedDifferentKeys(String fieldPath, AggregationScen
*/
@Test
public void duplicated_sameKey() {
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
// A separate method is needed in order to write type-safe code
doTestDuplicatedSameKey( fieldPath, scenario );
@@ -552,9 +567,9 @@ private void doTestDuplicatedSameKey(String fieldPath, AggregationScenario scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
testValidAggregation(
scenario, mainIndex.createScope(), fieldPath
);
@@ -564,9 +579,9 @@ public void inFlattenedObject() {
public void multiIndex_withCompatibleIndex_noConverter() {
StubMappingScope scope = mainIndex.createScope( compatibleIndex );
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.onMainAndOtherIndex( typeDescriptor );
+ AggregationScenario> scenario = expectations.onMainAndOtherIndex();
testValidAggregation(
scenario, scope, fieldPath
);
@@ -576,10 +591,10 @@ public void multiIndex_withCompatibleIndex_noConverter() {
public void multiIndex_withCompatibleIndex_conversionEnabled() {
StubMappingScope scope = mainIndex.createScope( compatibleIndex );
- String fieldPath = mainIndex.binding().fieldWithConverterModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldWithConverterModels.get( fieldType ).relativeFieldName;
AggregationScenario> scenario = expectations.withFieldTypeOnMainAndOtherIndex(
- TypeAssertionHelper.wrapper( typeDescriptor )
+ TypeAssertionHelper.wrapper( fieldType )
);
testValidAggregation(
scenario, scope, fieldPath
@@ -590,10 +605,10 @@ public void multiIndex_withCompatibleIndex_conversionEnabled() {
public void multiIndex_withRawFieldCompatibleIndex_conversionEnabled() {
StubMappingScope scope = mainIndex.createScope( rawFieldCompatibleIndex );
- String fieldPath = mainIndex.binding().fieldWithConverterModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldWithConverterModels.get( fieldType ).relativeFieldName;
AggregationScenario> scenario = expectations.withFieldTypeOnMainAndOtherIndex(
- TypeAssertionHelper.wrapper( typeDescriptor )
+ TypeAssertionHelper.wrapper( fieldType )
);
Assertions.assertThatThrownBy( () -> scenario.setup( scope.aggregation(), fieldPath ) )
@@ -606,9 +621,9 @@ public void multiIndex_withRawFieldCompatibleIndex_conversionEnabled() {
public void multiIndex_withRawFieldCompatibleIndex_conversionDisabled() {
StubMappingScope scope = mainIndex.createScope( rawFieldCompatibleIndex );
- String fieldPath = mainIndex.binding().fieldWithConverterModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldWithConverterModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.onMainAndOtherIndex( typeDescriptor );
+ AggregationScenario> scenario = expectations.onMainAndOtherIndex();
testValidAggregationWithConverterSetting(
scenario, scope, fieldPath, ValueConvert.NO
);
@@ -618,9 +633,9 @@ public void multiIndex_withRawFieldCompatibleIndex_conversionDisabled() {
public void multiIndex_withIncompatibleIndex_conversionEnabled() {
StubMappingScope scope = mainIndex.createScope( incompatibleIndex );
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
Assertions.assertThatThrownBy( () -> scenario.setup( scope.aggregation(), fieldPath ) )
.isInstanceOf( SearchException.class )
@@ -632,9 +647,9 @@ public void multiIndex_withIncompatibleIndex_conversionEnabled() {
public void multiIndex_withIncompatibleIndex_conversionDisabled() {
StubMappingScope scope = mainIndex.createScope( incompatibleIndex );
- String fieldPath = mainIndex.binding().fieldModel.relativeFieldName;
+ String fieldPath = mainIndex.binding().fieldModels.get( fieldType ).relativeFieldName;
- AggregationScenario> scenario = expectations.simple( typeDescriptor );
+ AggregationScenario> scenario = expectations.simple();
Assertions.assertThatThrownBy( () -> scenario.setupWithConverterSetting(
scope.aggregation(), fieldPath, ValueConvert.NO
@@ -670,6 +685,7 @@ private void testValidAggregation(AggregationScenario scenario, StubMappi
scope.query()
.where( predicateContributor )
.aggregation( aggregationKey, f -> aggregationContributor.apply( f, scenario ) )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -678,190 +694,192 @@ private void testValidAggregation(AggregationScenario scenario, StubMappi
);
}
- private void initData() {
- List mainIndexDocumentFieldValues = expectations.getMainIndexDocumentFieldValues();
- List otherIndexDocumentFieldValues = expectations.getOtherIndexDocumentFieldValues();
- List> multiValuedIndexDocumentFieldValues = expectations.getMultiValuedIndexDocumentFieldValues();
+ private static class DataSet {
+ final SupportedSingleFieldAggregationExpectations expectations;
+ final FieldTypeDescriptor fieldType;
+ final String name;
- IndexIndexingPlan> plan = mainIndex.createIndexingPlan();
- for ( int i = 0; i < mainIndexDocumentFieldValues.size(); i++ ) {
- F value = mainIndexDocumentFieldValues.get( i );
- plan.add( referenceProvider( "document_" + i ), document -> {
- document.addValue( mainIndex.binding().fieldModel.reference, value );
- document.addValue( mainIndex.binding().fieldWithConverterModel.reference, value );
+ private DataSet(SupportedSingleFieldAggregationExpectations expectations) {
+ this.expectations = expectations;
+ this.fieldType = expectations.fieldType();
+ this.name = expectations.aggregationName() + "_" + expectations.fieldType().getUniqueName();
+ }
- // Note: this object must be single-valued for these tests
- DocumentElement flattenedObject = document.addObject( mainIndex.binding().flattenedObject.self );
- flattenedObject.addValue( mainIndex.binding().flattenedObject.fieldModel.reference, value );
+ private void init() {
+ FieldTypeDescriptor fieldType = expectations.fieldType();
- // Note: this object must be single-valued for these tests
- DocumentElement nestedObject = document.addObject( mainIndex.binding().nestedObject.self );
- nestedObject.addValue( mainIndex.binding().nestedObject.fieldModel.reference, value );
- } );
- }
- plan.add( referenceProvider( "document_empty" ), document -> { } );
- plan.execute().join();
-
- plan = compatibleIndex.createIndexingPlan();
- for ( int i = 0; i < otherIndexDocumentFieldValues.size(); i++ ) {
- F value = otherIndexDocumentFieldValues.get( i );
- plan.add( referenceProvider( "compatibleindex_document_" + i ), document -> {
- document.addValue( compatibleIndex.binding().fieldModel.reference, value );
- document.addValue( compatibleIndex.binding().fieldWithConverterModel.reference, value );
- } );
- }
- plan.execute().join();
+ List mainIndexDocumentFieldValues = expectations.getMainIndexDocumentFieldValues();
+ List otherIndexDocumentFieldValues = expectations.getOtherIndexDocumentFieldValues();
+ List> multiValuedIndexDocumentFieldValues = expectations.getMultiValuedIndexDocumentFieldValues();
- plan = rawFieldCompatibleIndex.createIndexingPlan();
- for ( int i = 0; i < otherIndexDocumentFieldValues.size(); i++ ) {
- F value = otherIndexDocumentFieldValues.get( i );
- plan.add( referenceProvider( "rawcompatibleindex_document_" + i ), document -> {
- document.addValue( rawFieldCompatibleIndex.binding().fieldWithConverterModel.reference, value );
- } );
- }
- plan.execute().join();
-
- plan = nullOnlyIndex.createIndexingPlan();
- plan.add( referenceProvider( "nullOnlyIndex_document_0" ), document -> {
- document.addValue( nullOnlyIndex.binding().fieldModel.reference, null );
- } );
- plan.execute().join();
-
- plan = multiValuedIndex.createIndexingPlan();
- for ( int i = 0; i < multiValuedIndexDocumentFieldValues.size(); i++ ) {
- List values = multiValuedIndexDocumentFieldValues.get( i );
- plan.add( referenceProvider( "document_" + i ), document -> {
- for ( F value : values ) {
- document.addValue( multiValuedIndex.binding().fieldModel.reference, value );
- }
+ IndexIndexingPlan> plan = mainIndex.createIndexingPlan();
+ for ( int i = 0; i < mainIndexDocumentFieldValues.size(); i++ ) {
+ F value = mainIndexDocumentFieldValues.get( i );
+ plan.add( referenceProvider( name + "_document_" + i, name ), document -> {
+ document.addValue( mainIndex.binding().fieldModels.get( fieldType ).reference, value );
+ document.addValue( mainIndex.binding().fieldWithConverterModels.get( fieldType ).reference, value );
+
+ // Note: this object must be single-valued for these tests
+ DocumentElement flattenedObject = document.addObject( mainIndex.binding().flattenedObject.self );
+ flattenedObject.addValue( mainIndex.binding().flattenedObject.fieldModels.get( fieldType ).reference, value );
+
+ // Note: this object must be single-valued for these tests
+ DocumentElement nestedObject = document.addObject( mainIndex.binding().nestedObject.self );
+ nestedObject.addValue( mainIndex.binding().nestedObject.fieldModels.get( fieldType ).reference, value );
+ } );
+ }
+ plan.add( referenceProvider( name + "_document_empty", name ), document -> { } );
+ plan.execute().join();
+
+ plan = compatibleIndex.createIndexingPlan();
+ for ( int i = 0; i < otherIndexDocumentFieldValues.size(); i++ ) {
+ F value = otherIndexDocumentFieldValues.get( i );
+ plan.add( referenceProvider( name + "_compatibleindex_document_" + i, name ), document -> {
+ document.addValue( compatibleIndex.binding().fieldModels.get( fieldType ).reference, value );
+ document.addValue( compatibleIndex.binding().fieldWithConverterModels.get( fieldType ).reference, value );
+ } );
+ }
+ plan.execute().join();
+
+ plan = rawFieldCompatibleIndex.createIndexingPlan();
+ for ( int i = 0; i < otherIndexDocumentFieldValues.size(); i++ ) {
+ F value = otherIndexDocumentFieldValues.get( i );
+ plan.add( referenceProvider( name + "_rawcompatibleindex_document_" + i, name ), document -> {
+ document.addValue( rawFieldCompatibleIndex.binding().fieldWithConverterModels.get( fieldType ).reference, value );
+ } );
+ }
+ plan.execute().join();
+
+ plan = nullOnlyIndex.createIndexingPlan();
+ plan.add( referenceProvider( name + "_nullOnlyIndex_document_0", name ), document -> {
+ document.addValue( nullOnlyIndex.binding().fieldModels.get( fieldType ).reference, null );
} );
+ plan.execute().join();
+
+ plan = multiValuedIndex.createIndexingPlan();
+ for ( int i = 0; i < multiValuedIndexDocumentFieldValues.size(); i++ ) {
+ List values = multiValuedIndexDocumentFieldValues.get( i );
+ plan.add( referenceProvider( name + "_document_" + i, name ), document -> {
+ for ( F value : values ) {
+ document.addValue( multiValuedIndex.binding().fieldModels.get( fieldType ).reference, value );
+ }
+ } );
+ }
+ plan.add( referenceProvider( name + "_document_empty", name ), document -> { } );
+ plan.execute().join();
+
+ // Check that all documents are searchable
+ SearchResultAssert.assertThat( mainIndex.createScope().query()
+ .where( f -> f.matchAll() )
+ .routing( name )
+ .toQuery() )
+ .hasTotalHitCount( mainIndexDocumentFieldValues.size() + 1 /* +1 for the empty document */ );
+ SearchResultAssert.assertThat( compatibleIndex.createScope().query()
+ .where( f -> f.matchAll() )
+ .routing( name )
+ .toQuery() )
+ .hasTotalHitCount( otherIndexDocumentFieldValues.size() );
+ SearchResultAssert.assertThat( rawFieldCompatibleIndex.createScope().query()
+ .where( f -> f.matchAll() )
+ .routing( name )
+ .toQuery() )
+ .hasTotalHitCount( otherIndexDocumentFieldValues.size() );
+ SearchResultAssert.assertThat( nullOnlyIndex.createScope().query()
+ .where( f -> f.matchAll() )
+ .routing( name )
+ .toQuery() )
+ .hasTotalHitCount( 1 );
+ SearchResultAssert.assertThat( multiValuedIndex.createScope().query()
+ .where( f -> f.matchAll() )
+ .routing( name )
+ .toQuery() )
+ .hasTotalHitCount( multiValuedIndexDocumentFieldValues.size() + 1 /* +1 for the empty document */ );
}
- plan.add( referenceProvider( "document_empty" ), document -> { } );
- plan.execute().join();
-
- // Check that all documents are searchable
- SearchResultAssert.assertThat( mainIndex.createScope().query()
- .where( f -> f.matchAll() )
- .toQuery() )
- .hasTotalHitCount( mainIndexDocumentFieldValues.size() + 1 /* +1 for the empty document */ );
- SearchResultAssert.assertThat( compatibleIndex.createScope().query()
- .where( f -> f.matchAll() )
- .toQuery() )
- .hasTotalHitCount( otherIndexDocumentFieldValues.size() );
- SearchResultAssert.assertThat( rawFieldCompatibleIndex.createScope().query()
- .where( f -> f.matchAll() )
- .toQuery() )
- .hasTotalHitCount( otherIndexDocumentFieldValues.size() );
- SearchResultAssert.assertThat( nullOnlyIndex.createScope().query()
- .where( f -> f.matchAll() )
- .toQuery() )
- .hasTotalHitCount( 1 );
- SearchResultAssert.assertThat( multiValuedIndex.createScope().query()
- .where( f -> f.matchAll() )
- .toQuery() )
- .hasTotalHitCount( multiValuedIndexDocumentFieldValues.size() + 1 /* +1 for the empty document */ );
- }
-
- private SimpleFieldModel mapField(IndexSchemaElement parent, String prefix,
- Consumer> additionalConfiguration) {
- return SimpleFieldModel.mapper( typeDescriptor, additionalConfiguration )
- .map( parent, prefix + typeDescriptor.getUniqueName() );
- }
-
- private SimpleFieldModel mapMultiValuedField(IndexSchemaElement parent, String prefix,
- Consumer> additionalConfiguration) {
- return SimpleFieldModel.mapper( typeDescriptor, additionalConfiguration )
- .mapMultiValued( parent, prefix + typeDescriptor.getUniqueName() );
- }
-
- private class IndexBinding {
- final SimpleFieldModel fieldModel;
- final SimpleFieldModel fieldWithConverterModel;
- final SimpleFieldModel fieldWithAggregationDisabledModel;
+ }
+
+ private static class IndexBinding {
+ final SimpleFieldModelsByType fieldModels;
+ final SimpleFieldModelsByType fieldWithConverterModels;
+ final SimpleFieldModelsByType fieldWithAggregationDisabledModels;
final ObjectBinding flattenedObject;
final ObjectBinding nestedObject;
IndexBinding(IndexSchemaElement root) {
- fieldModel = mapField(
- root, "",
- c -> c.aggregable( Aggregable.YES )
- );
- fieldWithConverterModel = mapField(
- root, "converted_",
- c -> c.aggregable( Aggregable.YES )
+ fieldModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "", c -> c.aggregable( Aggregable.YES ) );
+ fieldWithConverterModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "converted_", c -> c.aggregable( Aggregable.YES )
.dslConverter( ValueWrapper.class, ValueWrapper.toIndexFieldConverter() )
- .projectionConverter( ValueWrapper.class, ValueWrapper.fromIndexFieldConverter() )
- );
- fieldWithAggregationDisabledModel = mapField(
- root, "nonAggregable_",
- c -> c.aggregable( Aggregable.NO )
- );
+ .projectionConverter( ValueWrapper.class, ValueWrapper.fromIndexFieldConverter() ) );
+ fieldWithAggregationDisabledModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "nonAggregable_", c -> c.aggregable( Aggregable.NO ) );
flattenedObject = new ObjectBinding( root, "flattenedObject", ObjectFieldStorage.FLATTENED );
nestedObject = new ObjectBinding( root, "nestedObject", ObjectFieldStorage.NESTED );
}
}
- private class ObjectBinding {
+ private static class ObjectBinding {
final String relativeFieldName;
final IndexObjectFieldReference self;
- final SimpleFieldModel fieldModel;
+ final SimpleFieldModelsByType fieldModels;
ObjectBinding(IndexSchemaElement parent, String relativeFieldName, ObjectFieldStorage storage) {
this.relativeFieldName = relativeFieldName;
IndexSchemaObjectField objectField = parent.objectField( relativeFieldName, storage );
self = objectField.toReference();
- fieldModel = mapField(
- objectField, "", ignored -> { }
- );
+ fieldModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, objectField, "" );
}
}
- private class RawFieldCompatibleIndexBinding {
- final SimpleFieldModel fieldWithConverterModel;
+ private static class RawFieldCompatibleIndexBinding {
+ final SimpleFieldModelsByType fieldWithConverterModels;
RawFieldCompatibleIndexBinding(IndexSchemaElement root) {
/*
* Add a field with the same name as the fieldWithConverterModel from IndexMapping,
* but with an incompatible projection converter.
*/
- fieldWithConverterModel = mapField(
- root, "converted_",
- c -> c.aggregable( Aggregable.YES )
+ fieldWithConverterModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "converted_", c -> c.aggregable( Aggregable.YES )
.dslConverter( ValueWrapper.class, ValueWrapper.toIndexFieldConverter() )
- .projectionConverter( ValueWrapper.class, new IncompatibleProjectionConverter() )
- );
+ .projectionConverter( ValueWrapper.class, new IncompatibleProjectionConverter() ) );
}
- private class IncompatibleProjectionConverter
- implements FromDocumentFieldValueConverter {
+ @SuppressWarnings("rawtypes")
+ private static class IncompatibleProjectionConverter
+ implements FromDocumentFieldValueConverter {
@Override
- public ValueWrapper convert(F value, FromDocumentFieldValueConvertContext context) {
+ public ValueWrapper convert(Object value, FromDocumentFieldValueConvertContext context) {
return null;
}
}
}
- private class IncompatibleIndexBinding {
+ private static class IncompatibleIndexBinding {
IncompatibleIndexBinding(IndexSchemaElement root) {
/*
- * Add a field with the same name as the fieldModel from IndexMapping,
+ * Add fields with the same name as the fieldsModels from IndexMapping,
* but with an incompatible type.
*/
- IncompatibleFieldModel.mapper( FieldTypeDescriptor.getIncompatible( typeDescriptor )::configure )
- .map( root, typeDescriptor.getUniqueName() );
+ mapFieldsWithIncompatibleType( root );
+ }
+
+ private static void mapFieldsWithIncompatibleType(IndexSchemaElement parent) {
+ supportedFieldTypes.forEach( typeDescriptor ->
+ IncompatibleFieldModel.mapper( FieldTypeDescriptor.getIncompatible( typeDescriptor )::configure )
+ .map( parent, "" + typeDescriptor.getUniqueName() )
+ );
}
}
- private class MultiValuedIndexBinding {
- final SimpleFieldModel fieldModel;
+ private static class MultiValuedIndexBinding {
+ final SimpleFieldModelsByType fieldModels;
MultiValuedIndexBinding(IndexSchemaElement root) {
- fieldModel = mapMultiValuedField(
- root, "",
- c -> c.aggregable( Aggregable.YES )
- );
+ fieldModels = SimpleFieldModelsByType.mapAllMultiValued( supportedFieldTypes, root,
+ "", c -> c.aggregable( Aggregable.YES ) );
}
}
diff --git a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/SingleFieldAggregationUnsupportedTypesIT.java b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/SingleFieldAggregationUnsupportedTypesIT.java
index 2fddbfa578f..d2756b9ae1d 100644
--- a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/SingleFieldAggregationUnsupportedTypesIT.java
+++ b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/SingleFieldAggregationUnsupportedTypesIT.java
@@ -7,8 +7,10 @@
package org.hibernate.search.integrationtest.backend.tck.search.aggregation;
import java.util.ArrayList;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;
+import java.util.Set;
import java.util.function.Consumer;
import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement;
@@ -18,75 +20,73 @@
import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.expectations.UnsupportedSingleFieldAggregationExpectations;
import org.hibernate.search.integrationtest.backend.tck.testsupport.types.FieldTypeDescriptor;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModel;
+import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModelsByType;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.rule.SearchSetupHelper;
import org.hibernate.search.util.common.SearchException;
import org.hibernate.search.util.impl.integrationtest.common.FailureReportUtils;
import org.hibernate.search.util.impl.integrationtest.mapper.stub.SimpleMappedIndex;
-
-import org.assertj.core.api.Assertions;
import org.hibernate.search.util.impl.test.annotation.PortedFromSearch5;
import org.hibernate.search.util.impl.test.annotation.TestForIssue;
-import org.hibernate.search.util.impl.test.singleinstance.BeforeAll;
-import org.hibernate.search.util.impl.test.singleinstance.InstanceRule;
-import org.hibernate.search.util.impl.test.singleinstance.SingleInstanceRunnerWithParameters;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import org.assertj.core.api.Assertions;
+
/**
* Tests behavior common to all single-field aggregations (range, terms, ...)
* on unsupported types.
*/
@RunWith(Parameterized.class)
-@Parameterized.UseParametersRunnerFactory(SingleInstanceRunnerWithParameters.Factory.class)
public class SingleFieldAggregationUnsupportedTypesIT<F> {
- @Parameterized.Parameters(name = "{0} - {1}")
- public static Object[][] aggregationTypeCombinations() {
- List<Object[]> combinations = new ArrayList<>();
+ private static Set<FieldTypeDescriptor<?>> unsupportedFieldTypes;
+
+ @Parameterized.Parameters(name = "{1}")
+ public static Object[][] parameters() {
+ unsupportedFieldTypes = new LinkedHashSet<>();
+ List<Object[]> parameters = new ArrayList<>();
for ( AggregationDescriptor aggregationDescriptor : AggregationDescriptor.getAll() ) {
for ( FieldTypeDescriptor<?> fieldTypeDescriptor : FieldTypeDescriptor.getAll() ) {
Optional<? extends UnsupportedSingleFieldAggregationExpectations> expectations =
aggregationDescriptor.getSingleFieldAggregationExpectations( fieldTypeDescriptor ).getUnsupported();
if ( expectations.isPresent() ) {
- combinations.add( new Object[] {
- aggregationDescriptor,
- fieldTypeDescriptor,
- expectations.get()
- } );
+ unsupportedFieldTypes.add( fieldTypeDescriptor );
+ parameters.add( new Object[] { fieldTypeDescriptor, expectations.get() } );
}
}
}
- return combinations.toArray( new Object[0][] );
+ return parameters.toArray( new Object[0][] );
}
- @InstanceRule
- public SearchSetupHelper setupHelper = new SearchSetupHelper();
+ @ClassRule
+ public static final SearchSetupHelper setupHelper = new SearchSetupHelper();
- private final FieldTypeDescriptor<F> typeDescriptor;
+ private static final SimpleMappedIndex index =
+ SimpleMappedIndex.of( "Main", IndexBinding::new );
- private final UnsupportedSingleFieldAggregationExpectations expectations;
+ @BeforeClass
+ public static void setup() {
+ setupHelper.start().withIndex( index ).setup();
+ }
- private SimpleMappedIndex index = SimpleMappedIndex.of( "Main", IndexBinding::new );
+ private final FieldTypeDescriptor<F> fieldType;
+ private final UnsupportedSingleFieldAggregationExpectations expectations;
- public SingleFieldAggregationUnsupportedTypesIT(AggregationDescriptor thisIsJustForTestName,
- FieldTypeDescriptor<F> typeDescriptor,
+ public SingleFieldAggregationUnsupportedTypesIT(FieldTypeDescriptor<F> fieldType,
UnsupportedSingleFieldAggregationExpectations expectations) {
- this.typeDescriptor = typeDescriptor;
+ this.fieldType = fieldType;
this.expectations = expectations;
}
- @BeforeAll
- public void setup() {
- setupHelper.start().withIndex( index ).setup();
- }
-
@Test
@TestForIssue(jiraKey = "HSEARCH-1748")
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.RangeFacetingTest.testRangeQueryWithUnsupportedType")
public void simple() {
- SimpleFieldModel<F> model = index.binding().fieldModel;
+ SimpleFieldModel<F> model = index.binding().fieldModels.get( fieldType );
String fieldPath = model.relativeFieldName;
Assertions.assertThatThrownBy(
@@ -103,18 +103,15 @@ public void simple() {
private SimpleFieldModel<F> mapField(IndexSchemaElement parent, String prefix,
Consumer<StandardIndexFieldTypeOptionsStep<?, F>> additionalConfiguration) {
- return SimpleFieldModel.mapper( typeDescriptor, additionalConfiguration )
- .map( parent, prefix + typeDescriptor.getUniqueName() );
+ return SimpleFieldModel.mapper( fieldType, additionalConfiguration )
+ .map( parent, prefix + fieldType.getUniqueName() );
}
- private class IndexBinding {
- final SimpleFieldModel<F> fieldModel;
+ private static class IndexBinding {
+ final SimpleFieldModelsByType fieldModels;
IndexBinding(IndexSchemaElement root) {
- fieldModel = mapField(
- root, "",
- c -> { }
- );
+ fieldModels = SimpleFieldModelsByType.mapAll( unsupportedFieldTypes, root, "" );
}
}
}
diff --git a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/TermsAggregationSpecificsIT.java b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/TermsAggregationSpecificsIT.java
index ee2a3bb4cf4..6e0afd599dc 100644
--- a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/TermsAggregationSpecificsIT.java
+++ b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/TermsAggregationSpecificsIT.java
@@ -15,15 +15,16 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement;
import org.hibernate.search.engine.backend.types.Aggregable;
import org.hibernate.search.engine.backend.types.Searchable;
-import org.hibernate.search.engine.backend.types.dsl.StandardIndexFieldTypeOptionsStep;
import org.hibernate.search.engine.backend.work.execution.spi.IndexIndexingPlan;
import org.hibernate.search.engine.backend.common.DocumentReference;
import org.hibernate.search.engine.search.aggregation.AggregationKey;
@@ -31,7 +32,7 @@
import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.AggregationDescriptor;
import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.TermsAggregationDescriptor;
import org.hibernate.search.integrationtest.backend.tck.testsupport.types.FieldTypeDescriptor;
-import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModel;
+import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModelsByType;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.TckConfiguration;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.ValueWrapper;
import org.hibernate.search.integrationtest.backend.tck.testsupport.util.rule.SearchSetupHelper;
@@ -41,10 +42,9 @@
import org.assertj.core.api.Assertions;
import org.hibernate.search.util.impl.test.annotation.PortedFromSearch5;
import org.hibernate.search.util.impl.test.annotation.TestForIssue;
-import org.hibernate.search.util.impl.test.singleinstance.BeforeAll;
-import org.hibernate.search.util.impl.test.singleinstance.InstanceRule;
-import org.hibernate.search.util.impl.test.singleinstance.SingleInstanceRunnerWithParameters;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -55,103 +55,76 @@
* Behavior common to all single-field aggregations is tested in {@link SingleFieldAggregationBaseIT}.
*/
@RunWith(Parameterized.class)
-@Parameterized.UseParametersRunnerFactory(SingleInstanceRunnerWithParameters.Factory.class)
public class TermsAggregationSpecificsIT<F> {
private static final String AGGREGATION_NAME = "aggregationName";
+ private static Set<FieldTypeDescriptor<?>> supportedFieldTypes;
+ private static List<DataSet<?>> dataSets;
+
@Parameterized.Parameters(name = "{0}")
- public static Object[][] supportedTypes() {
- List combinations = new ArrayList<>();
+ public static Object[][] parameters() {
+ supportedFieldTypes = new LinkedHashSet<>();
+ dataSets = new ArrayList<>();
+ List<Object[]> parameters = new ArrayList<>();
AggregationDescriptor aggregationDescriptor = new TermsAggregationDescriptor();
- for ( FieldTypeDescriptor<?> fieldTypeDescriptor : FieldTypeDescriptor.getAll() ) {
- if ( aggregationDescriptor.getSingleFieldAggregationExpectations( fieldTypeDescriptor ).isSupported() ) {
- combinations.add( new Object[] {
- fieldTypeDescriptor
- } );
+ for ( FieldTypeDescriptor<?> fieldType : FieldTypeDescriptor.getAll() ) {
+ if ( aggregationDescriptor.getSingleFieldAggregationExpectations( fieldType ).isSupported() ) {
+ supportedFieldTypes.add( fieldType );
+ DataSet<?> dataSet = new DataSet<>( fieldType );
+ dataSets.add( dataSet );
+ parameters.add( new Object[] { fieldType, dataSet } );
}
}
- return combinations.toArray( new Object[0][] );
+ return parameters.toArray( new Object[0][] );
}
- @InstanceRule
- public SearchSetupHelper setupHelper = new SearchSetupHelper();
-
- private final FieldTypeDescriptor<F> typeDescriptor;
- private final List<F> valuesInAscendingOrder;
- private final List<F> valuesInDescendingOrder;
- private final List<F> valuesInAscendingDocumentCountOrder;
- private final List<F> valuesInDescendingDocumentCountOrder;
- private final Map<F, List<String>> documentIdPerTerm;
-
- private SimpleMappedIndex index = SimpleMappedIndex.of( "Main", IndexBinding::new );
-
- public TermsAggregationSpecificsIT(FieldTypeDescriptor<F> typeDescriptor) {
- this.typeDescriptor = typeDescriptor;
- this.documentIdPerTerm = new LinkedHashMap<>();
-
- this.valuesInAscendingOrder = typeDescriptor.getAscendingUniqueTermValues().getSingle();
-
- this.valuesInDescendingOrder = new ArrayList<>( valuesInAscendingOrder );
- Collections.reverse( valuesInDescendingOrder );
-
- this.valuesInDescendingDocumentCountOrder = new ArrayList<>( valuesInAscendingOrder );
- /*
- * Mess with the value order, because some tests would be pointless
- * if the document count order was the same as (or the opposite of) the value order
- */
- valuesInDescendingDocumentCountOrder.add( valuesInDescendingDocumentCountOrder.get( 0 ) );
- valuesInDescendingDocumentCountOrder.remove( 0 );
- valuesInDescendingDocumentCountOrder.add( valuesInDescendingDocumentCountOrder.get( 0 ) );
- valuesInDescendingDocumentCountOrder.remove( 0 );
-
- this.valuesInAscendingDocumentCountOrder = new ArrayList<>( valuesInDescendingDocumentCountOrder );
- Collections.reverse( valuesInAscendingDocumentCountOrder );
-
- // Simple dataset: strictly decreasing number of documents for each term
- int documentIdAsInteger = 0;
- int numberOfDocuments = valuesInDescendingDocumentCountOrder.size();
- for ( F value : valuesInDescendingDocumentCountOrder ) {
- ArrayList<String> documentIdsForTerm = new ArrayList<>();
- documentIdPerTerm.put( value, documentIdsForTerm );
- for ( int i = 0; i < numberOfDocuments; i++ ) {
- String documentId = "document_" + documentIdAsInteger;
- ++documentIdAsInteger;
- documentIdsForTerm.add( documentId );
- }
- --numberOfDocuments;
+ @ClassRule
+ public static final SearchSetupHelper setupHelper = new SearchSetupHelper();
+
+ private static final SimpleMappedIndex index =
+ SimpleMappedIndex.of( "Main", IndexBinding::new );
+
+ @BeforeClass
+ public static void setup() {
+ setupHelper.start().withIndex( index ).setup();
+
+ for ( DataSet<?> dataSet : dataSets ) {
+ dataSet.init();
}
}
- @BeforeAll
- public void setup() {
- setupHelper.start().withIndex( index ).setup();
+ private final FieldTypeDescriptor<F> fieldType;
+ private final DataSet<F> dataSet;
- initData();
+ public TermsAggregationSpecificsIT(FieldTypeDescriptor<F> fieldType, DataSet<F> dataSet) {
+ this.fieldType = fieldType;
+ this.dataSet = dataSet;
}
@Test
public void superClassFieldType() {
- Class<? super F> superClass = typeDescriptor.getJavaType().getSuperclass();
+ Class<? super F> superClass = fieldType.getJavaType().getSuperclass();
doTestSuperClassFieldType( superClass );
}
private void doTestSuperClassFieldType(Class superClass) {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
.aggregation( aggregationKey, f -> f.terms().field( fieldPath, superClass ) )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
aggregationKey,
// All documents should be mentioned in the aggregation, even those excluded by the limit/offset
containsInAnyOrder( c -> {
- documentIdPerTerm.forEach( (key, value) -> c.accept( key, (long) value.size() ) );
+ dataSet.documentIdPerTerm.forEach( (key, value) -> c.accept( key, (long) value.size() ) );
} )
);
}
@@ -161,11 +134,11 @@ private void doTestSuperClassFieldType(Class superClass) {
*/
@Test
public void predicate() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
- Map.Entry<F, List<String>> firstTermEntry = documentIdPerTerm.entrySet().iterator().next();
+ Map.Entry<F, List<String>> firstTermEntry = dataSet.documentIdPerTerm.entrySet().iterator().next();
SearchResultAssert.assertThat(
index.createScope().query()
@@ -173,7 +146,8 @@ public void predicate() {
.matching( firstTermEntry.getValue().get( 0 ) )
.matching( firstTermEntry.getValue().get( 1 ) )
)
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() ) )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -190,20 +164,20 @@ public void predicate() {
*/
@Test
public void limitAndOffset() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() ) )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) )
.fetch( 3, 4 )
)
.aggregation(
aggregationKey,
// All documents should be mentioned in the aggregation, even those excluded by the limit/offset
containsInAnyOrder( c -> {
- documentIdPerTerm.forEach( (key, value) -> c.accept( key, (long) value.size() ) );
+ dataSet.documentIdPerTerm.forEach( (key, value) -> c.accept( key, (long) value.size() ) );
} )
);
}
@@ -211,21 +185,22 @@ public void limitAndOffset() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testDefaultSortOrderIsCount")
public void order_default() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() ) )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) )
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
aggregationKey,
// The result should present buckets with decreasing term count
containsExactly( c -> {
- for ( F value : valuesInDescendingDocumentCountOrder ) {
- c.accept( value, (long) documentIdPerTerm.get( value ).size() );
+ for ( F value : dataSet.valuesInDescendingDocumentCountOrder ) {
+ c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() );
}
} )
);
@@ -234,23 +209,24 @@ public void order_default() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testCountSortOrderDesc")
public void orderByCountDescending() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.orderByCountDescending()
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
aggregationKey,
// The result should present buckets with decreasing term count
containsExactly( c -> {
- for ( F value : valuesInDescendingDocumentCountOrder ) {
- c.accept( value, (long) documentIdPerTerm.get( value ).size() );
+ for ( F value : dataSet.valuesInDescendingDocumentCountOrder ) {
+ c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() );
}
} )
);
@@ -261,23 +237,24 @@ public void orderByCountDescending() {
public void orderByCountAscending() {
assumeNonDefaultOrdersSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.orderByCountAscending()
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
aggregationKey,
// The result should present buckets with increasing term count
containsExactly( c -> {
- for ( F value : valuesInAscendingDocumentCountOrder ) {
- c.accept( value, (long) documentIdPerTerm.get( value ).size() );
+ for ( F value : dataSet.valuesInAscendingDocumentCountOrder ) {
+ c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() );
}
} )
);
@@ -287,23 +264,24 @@ public void orderByCountAscending() {
public void orderByTermDescending() {
assumeNonDefaultOrdersSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.orderByTermDescending()
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
aggregationKey,
- // The result should present buckets with decreasing term values
+ // The result should present buckets with decreasing term values
containsExactly( c -> {
- for ( F value : valuesInDescendingOrder ) {
- c.accept( value, (long) documentIdPerTerm.get( value ).size() );
+ for ( F value : dataSet.valuesInDescendingOrder ) {
+ c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() );
}
} )
);
@@ -314,23 +292,24 @@ public void orderByTermDescending() {
public void orderByTermAscending() {
assumeNonDefaultOrdersSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.orderByTermAscending()
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
aggregationKey,
- // The result should present buckets with increasing term values
+ // The result should present buckets with increasing term values
containsExactly( c -> {
- for ( F value : valuesInAscendingOrder ) {
- c.accept( value, (long) documentIdPerTerm.get( value ).size() );
+ for ( F value : dataSet.valuesInAscendingOrder ) {
+ c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() );
}
} )
);
@@ -339,22 +318,23 @@ public void orderByTermAscending() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testZeroCountsExcluded")
public void minDocumentCount_positive() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.minDocumentCount( 2 )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
aggregationKey,
// Only buckets with the minimum required document count should appear in the result
containsInAnyOrder( c -> {
- documentIdPerTerm.forEach( (key, value) -> {
+ dataSet.documentIdPerTerm.forEach( (key, value) -> {
int documentCount = value.size();
if ( documentCount >= 2 ) {
c.accept( key, (long) documentCount );
@@ -367,11 +347,11 @@ public void minDocumentCount_positive() {
@Test
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testZeroCountsIncluded")
public void minDocumentCount_zero() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
- Map.Entry<F, List<String>> firstTermEntry = documentIdPerTerm.entrySet().iterator().next();
+ Map.Entry<F, List<String>> firstTermEntry = dataSet.documentIdPerTerm.entrySet().iterator().next();
SearchResultAssert.assertThat(
index.createScope().query()
@@ -379,19 +359,20 @@ public void minDocumentCount_zero() {
.where( f -> f.matchAll().except(
f.id().matchingAny( firstTermEntry.getValue() )
) )
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.minDocumentCount( 0 )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
aggregationKey,
/*
- * Buckets with a count of 0 should appear for values that are in the index,
- * Buckets with a count of 0 should appear for values that are in the index,
* but are not encountered in any matching document.
*/
containsInAnyOrder( c -> {
- documentIdPerTerm.entrySet().stream().skip( 1 ).forEach( e -> {
+ dataSet.documentIdPerTerm.entrySet().stream().skip( 1 ).forEach( e -> {
c.accept( e.getKey(), (long) e.getValue().size() );
} );
c.accept( firstTermEntry.getKey(), 0L );
@@ -401,7 +382,7 @@ public void minDocumentCount_zero() {
@Test
public void minDocumentCount_zero_noMatch() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
@@ -409,9 +390,10 @@ public void minDocumentCount_zero_noMatch() {
index.createScope().query()
// Exclude all documents from the matches
.where( f -> f.id().matching( "none" ) )
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.minDocumentCount( 0 )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -420,7 +402,7 @@ public void minDocumentCount_zero_noMatch() {
* All indexed terms should appear in a bucket, in ascending value order, with a count of zero.
*/
containsInAnyOrder( c -> {
- for ( F value : valuesInAscendingOrder ) {
+ for ( F value : dataSet.valuesInAscendingOrder ) {
c.accept( value, 0L );
}
} )
@@ -431,7 +413,7 @@ public void minDocumentCount_zero_noMatch() {
public void minDocumentCount_zero_noMatch_orderByTermDescending() {
assumeNonDefaultOrdersSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
@@ -439,10 +421,11 @@ public void minDocumentCount_zero_noMatch_orderByTermDescending() {
index.createScope().query()
// Exclude all documents from the matches
.where( f -> f.id().matching( "none" ) )
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.minDocumentCount( 0 )
.orderByTermDescending()
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -451,7 +434,7 @@ public void minDocumentCount_zero_noMatch_orderByTermDescending() {
* All indexed terms should appear in a bucket, in descending value order, with a count of zero.
*/
containsInAnyOrder( c -> {
- for ( F value : valuesInDescendingOrder ) {
+ for ( F value : dataSet.valuesInDescendingOrder ) {
c.accept( value, 0L );
}
} )
@@ -460,10 +443,10 @@ public void minDocumentCount_zero_noMatch_orderByTermDescending() {
@Test
public void minDocumentCount_negative() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
Assertions.assertThatThrownBy( () ->
- index.createScope().aggregation().terms().field( fieldPath, typeDescriptor.getJavaType() )
+ index.createScope().aggregation().terms().field( fieldPath, fieldType.getJavaType() )
.minDocumentCount( -1 ) )
.isInstanceOf( IllegalArgumentException.class )
.hasMessageContaining( "'minDocumentCount'" )
@@ -474,15 +457,16 @@ public void minDocumentCount_negative() {
@TestForIssue(jiraKey = "HSEARCH-776")
@PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testMaxFacetCounts")
public void maxTermCount_positive() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.maxTermCount( 1 )
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -491,30 +475,31 @@ public void maxTermCount_positive() {
* Only the bucket with the most documents should be returned.
*/
containsInAnyOrder( c -> {
- F valueWithMostDocuments = valuesInDescendingDocumentCountOrder.get( 0 );
- c.accept( valueWithMostDocuments, (long) documentIdPerTerm.get( valueWithMostDocuments ).size() );
+ F valueWithMostDocuments = dataSet.valuesInDescendingDocumentCountOrder.get( 0 );
+ c.accept( valueWithMostDocuments, (long) dataSet.documentIdPerTerm.get( valueWithMostDocuments ).size() );
} )
);
}
/**
* Test maxTermCount with a non-default sort by ascending term value.
- * The returned terms should be the "lowest" values.
- * The returned terms should be the "lowest" values.
*/
@Test
public void maxTermCount_positive_orderByTermAscending() {
assumeNonDefaultOrdersSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.maxTermCount( 1 )
.orderByTermAscending()
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -523,8 +508,8 @@ public void maxTermCount_positive_orderByTermAscending() {
* Only the bucket with the "lowest" value should be returned.
*/
containsInAnyOrder( c -> {
- F lowestValue = valuesInAscendingOrder.get( 0 );
- c.accept( lowestValue, (long) documentIdPerTerm.get( lowestValue ).size() );
+ F lowestValue = dataSet.valuesInAscendingOrder.get( 0 );
+ c.accept( lowestValue, (long) dataSet.documentIdPerTerm.get( lowestValue ).size() );
} )
);
}
@@ -533,16 +518,17 @@ public void maxTermCount_positive_orderByTermAscending() {
public void maxTermCount_positive_orderByCountAscending() {
assumeNonDefaultOrdersSupported();
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
AggregationKey> aggregationKey = AggregationKey.of( AGGREGATION_NAME );
SearchResultAssert.assertThat(
matchAllQuery()
- .aggregation( aggregationKey, f -> f.terms().field( fieldPath, typeDescriptor.getJavaType() )
+ .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() )
.maxTermCount( 1 )
.orderByCountAscending()
)
+ .routing( dataSet.name )
.toQuery()
)
.aggregation(
@@ -551,18 +537,18 @@ public void maxTermCount_positive_orderByCountAscending() {
* Only the bucket with the least documents should be returned.
*/
containsInAnyOrder( c -> {
- F valueWithLeastDocuments = valuesInAscendingDocumentCountOrder.get( 0 );
- c.accept( valueWithLeastDocuments, (long) documentIdPerTerm.get( valueWithLeastDocuments ).size() );
+ F valueWithLeastDocuments = dataSet.valuesInAscendingDocumentCountOrder.get( 0 );
+ c.accept( valueWithLeastDocuments, (long) dataSet.documentIdPerTerm.get( valueWithLeastDocuments ).size() );
} )
);
}
@Test
public void maxTermCount_zero() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
Assertions.assertThatThrownBy( () ->
- index.createScope().aggregation().terms().field( fieldPath, typeDescriptor.getJavaType() )
+ index.createScope().aggregation().terms().field( fieldPath, fieldType.getJavaType() )
.maxTermCount( 0 ) )
.isInstanceOf( IllegalArgumentException.class )
.hasMessageContaining( "'maxTermCount'" )
@@ -571,10 +557,10 @@ public void maxTermCount_zero() {
@Test
public void maxTermCount_negative() {
- String fieldPath = index.binding().fieldModel.relativeFieldName;
+ String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName;
Assertions.assertThatThrownBy( () ->
- index.createScope().aggregation().terms().field( fieldPath, typeDescriptor.getJavaType() )
+ index.createScope().aggregation().terms().field( fieldPath, fieldType.getJavaType() )
.maxTermCount( -1 ) )
.isInstanceOf( IllegalArgumentException.class )
.hasMessageContaining( "'maxTermCount'" )
@@ -592,42 +578,10 @@ private void assumeNonDefaultOrdersSupported() {
);
}
- private void initData() {
- IndexIndexingPlan<?> plan = index.createIndexingPlan();
- int documentCount = 0;
- for ( Map.Entry<F, List<String>> entry : documentIdPerTerm.entrySet() ) {
- F value = entry.getKey();
- for ( String documentId : entry.getValue() ) {
- plan.add( referenceProvider( documentId ), document -> {
- document.addValue( index.binding().fieldModel.reference, value );
- document.addValue( index.binding().fieldWithConverterModel.reference, value );
- } );
- ++documentCount;
- }
- }
- plan.add( referenceProvider( "document_empty" ), document -> { } );
- ++documentCount;
- plan.execute().join();
-
- // Check that all documents are searchable
- SearchResultAssert.assertThat(
- index.createScope().query()
- .where( f -> f.matchAll() )
- .toQuery()
- )
- .hasTotalHitCount( documentCount );
- }
-
- private SimpleFieldModel mapField(IndexSchemaElement parent, String prefix,
- Consumer> additionalConfiguration) {
- return SimpleFieldModel.mapper( typeDescriptor, additionalConfiguration )
- .map( parent, prefix + typeDescriptor.getUniqueName() );
- }
-
@SuppressWarnings("unchecked")
private Consumer<Map<F, Long>> containsExactly(Consumer<BiConsumer<F, Long>> expectationBuilder) {
List<Map.Entry<F, Long>> expected = new ArrayList<>();
- expectationBuilder.accept( (k, v) -> expected.add( entry( typeDescriptor.toExpectedDocValue( k ), v ) ) );
+ expectationBuilder.accept( (k, v) -> expected.add( entry( fieldType.toExpectedDocValue( k ), v ) ) );
return actual -> assertThat( normalize( actual ) )
.containsExactly( normalize( expected ).toArray( new Map.Entry[0] ) );
}
@@ -635,31 +589,104 @@ private Consumer> containsExactly(Consumer> ex
@SuppressWarnings("unchecked")
private Consumer> containsInAnyOrder(Consumer> expectationBuilder) {
List> expected = new ArrayList<>();
- expectationBuilder.accept( (k, v) -> expected.add( entry( typeDescriptor.toExpectedDocValue( k ), v ) ) );
+ expectationBuilder.accept( (k, v) -> expected.add( entry( fieldType.toExpectedDocValue( k ), v ) ) );
return actual -> assertThat( normalize( actual ).entrySet() )
.containsExactlyInAnyOrder( normalize( expected ).toArray( new Map.Entry[0] ) );
}
- private class IndexBinding {
- final SimpleFieldModel fieldModel;
- final SimpleFieldModel fieldWithConverterModel;
- final SimpleFieldModel fieldWithAggregationDisabledModel;
+ private static class DataSet {
+ final FieldTypeDescriptor fieldType;
+ final String name;
+ final Map> documentIdPerTerm;
+ final List valuesInAscendingOrder;
+ final List valuesInDescendingOrder;
+ final List valuesInAscendingDocumentCountOrder;
+ final List valuesInDescendingDocumentCountOrder;
+
+ private DataSet(FieldTypeDescriptor fieldType) {
+ this.fieldType = fieldType;
+ this.name = fieldType.getUniqueName();
+ this.documentIdPerTerm = new LinkedHashMap<>();
+
+ this.valuesInAscendingOrder = fieldType.getAscendingUniqueTermValues().getSingle();
+
+ this.valuesInDescendingOrder = new ArrayList<>( valuesInAscendingOrder );
+ Collections.reverse( valuesInDescendingOrder );
+
+ this.valuesInDescendingDocumentCountOrder = new ArrayList<>( valuesInAscendingOrder );
+ /*
+ * Mess with the value order, because some tests would be pointless
+ * if the document count order was the same as (or the opposite of) the value order
+ */
+ valuesInDescendingDocumentCountOrder.add( valuesInDescendingDocumentCountOrder.get( 0 ) );
+ valuesInDescendingDocumentCountOrder.remove( 0 );
+ valuesInDescendingDocumentCountOrder.add( valuesInDescendingDocumentCountOrder.get( 0 ) );
+ valuesInDescendingDocumentCountOrder.remove( 0 );
+
+ this.valuesInAscendingDocumentCountOrder = new ArrayList<>( valuesInDescendingDocumentCountOrder );
+ Collections.reverse( valuesInAscendingDocumentCountOrder );
+
+ // Simple dataset: strictly decreasing number of documents for each term
+ int documentIdAsInteger = 0;
+ int numberOfDocuments = valuesInDescendingDocumentCountOrder.size();
+ for ( F value : valuesInDescendingDocumentCountOrder ) {
+ ArrayList documentIdsForTerm = new ArrayList<>();
+ documentIdPerTerm.put( value, documentIdsForTerm );
+ for ( int i = 0; i < numberOfDocuments; i++ ) {
+ String documentId = name + "_document_" + documentIdAsInteger;
+ ++documentIdAsInteger;
+ documentIdsForTerm.add( documentId );
+ }
+ --numberOfDocuments;
+ }
+ }
+
+ private void init() {
+ IndexIndexingPlan> plan = index.createIndexingPlan();
+ int documentCount = 0;
+ for ( Map.Entry> entry : documentIdPerTerm.entrySet() ) {
+ F value = entry.getKey();
+ for ( String documentId : entry.getValue() ) {
+ plan.add( referenceProvider( documentId, name ), document -> {
+ document.addValue( index.binding().fieldModels.get( fieldType ).reference, value );
+ document.addValue( index.binding().fieldWithConverterModels.get( fieldType ).reference, value );
+ } );
+ ++documentCount;
+ }
+ }
+ plan.add( referenceProvider( name + "_document_empty", name ), document -> { } );
+ ++documentCount;
+ plan.execute().join();
+
+ // Check that all documents are searchable
+ SearchResultAssert.assertThat(
+ index.createScope().query()
+ .where( f -> f.matchAll() )
+ .routing( name )
+ .toQuery()
+ )
+ .hasTotalHitCount( documentCount );
+ }
+
+ }
+
+ private static class IndexBinding {
+ final SimpleFieldModelsByType fieldModels;
+ final SimpleFieldModelsByType fieldWithConverterModels;
+ final SimpleFieldModelsByType fieldWithAggregationDisabledModels;
IndexBinding(IndexSchemaElement root) {
- fieldModel = mapField(
- root, "",
- c -> c.aggregable( Aggregable.YES )
+ fieldModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "", c -> c.aggregable( Aggregable.YES )
.searchable( Searchable.NO ) // Terms aggregations should not need this
);
- fieldWithConverterModel = mapField(
- root, "converted_",
- c -> c.aggregable( Aggregable.YES )
+ fieldWithConverterModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "converted_", c -> c.aggregable( Aggregable.YES )
.dslConverter( ValueWrapper.class, ValueWrapper.toIndexFieldConverter() )
.projectionConverter( ValueWrapper.class, ValueWrapper.fromIndexFieldConverter() )
);
- fieldWithAggregationDisabledModel = mapField(
- root, "nonAggregable_",
- c -> c.aggregable( Aggregable.NO )
+ fieldWithAggregationDisabledModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root,
+ "nonAggregable_", c -> c.aggregable( Aggregable.NO )
);
}
}
diff --git a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/RangeAggregationDescriptor.java b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/RangeAggregationDescriptor.java
index 4c8495634ae..57ced3b60c1 100644
--- a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/RangeAggregationDescriptor.java
+++ b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/RangeAggregationDescriptor.java
@@ -93,6 +93,7 @@ > getSingleFieldAggregationExpectations(FieldTypeDescriptor typeDescriptor) {
multiValuedIndexExpected.put( Range.atLeast( ascendingValues.get( 5 ) ), 2L );
return ExpectationsAlternative.supported( new SupportedSingleFieldAggregationExpectations(
+ typeDescriptor, "range",
mainIndexDocumentFieldValues,
otherIndexDocumentFieldValues,
multiValuedIndexDocumentFieldValues
@@ -109,13 +110,13 @@ public AggregationScenario, Long>> withFieldTypeOnMainAndOtherI
}
@Override
- public AggregationScenario> withoutMatch(FieldTypeDescriptor typeDescriptor) {
- return doCreate( noIndexedValueExpected, TypeAssertionHelper.identity( typeDescriptor ) );
+ public AggregationScenario> withoutMatch() {
+ return doCreate( noIndexedValueExpected, TypeAssertionHelper.identity( fieldType() ) );
}
@Override
- public AggregationScenario> onMultiValuedIndex(FieldTypeDescriptor typeDescriptor) {
- return doCreate( multiValuedIndexExpected, TypeAssertionHelper.identity( typeDescriptor ) );
+ public AggregationScenario> onMultiValuedIndex() {
+ return doCreate( multiValuedIndexExpected, TypeAssertionHelper.identity( fieldType() ) );
}
private AggregationScenario, Long>> doCreate(Map, Long> expectedResult,
@@ -166,7 +167,17 @@ public void check(Map, Long> aggregationResult) {
}
private UnsupportedSingleFieldAggregationExpectations unsupportedExpectations(FieldTypeDescriptor typeDescriptor) {
- return (factory, fieldPath) -> factory.range().field( fieldPath, typeDescriptor.getJavaType() );
+ return new UnsupportedSingleFieldAggregationExpectations() {
+ @Override
+ public void trySetup(SearchAggregationFactory factory, String fieldPath) {
+ factory.range().field( fieldPath, typeDescriptor.getJavaType() );
+ }
+
+ @Override
+ public String toString() {
+ return "range on " + typeDescriptor;
+ }
+ };
}
}
diff --git a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/TermsAggregationDescriptor.java b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/TermsAggregationDescriptor.java
index 33fc8d500f3..3fcf4e08b02 100644
--- a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/TermsAggregationDescriptor.java
+++ b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/TermsAggregationDescriptor.java
@@ -101,6 +101,7 @@ > getSingleFieldAggregationExpectations(FieldTypeDescriptor typeDescriptor) {
multiValuedIndexExpected.put( typeDescriptor.toExpectedDocValue( uniqueTermValues.get( 1 ) ), 2L );
return ExpectationsAlternative.supported( new SupportedSingleFieldAggregationExpectations(
+ typeDescriptor, "terms",
mainIndexDocumentFieldValues,
otherIndexDocumentFieldValues,
multiValuedIndexDocumentFieldValues
@@ -116,13 +117,13 @@ public AggregationScenario> withFieldTypeOnMainAndOtherIndex(Ty
}
@Override
- public AggregationScenario> withoutMatch(FieldTypeDescriptor typeDescriptor) {
- return doCreate( Collections.emptyMap(), TypeAssertionHelper.identity( typeDescriptor ) );
+ public AggregationScenario> withoutMatch() {
+ return doCreate( Collections.emptyMap(), TypeAssertionHelper.identity( fieldType() ) );
}
@Override
- public AggregationScenario> onMultiValuedIndex(FieldTypeDescriptor typeDescriptor) {
- return doCreate( multiValuedIndexExpected, TypeAssertionHelper.identity( typeDescriptor ) );
+ public AggregationScenario> onMultiValuedIndex() {
+ return doCreate( multiValuedIndexExpected, TypeAssertionHelper.identity( fieldType() ) );
}
private AggregationScenario> doCreate(Map expectedResult,
@@ -161,7 +162,17 @@ public void check(Map aggregationResult) {
}
private UnsupportedSingleFieldAggregationExpectations unsupportedExpectations(FieldTypeDescriptor typeDescriptor) {
- return (factory, fieldPath) -> factory.terms().field( fieldPath, typeDescriptor.getJavaType() );
+ return new UnsupportedSingleFieldAggregationExpectations() {
+ @Override
+ public void trySetup(SearchAggregationFactory factory, String fieldPath) {
+ factory.terms().field( fieldPath, typeDescriptor.getJavaType() );
+ }
+
+ @Override
+ public String toString() {
+ return "terms on " + typeDescriptor;
+ }
+ };
}
}
diff --git a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/expectations/SupportedSingleFieldAggregationExpectations.java b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/expectations/SupportedSingleFieldAggregationExpectations.java
index d15f42ec7ca..f714bbb182e 100644
--- a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/expectations/SupportedSingleFieldAggregationExpectations.java
+++ b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/operations/expectations/SupportedSingleFieldAggregationExpectations.java
@@ -13,18 +13,36 @@
public abstract class SupportedSingleFieldAggregationExpectations {
+ private final FieldTypeDescriptor fieldType;
+ private final String aggregationName;
private final List mainIndexDocumentFieldValues;
private final List compatibleIndexDocumentFieldValues;
private final List> multiValuedIndexDocumentFieldValues;
- protected SupportedSingleFieldAggregationExpectations(List mainIndexDocumentFieldValues,
+ protected SupportedSingleFieldAggregationExpectations(FieldTypeDescriptor fieldType, String aggregationName,
+ List mainIndexDocumentFieldValues,
List compatibleIndexDocumentFieldValues,
List> multiValuedIndexDocumentFieldValues) {
+ this.fieldType = fieldType;
+ this.aggregationName = aggregationName;
this.mainIndexDocumentFieldValues = mainIndexDocumentFieldValues;
this.compatibleIndexDocumentFieldValues = compatibleIndexDocumentFieldValues;
this.multiValuedIndexDocumentFieldValues = multiValuedIndexDocumentFieldValues;
}
+ @Override
+ public String toString() {
+ return aggregationName + " on type " + fieldType.getUniqueName();
+ }
+
+ public FieldTypeDescriptor fieldType() {
+ return fieldType;
+ }
+
+ public String aggregationName() {
+ return aggregationName;
+ }
+
public List getMainIndexDocumentFieldValues() {
return mainIndexDocumentFieldValues;
}
@@ -38,25 +56,25 @@ public List> getMultiValuedIndexDocumentFieldValues() {
}
/*
- * f -> f.myAggregationType().field( fieldPath, theActualFieldType )
- * .someParam( valueOfActualFieldType )
+ * f -> f.myAggregationType().field( fieldPath, theUnderlyingFieldType )
+ * .someParam( valueOfUnderlyingFieldType )
*/
- public final AggregationScenario> simple(FieldTypeDescriptor typeDescriptor) {
- return withFieldType( TypeAssertionHelper.identity( typeDescriptor ) );
+ public final AggregationScenario> simple() {
+ return withFieldType( TypeAssertionHelper.identity( fieldType ) );
}
/*
* f -> f.myAggregationType().field( fieldPath, fieldType )
- * .someParam( fieldValueConverter.apply( valueOfActualFieldType ) )
+ * .someParam( helper.create( valueOfUnderlyingFieldType ) )
*/
public abstract AggregationScenario> withFieldType(TypeAssertionHelper helper);
/*
- * Same as simple(...), but targeting both the main index and another index,
+ * Same as simple(), but targeting both the main index and another index,
* and expecting an aggregation result taking into account both indexes.
*/
- public final AggregationScenario> onMainAndOtherIndex(FieldTypeDescriptor typeDescriptor) {
- return withFieldTypeOnMainAndOtherIndex( TypeAssertionHelper.identity( typeDescriptor ) );
+ public final AggregationScenario> onMainAndOtherIndex() {
+ return withFieldTypeOnMainAndOtherIndex( TypeAssertionHelper.identity( fieldType ) );
}
/*
@@ -66,14 +84,14 @@ public final AggregationScenario> onMainAndOtherIndex(FieldTypeDescriptor t
public abstract AggregationScenario> withFieldTypeOnMainAndOtherIndex(TypeAssertionHelper helper);
/*
- * Same as simple(...), but not expecting any matching document,
+ * Same as simple(), but not expecting any matching document,
* and thus expecting the aggregation result to be empty.
*/
- public abstract AggregationScenario> withoutMatch(FieldTypeDescriptor typeDescriptor);
+ public abstract AggregationScenario> withoutMatch();
/*
- * Same as simple(...), but targeting the index with multi-valued documents.
+ * Same as simple(), but targeting the index with multi-valued documents.
*/
- public abstract AggregationScenario> onMultiValuedIndex(FieldTypeDescriptor typeDescriptor);
+ public abstract AggregationScenario> onMultiValuedIndex();
}
diff --git a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/util/SimpleFieldModelsByType.java b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/util/SimpleFieldModelsByType.java
index a46439f4cab..14355d5ca9f 100644
--- a/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/util/SimpleFieldModelsByType.java
+++ b/integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/testsupport/util/SimpleFieldModelsByType.java
@@ -6,6 +6,7 @@
*/
package org.hibernate.search.integrationtest.backend.tck.testsupport.util;
+import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Consumer;
@@ -16,32 +17,45 @@
import org.hibernate.search.integrationtest.backend.tck.testsupport.types.FieldTypeDescriptor;
public class SimpleFieldModelsByType {
+ @SafeVarargs
+ public static SimpleFieldModelsByType mapAll(Collection> typeDescriptors,
+ IndexSchemaElement parent, String prefix,
+ Consumer> ... additionalConfiguration) {
+ return mapAll( typeDescriptors.stream(), parent, prefix, additionalConfiguration );
+ }
+
@SafeVarargs
public static SimpleFieldModelsByType mapAll(Stream> typeDescriptors,
IndexSchemaElement parent, String prefix,
- Consumer> additionalConfiguration1,
- Consumer> ... additionalConfiguration2) {
+ Consumer> ... additionalConfiguration) {
SimpleFieldModelsByType result = new SimpleFieldModelsByType();
typeDescriptors.forEach( typeDescriptor -> {
result.content.put(
typeDescriptor,
- SimpleFieldModel.mapper( typeDescriptor, additionalConfiguration1 )
- .map( parent, prefix + typeDescriptor.getUniqueName(), additionalConfiguration2 )
+ SimpleFieldModel.mapper( typeDescriptor, ignored -> { } )
+ .map( parent, prefix + typeDescriptor.getUniqueName(), additionalConfiguration )
);
} );
return result;
}
+ @SafeVarargs
+ public static SimpleFieldModelsByType mapAllMultiValued(Collection> typeDescriptors,
+ IndexSchemaElement parent, String prefix,
+ Consumer> ... additionalConfiguration) {
+ return mapAllMultiValued( typeDescriptors.stream(), parent, prefix, additionalConfiguration );
+ }
+
+ @SafeVarargs
public static SimpleFieldModelsByType mapAllMultiValued(Stream> typeDescriptors,
IndexSchemaElement parent, String prefix,
- Consumer> additionalConfiguration1,
- Consumer> additionalConfiguration2) {
+ Consumer> ... additionalConfiguration) {
SimpleFieldModelsByType result = new SimpleFieldModelsByType();
typeDescriptors.forEach( typeDescriptor -> {
result.content.put(
typeDescriptor,
- SimpleFieldModel.mapper( typeDescriptor, additionalConfiguration1 )
- .mapMultiValued( parent, prefix + typeDescriptor.getUniqueName(), additionalConfiguration2 )
+ SimpleFieldModel.mapper( typeDescriptor, ignored -> { } )
+ .mapMultiValued( parent, prefix + typeDescriptor.getUniqueName(), additionalConfiguration )
);
} );
return result;