diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java index 92d1cbdf96cef..fefa1cb2a16c3 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexNumericFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -78,13 +78,16 @@ public class ScriptScoreBenchmark { ); private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class)); - private final Map fieldTypes = Map.ofEntries( - Map.entry("n", new NumberFieldType("n", NumberType.LONG, false, false, true, true, null, Map.of(), null, false, null)) + private final Map mappedFields = Map.ofEntries( + Map.entry( + "n", + new MappedField("n", new NumberFieldType(NumberType.LONG, false, false, true, true, null, Map.of(), null, false, null)) + ) ); private final IndexFieldDataCache fieldDataCache = new IndexFieldDataCache.None(); private final CircuitBreakerService breakerService = new NoneCircuitBreakerService(); private SearchLookup lookup = new SearchLookup( - fieldTypes::get, + mappedFields::get, (mft, lookup) -> mft.fielddataBuilder("test", lookup).build(fieldDataCache, breakerService) ); @@ -150,8 +153,8 @@ private Query scriptScoreQuery(ScoreScript.Factory factory) { private ScoreScript.Factory bareMetalScript() { return (params, lookup) -> { - 
MappedFieldType type = fieldTypes.get("n"); - IndexNumericFieldData ifd = (IndexNumericFieldData) lookup.getForField(type); + MappedField mappedField = mappedFields.get("n"); + IndexNumericFieldData ifd = (IndexNumericFieldData) lookup.getForField(mappedField); return new ScoreScript.LeafFactory() { @Override public ScoreScript newInstance(DocReader docReader) throws IOException { diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/AggConstructionContentionBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/AggConstructionContentionBenchmark.java index d573015bc8f4c..055ada22a2bbc 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/AggConstructionContentionBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/AggConstructionContentionBenchmark.java @@ -27,7 +27,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; @@ -207,16 +207,16 @@ public Analyzer buildCustomAnalyzer( } @Override - protected IndexFieldData buildFieldData(MappedFieldType ft) { + protected IndexFieldData buildFieldData(MappedField mappedField) { IndexFieldDataCache indexFieldDataCache = indicesFieldDataCache.buildIndexFieldDataCache(new IndexFieldDataCache.Listener() { - }, index, ft.name()); - return ft.fielddataBuilder("test", this::lookup).build(indexFieldDataCache, breakerService); + }, index, mappedField.name()); + return mappedField.fielddataBuilder("test", this::lookup).build(indexFieldDataCache, breakerService); } @Override - public 
MappedFieldType getFieldType(String path) { + public MappedField getMappedField(String path) { if (path.startsWith("int")) { - return new NumberFieldMapper.NumberFieldType(path, NumberType.INTEGER); + return new MappedField(path, new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); } throw new UnsupportedOperationException(); } diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java index 024e81feec2c1..4625e9a9a16e3 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java @@ -17,7 +17,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -30,7 +30,7 @@ public class MatrixStatsAggregatorTests extends AggregatorTestCase { public void testNoData() throws Exception { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { if (randomBoolean()) { @@ -41,7 +41,7 @@ public void testNoData() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new 
MatrixStatsAggregationBuilder("my_agg").fields( Collections.singletonList("field") ); - InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft); + InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); } @@ -49,7 +49,7 @@ public void testNoData() throws Exception { } public void testUnmapped() throws Exception { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { if (randomBoolean()) { @@ -60,7 +60,7 @@ public void testUnmapped() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg").fields( Collections.singletonList("bogus") ); - InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft); + InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertNull(stats.getStats()); assertEquals(0L, stats.getDocCount()); } @@ -69,9 +69,9 @@ public void testUnmapped() throws Exception { public void testTwoFields() throws Exception { String fieldA = "a"; - MappedFieldType ftA = new NumberFieldMapper.NumberFieldType(fieldA, NumberFieldMapper.NumberType.DOUBLE); + MappedField ftA = new MappedField(fieldA, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); String fieldB = "b"; - MappedFieldType ftB = new NumberFieldMapper.NumberFieldType(fieldB, NumberFieldMapper.NumberType.DOUBLE); + MappedField ftB = new MappedField(fieldB, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); try (Directory directory = newDirectory(); RandomIndexWriter 
indexWriter = new RandomIndexWriter(random(), directory)) { diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java index 8617106d5cc28..6df8ea454aff3 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/TimestampFieldMapperServiceTests.java @@ -61,7 +61,7 @@ public void testGetTimestampFieldTypeForTsdbDataStream() throws IOException { IndexResponse indexResponse = indexDoc(); var indicesService = getInstanceFromNode(IndicesService.class); - var result = indicesService.getTimestampFieldType(indexResponse.getShardId().getIndex()); + var result = indicesService.getTimestampField(indexResponse.getShardId().getIndex()); assertThat(result, notNullValue()); } @@ -70,7 +70,7 @@ public void testGetTimestampFieldTypeForDataStream() throws IOException { IndexResponse indexResponse = indexDoc(); var indicesService = getInstanceFromNode(IndicesService.class); - var result = indicesService.getTimestampFieldType(indexResponse.getShardId().getIndex()); + var result = indicesService.getTimestampField(indexResponse.getShardId().getIndex()); assertThat(result, nullValue()); } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index 0d2f71ade6e51..9f4f4e0d8bbb5 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -19,7 +19,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.DateFieldMapper; import 
org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; @@ -454,22 +454,22 @@ private static DoubleValuesSource getDocValueSource(String variable, SearchLooku } String fieldname = parts[1].text; - MappedFieldType fieldType = lookup.fieldType(fieldname); + MappedField mappedField = lookup.mappedField(fieldname); - if (fieldType == null) { + if (mappedField == null) { throw new ParseException("Field [" + fieldname + "] does not exist in mappings", 5); } - IndexFieldData fieldData = lookup.getForField(fieldType); + IndexFieldData fieldData = lookup.getForField(mappedField); final DoubleValuesSource valueSource; - if (fieldType instanceof GeoPointFieldType) { + if (mappedField.type() instanceof GeoPointFieldType) { // geo if (methodname == null) { valueSource = GeoField.getVariable(fieldData, fieldname, variablename); } else { valueSource = GeoField.getMethod(fieldData, fieldname, methodname); } - } else if (fieldType instanceof DateFieldMapper.DateFieldType) { + } else if (mappedField.type() instanceof DateFieldMapper.DateFieldType) { if (dateAccessor) { // date object if (methodname == null) { diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionFieldScriptTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionFieldScriptTests.java index b8ce67ef13d66..f67a0606ee233 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionFieldScriptTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionFieldScriptTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData; import 
org.elasticsearch.index.fielddata.LeafNumericFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.ScriptException; @@ -35,7 +36,7 @@ public class ExpressionFieldScriptTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); - NumberFieldMapper.NumberFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class); when(doubleValues.advanceExact(anyInt())).thenReturn(true); @@ -49,7 +50,7 @@ public void setUp() throws Exception { when(fieldData.load(any())).thenReturn(atomicFieldData); service = new ExpressionScriptEngine(); - lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup) -> fieldData); + lookup = new SearchLookup(field -> field.equals("field") ? 
mappedField : null, (ignored, _lookup) -> fieldData); } private FieldScript.LeafFactory compile(String expression) { diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java index a3f1a750053c8..589397c2bdba5 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.LeafNumericFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.script.DocValuesDocReader; @@ -36,7 +37,7 @@ public class ExpressionNumberSortScriptTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); - NumberFieldType fieldType = new NumberFieldType("field", NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldType(NumberType.DOUBLE)); SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class); when(doubleValues.advanceExact(anyInt())).thenReturn(true); @@ -50,7 +51,7 @@ public void setUp() throws Exception { when(fieldData.load(any())).thenReturn(atomicFieldData); service = new ExpressionScriptEngine(); - lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup) -> fieldData); + lookup = new SearchLookup(field -> field.equals("field") ? 
mappedField : null, (ignored, _lookup) -> fieldData); } private NumberSortScript.LeafFactory compile(String expression) { diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java index faa08db55138b..0baf026119998 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.LeafNumericFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.script.ScriptException; @@ -35,7 +36,7 @@ public class ExpressionTermsSetQueryTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); - NumberFieldType fieldType = new NumberFieldType("field", NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldType(NumberType.DOUBLE)); SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class); when(doubleValues.advanceExact(anyInt())).thenReturn(true); @@ -49,7 +50,7 @@ public void setUp() throws Exception { when(fieldData.load(any())).thenReturn(atomicFieldData); service = new ExpressionScriptEngine(); - lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup) -> fieldData); + lookup = new SearchLookup(field -> field.equals("field") ? 
mappedField : null, (ignored, _lookup) -> fieldData); } private TermsSetQueryScript.LeafFactory compile(String expression) { diff --git a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java index 90240d3b8efb0..ccbb7353c77e2 100644 --- a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java +++ b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java @@ -34,7 +34,7 @@ import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.GeoShapeQueryable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; @@ -294,7 +294,7 @@ private void setupFieldTypeDeprecatedParameters(GeoShapeFieldType ft) { } } - private void setupPrefixTrees(GeoShapeFieldType ft) { + private void setupPrefixTrees(GeoShapeFieldType ft, MapperBuilderContext context) { SpatialPrefixTree prefixTree; if (ft.tree().equals(PrefixTrees.GEOHASH)) { prefixTree = new GeohashPrefixTree( @@ -317,13 +317,13 @@ private void setupPrefixTrees(GeoShapeFieldType ft) { // setup prefix trees regardless of strategy (this is used for the QueryBuilder) // recursive: - RecursivePrefixTreeStrategy rpts = new RecursivePrefixTreeStrategy(prefixTree, ft.name()); + RecursivePrefixTreeStrategy rpts = new RecursivePrefixTreeStrategy(prefixTree, context.buildFullName(name)); rpts.setDistErrPct(ft.distanceErrorPct()); rpts.setPruneLeafyBranches(false); ft.recursiveStrategy = rpts; // term: - TermQueryPrefixTreeStrategy termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, ft.name()); + 
TermQueryPrefixTreeStrategy termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, context.buildFullName(name)); termStrategy.setDistErrPct(ft.distanceErrorPct()); ft.termStrategy = termStrategy; @@ -333,15 +333,9 @@ private void setupPrefixTrees(GeoShapeFieldType ft) { } private GeoShapeFieldType buildFieldType(LegacyGeoShapeParser parser, MapperBuilderContext context) { - GeoShapeFieldType ft = new GeoShapeFieldType( - context.buildFullName(name), - indexed.get(), - orientation.get().value(), - parser, - meta.get() - ); + GeoShapeFieldType ft = new GeoShapeFieldType(indexed.get(), orientation.get().value(), parser, meta.get()); setupFieldTypeDeprecatedParameters(ft); - setupPrefixTrees(ft); + setupPrefixTrees(ft, context); return ft; } @@ -367,7 +361,14 @@ public LegacyGeoShapeFieldMapper build(MapperBuilderContext context) { } LegacyGeoShapeParser parser = new LegacyGeoShapeParser(); GeoShapeFieldType ft = buildFieldType(parser, context); - return new LegacyGeoShapeFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), parser, this); + return new LegacyGeoShapeFieldMapper( + name, + new MappedField(context.buildFullName(name), ft), + multiFieldsBuilder.build(this, context), + copyTo.build(), + parser, + this + ); } } @@ -431,28 +432,29 @@ public static final class GeoShapeFieldType extends AbstractShapeGeometryFieldTy private final LegacyGeoShapeQueryProcessor queryProcessor; - private GeoShapeFieldType( - String name, - boolean indexed, - Orientation orientation, - LegacyGeoShapeParser parser, - Map meta - ) { - super(name, indexed, false, false, parser, orientation, meta); + private GeoShapeFieldType(boolean indexed, Orientation orientation, LegacyGeoShapeParser parser, Map meta) { + super(indexed, false, false, parser, orientation, meta); this.queryProcessor = new LegacyGeoShapeQueryProcessor(this); } - public GeoShapeFieldType(String name) { - this(name, true, Orientation.RIGHT, null, Collections.emptyMap()); + public 
GeoShapeFieldType() { + this(true, Orientation.RIGHT, null, Collections.emptyMap()); } @Override - public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... geometries) { + public Query geoShapeQuery( + String name, + SearchExecutionContext context, + String fieldName, + ShapeRelation relation, + LatLonGeometry... geometries + ) { throw new UnsupportedOperationException("process method should not be called for PrefixTree based geo_shapes"); } @Override public Query geoShapeQuery( + String name, SearchExecutionContext context, String fieldName, SpatialStrategy spatialStrategy, @@ -551,7 +553,7 @@ public PrefixTreeStrategy resolvePrefixTreeStrategy(String strategyName) { public LegacyGeoShapeFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, LegacyGeoShapeParser parser, @@ -559,7 +561,7 @@ public LegacyGeoShapeFieldMapper( ) { super( simpleName, - mappedFieldType, + mappedField, builder.ignoreMalformed.get(), builder.coerce.get(), builder.ignoreZValue.get(), @@ -574,7 +576,7 @@ public LegacyGeoShapeFieldMapper( @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override @@ -605,7 +607,7 @@ protected void index(DocumentParserContext context, ShapeBuilder shapeB } else if (shape instanceof Point == false) { throw new MapperParsingException( "[{" - + fieldType().name() + + name() + "}] is configured for points only but a " + ((shape instanceof JtsGeometry) ? 
((JtsGeometry) shape).getGeom().getGeometryType() : shape.getClass()) + " was found" @@ -613,7 +615,7 @@ protected void index(DocumentParserContext context, ShapeBuilder shapeB } } context.doc().addAll(Arrays.asList(fieldType().defaultPrefixTreeStrategy().createIndexableFields(shape))); - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } @Override diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java index 5f4c8f0afd3e8..01d5f23625d6a 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -627,7 +628,14 @@ public void testDisallowExpensiveQueries() throws IOException { ElasticsearchException e = expectThrows( ElasticsearchException.class, () -> geoShapeFieldMapper.fieldType() - .geoShapeQuery(searchExecutionContext, "location", SpatialStrategy.TERM, ShapeRelation.INTERSECTS, new Point(-10, 10)) + .geoShapeQuery( + "field", + searchExecutionContext, + "location", + SpatialStrategy.TERM, + ShapeRelation.INTERSECTS, + new Point(-10, 10) + ) ); assertEquals( "[geo-shape] queries on [PrefixTree geo shapes] cannot be executed when " + "'search.allow_expensive_queries' is set to false.", @@ -668,9 +676,10 @@ public void testGeoShapeArrayParsing() throws Exception { assertFieldWarnings("tree", "strategy"); } - protected void 
assertSearchable(MappedFieldType fieldType) { + @Override + protected void assertSearchable(MappedField mappedField) { // always searchable even if it uses TextSearchInfo.NONE - assertTrue(fieldType.isSearchable()); + assertTrue(mappedField.isSearchable()); } @Override diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java index 68c5ae8af3874..2be87e3a32d3e 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper.GeoShapeFieldType; import org.elasticsearch.test.VersionUtils; @@ -26,7 +26,7 @@ public class LegacyGeoShapeFieldTypeTests extends FieldTypeTestCase { * that {@link LegacyGeoShapeFieldMapper.GeoShapeFieldType#pointsOnly()} gets set as a side effect when using SpatialStrategy.TERM */ public void testSetStrategyName() { - GeoShapeFieldType fieldType = new GeoShapeFieldType("field"); + GeoShapeFieldType fieldType = new GeoShapeFieldType(); assertFalse(fieldType.pointsOnly()); fieldType.setStrategy(SpatialStrategy.RECURSIVE); assertFalse(fieldType.pointsOnly()); @@ -36,8 +36,7 @@ public void testSetStrategyName() { public void testFetchSourceValue() throws IOException { Version version = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0); - MappedFieldType mapper = new LegacyGeoShapeFieldMapper.Builder("field", version, 
false, true).build(MapperBuilderContext.ROOT) - .fieldType(); + MappedField mapper = new LegacyGeoShapeFieldMapper.Builder("field", version, false, true).build(MapperBuilderContext.ROOT).field(); Map jsonLineString = Map.of("type", "LineString", "coordinates", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index 39456154fa6b5..5aa2e6fb9cddf 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.StringFieldType; @@ -112,7 +113,7 @@ private MatchOnlyTextFieldType buildFieldType(MapperBuilderContext context) { NamedAnalyzer searchQuoteAnalyzer = analyzers.getSearchQuoteAnalyzer(); NamedAnalyzer indexAnalyzer = analyzers.getIndexAnalyzer(); TextSearchInfo tsi = new TextSearchInfo(Defaults.FIELD_TYPE, null, searchAnalyzer, searchQuoteAnalyzer); - MatchOnlyTextFieldType ft = new MatchOnlyTextFieldType(context.buildFullName(name), tsi, indexAnalyzer, meta.getValue()); + MatchOnlyTextFieldType ft = new MatchOnlyTextFieldType(tsi, indexAnalyzer, meta.getValue()); return ft; } @@ -120,7 +121,14 @@ private MatchOnlyTextFieldType buildFieldType(MapperBuilderContext context) { public MatchOnlyTextFieldMapper 
build(MapperBuilderContext context) { MatchOnlyTextFieldType tft = buildFieldType(context); MultiFields multiFields = multiFieldsBuilder.build(this, context); - return new MatchOnlyTextFieldMapper(name, Defaults.FIELD_TYPE, tft, multiFields, copyTo.build(), this); + return new MatchOnlyTextFieldMapper( + name, + Defaults.FIELD_TYPE, + new MappedField(context.buildFullName(name), tft), + multiFields, + copyTo.build(), + this + ); } } @@ -131,15 +139,14 @@ public static class MatchOnlyTextFieldType extends StringFieldType { private final Analyzer indexAnalyzer; private final TextFieldType textFieldType; - public MatchOnlyTextFieldType(String name, TextSearchInfo tsi, Analyzer indexAnalyzer, Map meta) { - super(name, true, false, false, tsi, meta); + public MatchOnlyTextFieldType(TextSearchInfo tsi, Analyzer indexAnalyzer, Map meta) { + super(true, false, false, tsi, meta); this.indexAnalyzer = Objects.requireNonNull(indexAnalyzer); - this.textFieldType = new TextFieldType(name); + this.textFieldType = new TextFieldType(); } - public MatchOnlyTextFieldType(String name) { + public MatchOnlyTextFieldType() { this( - name, new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER), Lucene.STANDARD_ANALYZER, Collections.emptyMap() @@ -157,20 +164,21 @@ public String familyTypeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.toString(name(), context, format); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return SourceValueFetcher.toString(name, context, format); } private Function, IOException>> getValueFetcherProvider( + String name, SearchExecutionContext searchExecutionContext ) { if (searchExecutionContext.isSourceEnabled() == false) { throw new IllegalArgumentException( - "Field [" + name() + "] of type [" + CONTENT_TYPE + "] cannot run positional queries since [_source] is disabled." 
+ "Field [" + name + "] of type [" + CONTENT_TYPE + "] cannot run positional queries since [_source] is disabled." ); } SourceLookup sourceLookup = searchExecutionContext.lookup().source(); - ValueFetcher valueFetcher = valueFetcher(searchExecutionContext, null); + ValueFetcher valueFetcher = valueFetcher(name, searchExecutionContext, null); return context -> { valueFetcher.setNextReader(context); return docID -> { @@ -184,28 +192,30 @@ private Function, IOException }; } - private Query toQuery(Query query, SearchExecutionContext searchExecutionContext) { + private Query toQuery(String name, Query query, SearchExecutionContext searchExecutionContext) { return new ConstantScoreQuery( - new SourceConfirmedTextQuery(query, getValueFetcherProvider(searchExecutionContext), indexAnalyzer) + new SourceConfirmedTextQuery(query, getValueFetcherProvider(name, searchExecutionContext), indexAnalyzer) ); } private IntervalsSource toIntervalsSource( + String name, IntervalsSource source, Query approximation, SearchExecutionContext searchExecutionContext ) { - return new SourceIntervalsSource(source, approximation, getValueFetcherProvider(searchExecutionContext), indexAnalyzer); + return new SourceIntervalsSource(source, approximation, getValueFetcherProvider(name, searchExecutionContext), indexAnalyzer); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { // Disable scoring - return new ConstantScoreQuery(super.termQuery(value, context)); + return new ConstantScoreQuery(super.termQuery(name, value, context)); } @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -214,36 +224,38 @@ public Query fuzzyQuery( SearchExecutionContext context ) { // Disable scoring - return new ConstantScoreQuery(super.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions, context)); + return new 
ConstantScoreQuery(super.fuzzyQuery(name, value, fuzziness, prefixLength, maxExpansions, transpositions, context)); } @Override - public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext context) { - return toIntervalsSource(Intervals.term(term), new TermQuery(new Term(name(), term)), context); + public IntervalsSource termIntervals(String name, BytesRef term, SearchExecutionContext context) { + return toIntervalsSource(name, Intervals.term(term), new TermQuery(new Term(name, term)), context); } @Override - public IntervalsSource prefixIntervals(BytesRef term, SearchExecutionContext context) { - return toIntervalsSource(Intervals.prefix(term), new PrefixQuery(new Term(name(), term)), context); + public IntervalsSource prefixIntervals(String name, BytesRef term, SearchExecutionContext context) { + return toIntervalsSource(name, Intervals.prefix(term), new PrefixQuery(new Term(name, term)), context); } @Override public IntervalsSource fuzzyIntervals( + String name, String term, int maxDistance, int prefixLength, boolean transpositions, SearchExecutionContext context ) { - FuzzyQuery fuzzyQuery = new FuzzyQuery(new Term(name(), term), maxDistance, prefixLength, 128, transpositions); + FuzzyQuery fuzzyQuery = new FuzzyQuery(new Term(name, term), maxDistance, prefixLength, 128, transpositions); fuzzyQuery.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE); IntervalsSource fuzzyIntervals = Intervals.multiterm(fuzzyQuery.getAutomata(), term); - return toIntervalsSource(fuzzyIntervals, fuzzyQuery, context); + return toIntervalsSource(name, fuzzyIntervals, fuzzyQuery, context); } @Override - public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) { + public IntervalsSource wildcardIntervals(String name, BytesRef pattern, SearchExecutionContext context) { return toIntervalsSource( + name, Intervals.wildcard(pattern), new MatchAllDocsQuery(), // wildcard queries can be expensive, what should the approximation be? 
context @@ -251,32 +263,43 @@ public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContex } @Override - public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements, SearchExecutionContext queryShardContext) - throws IOException { - final Query query = textFieldType.phraseQuery(stream, slop, enablePosIncrements, queryShardContext); - return toQuery(query, queryShardContext); + public Query phraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePosIncrements, + SearchExecutionContext queryShardContext + ) throws IOException { + final Query query = textFieldType.phraseQuery(name, stream, slop, enablePosIncrements, queryShardContext); + return toQuery(name, query, queryShardContext); } @Override public Query multiPhraseQuery( + String name, TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext queryShardContext ) throws IOException { - final Query query = textFieldType.multiPhraseQuery(stream, slop, enablePositionIncrements, queryShardContext); - return toQuery(query, queryShardContext); + final Query query = textFieldType.multiPhraseQuery(name, stream, slop, enablePositionIncrements, queryShardContext); + return toQuery(name, query, queryShardContext); } @Override - public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext queryShardContext) - throws IOException { - final Query query = textFieldType.phrasePrefixQuery(stream, slop, maxExpansions, queryShardContext); - return toQuery(query, queryShardContext); + public Query phrasePrefixQuery( + String name, + TokenStream stream, + int slop, + int maxExpansions, + SearchExecutionContext queryShardContext + ) throws IOException { + final Query query = textFieldType.phrasePrefixQuery(name, stream, slop, maxExpansions, queryShardContext); + return toQuery(name, query, queryShardContext); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, 
Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { throw new IllegalArgumentException(CONTENT_TYPE + " fields do not support sorting and aggregations"); } @@ -291,14 +314,14 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S private MatchOnlyTextFieldMapper( String simpleName, FieldType fieldType, - MatchOnlyTextFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder ) { - super(simpleName, mappedFieldType, multiFields, copyTo, false, null); - assert mappedFieldType.getTextSearchInfo().isTokenized(); - assert mappedFieldType.hasDocValues() == false; + super(simpleName, mappedField, multiFields, copyTo, false, null); + assert mappedField.getTextSearchInfo().isTokenized(); + assert mappedField.hasDocValues() == false; this.fieldType = fieldType; this.indexCreatedVersion = builder.indexCreatedVersion; this.indexAnalyzers = builder.analyzers.indexAnalyzers; @@ -308,7 +331,7 @@ private MatchOnlyTextFieldMapper( @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), indexAnalyzer); + return Map.of(mappedField.name(), indexAnalyzer); } @Override @@ -324,9 +347,9 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio return; } - Field field = new Field(fieldType().name(), value, fieldType); + Field field = new Field(name(), value, fieldType); context.doc().add(field); - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } @Override diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapper.java index f4db702a09e47..196aa08ed1b74 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapper.java +++ 
b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapper.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; @@ -77,7 +78,7 @@ protected Parameter[] getParameters() { public RankFeatureFieldMapper build(MapperBuilderContext context) { return new RankFeatureFieldMapper( name, - new RankFeatureFieldType(context.buildFullName(name), meta.getValue(), positiveScoreImpact.getValue()), + new MappedField(context.buildFullName(name), new RankFeatureFieldType(meta.getValue(), positiveScoreImpact.getValue())), multiFieldsBuilder.build(this, context), copyTo.build(), positiveScoreImpact.getValue() @@ -91,8 +92,8 @@ public static final class RankFeatureFieldType extends MappedFieldType { private final boolean positiveScoreImpact; - public RankFeatureFieldType(String name, Map meta, boolean positiveScoreImpact) { - super(name, true, false, false, TextSearchInfo.NONE, meta); + public RankFeatureFieldType(Map meta, boolean positiveScoreImpact) { + super(true, false, false, TextSearchInfo.NONE, meta); this.positiveScoreImpact = positiveScoreImpact; } @@ -106,21 +107,21 @@ public boolean positiveScoreImpact() { } @Override - public Query existsQuery(SearchExecutionContext context) { - return new TermQuery(new Term("_feature", name())); + public Query existsQuery(String name, SearchExecutionContext context) { + return new TermQuery(new Term("_feature", name)); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier 
searchLookup) { throw new IllegalArgumentException("[rank_feature] fields do not support sorting, scripting or aggregating"); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new SourceValueFetcher(name(), context) { + return new SourceValueFetcher(name, context) { @Override protected Float parseSourceValue(Object value) { return objectToFloat(value); @@ -129,7 +130,7 @@ protected Float parseSourceValue(Object value) { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new IllegalArgumentException("Queries on [rank_feature] fields are not supported"); } } @@ -138,18 +139,18 @@ public Query termQuery(Object value, SearchExecutionContext context) { private RankFeatureFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, boolean positiveScoreImpact ) { - super(simpleName, mappedFieldType, multiFields, copyTo, false, null); + super(simpleName, mappedField, multiFields, copyTo, false, null); this.positiveScoreImpact = positiveScoreImpact; } @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapper.java index 
b47370fe0e487..3af994dacb529 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapper.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.extras; import org.apache.lucene.search.Query; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.TextSearchInfo; @@ -35,7 +36,7 @@ public static final class RankFeatureMetaFieldType extends MappedFieldType { public static final RankFeatureMetaFieldType INSTANCE = new RankFeatureMetaFieldType(); private RankFeatureMetaFieldType() { - super(NAME, false, false, false, TextSearchInfo.NONE, Collections.emptyMap()); + super(false, false, false, TextSearchInfo.NONE, Collections.emptyMap()); } @Override @@ -44,23 +45,23 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "]."); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { throw new UnsupportedOperationException("Cannot run exists query on [_feature]"); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new UnsupportedOperationException("The [_feature] field may not be queried directly"); } } private RankFeatureMetaFieldMapper() { - super(RankFeatureMetaFieldType.INSTANCE); + super(new MappedField(NAME, RankFeatureMetaFieldType.INSTANCE)); } @Override diff --git 
a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java index 25f0786534110..37e1cd84be9bc 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java @@ -14,7 +14,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.extras.RankFeatureFieldMapper.RankFeatureFieldType; import org.elasticsearch.index.mapper.extras.RankFeaturesFieldMapper.RankFeaturesFieldType; import org.elasticsearch.index.query.AbstractQueryBuilder; @@ -378,25 +378,25 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { - final MappedFieldType ft = context.getFieldType(field); + final MappedField mappedField = context.getMappedField(field); - if (ft instanceof final RankFeatureFieldType fft) { - return scoreFunction.toQuery(RankFeatureMetaFieldMapper.NAME, field, fft.positiveScoreImpact()); - } else if (ft == null) { + if (mappedField == null) { final int lastDotIndex = field.lastIndexOf('.'); if (lastDotIndex != -1) { final String parentField = field.substring(0, lastDotIndex); - final MappedFieldType parentFt = context.getFieldType(parentField); - if (parentFt instanceof RankFeaturesFieldType) { + final MappedField parentMappedField = context.getMappedField(parentField); + if (parentMappedField != null && parentMappedField.type() instanceof RankFeaturesFieldType) { return scoreFunction.toQuery(parentField, 
field.substring(lastDotIndex + 1), true); } } return new MatchNoDocsQuery(); // unmapped field + } else if (mappedField.type() instanceof final RankFeatureFieldType fft) { + return scoreFunction.toQuery(RankFeatureMetaFieldMapper.NAME, field, fft.positiveScoreImpact()); } else { throw new IllegalArgumentException( "[rank_feature] query only works on [rank_feature] fields and " + "features of [rank_features] fields, not [" - + ft.typeName() + + mappedField.typeName() + "]" ); } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeaturesFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeaturesFieldMapper.java index 1c2f31d1d68bb..64cd83967cd8f 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeaturesFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeaturesFieldMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; @@ -63,7 +64,7 @@ protected Parameter[] getParameters() { public RankFeaturesFieldMapper build(MapperBuilderContext context) { return new RankFeaturesFieldMapper( name, - new RankFeaturesFieldType(context.buildFullName(name), meta.getValue(), positiveScoreImpact.getValue()), + new MappedField(context.buildFullName(name), new RankFeaturesFieldType(meta.getValue(), positiveScoreImpact.getValue())), multiFieldsBuilder.build(this, context), copyTo.build(), positiveScoreImpact.getValue() @@ -77,8 +78,8 @@ public static final class RankFeaturesFieldType extends MappedFieldType { private final boolean positiveScoreImpact; - 
public RankFeaturesFieldType(String name, Map meta, boolean positiveScoreImpact) { - super(name, false, false, false, TextSearchInfo.NONE, meta); + public RankFeaturesFieldType(Map meta, boolean positiveScoreImpact) { + super(false, false, false, TextSearchInfo.NONE, meta); this.positiveScoreImpact = positiveScoreImpact; } @@ -92,22 +93,22 @@ public boolean positiveScoreImpact() { } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { throw new IllegalArgumentException("[rank_features] fields do not support [exists] queries"); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { throw new IllegalArgumentException("[rank_features] fields do not support sorting, scripting or aggregating"); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.identity(name(), context, format); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return SourceValueFetcher.identity(name, context, format); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new IllegalArgumentException("Queries on [rank_features] fields are not supported"); } } @@ -116,18 +117,18 @@ public Query termQuery(Object value, SearchExecutionContext context) { private RankFeaturesFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, boolean positiveScoreImpact ) { - super(simpleName, mappedFieldType, multiFields, copyTo, false, null); + super(simpleName, mappedField, multiFields, copyTo, false, null); this.positiveScoreImpact 
= positiveScoreImpact; } @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index 09ac5a21ab725..f5a7b398919db 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.SimpleMappedFieldType; @@ -166,7 +167,6 @@ protected Parameter[] getParameters() { @Override public ScaledFloatFieldMapper build(MapperBuilderContext context) { ScaledFloatFieldType type = new ScaledFloatFieldType( - context.buildFullName(name), indexed.getValue(), stored.getValue(), hasDocValues.getValue(), @@ -175,7 +175,13 @@ public ScaledFloatFieldMapper build(MapperBuilderContext context) { nullValue.getValue(), metric.getValue() ); - return new ScaledFloatFieldMapper(name, type, multiFieldsBuilder.build(this, context), copyTo.build(), this); + return new ScaledFloatFieldMapper( + name, + new MappedField(context.buildFullName(name), type), + multiFieldsBuilder.build(this, context), + copyTo.build(), + this + ); } } @@ -188,7 +194,6 @@ public static final class ScaledFloatFieldType extends SimpleMappedFieldType { private final TimeSeriesParams.MetricType metricType; public 
ScaledFloatFieldType( - String name, boolean indexed, boolean stored, boolean hasDocValues, @@ -197,18 +202,18 @@ public ScaledFloatFieldType( Double nullValue, TimeSeriesParams.MetricType metricType ) { - super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.scalingFactor = scalingFactor; this.nullValue = nullValue; this.metricType = metricType; } - public ScaledFloatFieldType(String name, double scalingFactor) { - this(name, scalingFactor, true); + public ScaledFloatFieldType(double scalingFactor) { + this(scalingFactor, true); } - public ScaledFloatFieldType(String name, double scalingFactor, boolean indexed) { - this(name, indexed, false, true, Collections.emptyMap(), scalingFactor, null, null); + public ScaledFloatFieldType(double scalingFactor, boolean indexed) { + this(indexed, false, true, Collections.emptyMap(), scalingFactor, null, null); } public double getScalingFactor() { @@ -221,41 +226,42 @@ public String typeName() { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(name()); + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return context.fieldExistsInIndex(name); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); long scaledValue = Math.round(scale(value)); - return NumberFieldMapper.NumberType.LONG.termQuery(name(), scaledValue, isIndexed()); + return NumberFieldMapper.NumberType.LONG.termQuery(name, scaledValue, isIndexed()); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termsQuery(String name, 
Collection values, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { List scaledValues = new ArrayList<>(values.size()); for (Object value : values) { long scaledValue = Math.round(scale(value)); scaledValues.add(scaledValue); } - return NumberFieldMapper.NumberType.LONG.termsQuery(name(), Collections.unmodifiableList(scaledValues)); + return NumberFieldMapper.NumberType.LONG.termsQuery(name, Collections.unmodifiableList(scaledValues)); } else { - return super.termsQuery(values, context); + return super.termsQuery(name, values, context); } } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); + failIfNotIndexedNorDocValuesFallback(name, context); Long lo = null; if (lowerTerm != null) { double dValue = scale(lowerTerm); @@ -272,15 +278,15 @@ public Query rangeQuery( } hi = Math.round(Math.floor(dValue)); } - return NumberFieldMapper.NumberType.LONG.rangeQuery(name(), lo, hi, true, true, hasDocValues(), context, isIndexed()); + return NumberFieldMapper.NumberType.LONG.rangeQuery(name, lo, hi, true, true, hasDocValues(), context, isIndexed()); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); return (cache, breakerService) -> { final IndexNumericFieldData scaledValues = new SortedNumericIndexFieldData.Builder( - name(), + name, IndexNumericFieldData.NumericType.LONG, (dv, n) -> { throw new UnsupportedOperationException(); } ).build(cache, breakerService); @@ -289,11 +295,11 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } @Override - public ValueFetcher 
valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new SourceValueFetcher(name(), context) { + return new SourceValueFetcher(name, context) { @Override protected Double parseSourceValue(Object value) { double doubleValue; @@ -321,8 +327,8 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { + checkNoTimeZone(name, timeZone); if (format == null) { return DocValueFormat.RAW; } @@ -377,14 +383,8 @@ public String toString() { private final boolean coerceByDefault; private final TimeSeriesParams.MetricType metricType; - private ScaledFloatFieldMapper( - String simpleName, - ScaledFloatFieldType mappedFieldType, - MultiFields multiFields, - CopyTo copyTo, - Builder builder - ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + private ScaledFloatFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo); this.indexed = builder.indexed.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); this.stored = builder.stored.getValue(); @@ -465,10 +465,10 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } long scaledValue = encode(doubleValue, scalingFactor); - NumberFieldMapper.NumberType.LONG.addFields(context.doc(), fieldType().name(), scaledValue, indexed, hasDocValues, stored); + NumberFieldMapper.NumberType.LONG.addFields(context.doc(), name(), 
scaledValue, indexed, hasDocValues, stored); if (hasDocValues == false && (indexed || stored)) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java index 8ab0968384467..87e97c728684c 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; @@ -186,23 +187,23 @@ public SearchAsYouTypeFieldMapper build(MapperBuilderContext context) { NamedAnalyzer indexAnalyzer = analyzers.getIndexAnalyzer(); NamedAnalyzer searchAnalyzer = analyzers.getSearchAnalyzer(); - SearchAsYouTypeFieldType ft = new SearchAsYouTypeFieldType( - context.buildFullName(name), + final String fullName = context.buildFullName(name); + SearchAsYouTypeFieldType searchAsYouTypeFieldType = new SearchAsYouTypeFieldType( fieldType, similarity.getValue(), analyzers.getSearchAnalyzer(), analyzers.getSearchQuoteAnalyzer(), meta.getValue() ); + MappedField searchAsYouTypeField = new MappedField(fullName, searchAsYouTypeFieldType); - indexAnalyzers.put(ft.name(), indexAnalyzer); + indexAnalyzers.put(fullName, indexAnalyzer); // set up the prefix field FieldType prefixft = new FieldType(); prefixft.setIndexOptions(fieldType.indexOptions()); prefixft.setOmitNorms(true); 
prefixft.setStored(false); - final String fullName = context.buildFullName(name); // wrap the root field's index analyzer with shingles and edge ngrams final Analyzer prefixIndexWrapper = SearchAsYouTypeAnalyzer.withShingleAndPrefix( indexAnalyzer.analyzer(), @@ -216,14 +217,17 @@ public SearchAsYouTypeFieldMapper build(MapperBuilderContext context) { ); // don't wrap the root field's search quote analyzer as prefix field doesn't support phrase queries TextSearchInfo prefixSearchInfo = new TextSearchInfo(prefixft, similarity.getValue(), prefixSearchWrapper, searchAnalyzer); - final PrefixFieldType prefixFieldType = new PrefixFieldType(fullName, prefixSearchInfo, Defaults.MIN_GRAM, Defaults.MAX_GRAM); + final MappedField prefixField = PrefixFieldType.newMappedField( + fullName, + new PrefixFieldType(fullName, prefixSearchInfo, Defaults.MIN_GRAM, Defaults.MAX_GRAM) + ); final NamedAnalyzer prefixAnalyzer = new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, prefixIndexWrapper); - final PrefixFieldMapper prefixFieldMapper = new PrefixFieldMapper(prefixft, prefixFieldType); - indexAnalyzers.put(prefixFieldType.name(), prefixAnalyzer); + final PrefixFieldMapper prefixFieldMapper = new PrefixFieldMapper(prefixft, prefixField); + indexAnalyzers.put(prefixField.name(), prefixAnalyzer); // set up the shingle fields final ShingleFieldMapper[] shingleFieldMappers = new ShingleFieldMapper[maxShingleSize.getValue() - 1]; - final ShingleFieldType[] shingleFieldTypes = new ShingleFieldType[maxShingleSize.getValue() - 1]; + final MappedField[] shingleFieldTypes = new MappedField[maxShingleSize.getValue() - 1]; for (int i = 0; i < shingleFieldMappers.length; i++) { final int shingleSize = i + 2; FieldType shingleft = new FieldType(fieldType); @@ -250,18 +254,19 @@ public SearchAsYouTypeFieldMapper build(MapperBuilderContext context) { shingleSearchWrapper, shingleSearchQuoteWrapper ); - final ShingleFieldType shingleFieldType = new ShingleFieldType(fieldName, shingleSize, 
textSearchInfo); - shingleFieldType.setPrefixFieldType(prefixFieldType); - shingleFieldTypes[i] = shingleFieldType; + final ShingleFieldType shingleFieldType = new ShingleFieldType(shingleSize, textSearchInfo); + final MappedField shingleField = new MappedField(fieldName, shingleFieldType); + shingleFieldType.setPrefixField(prefixField); + shingleFieldTypes[i] = shingleField; NamedAnalyzer shingleAnalyzer = new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, shingleIndexWrapper); - shingleFieldMappers[i] = new ShingleFieldMapper(shingleft, shingleFieldType); - indexAnalyzers.put(shingleFieldType.name(), shingleAnalyzer); + shingleFieldMappers[i] = new ShingleFieldMapper(shingleft, shingleField); + indexAnalyzers.put(shingleField.name(), shingleAnalyzer); } - ft.setPrefixField(prefixFieldType); - ft.setShingleFields(shingleFieldTypes); + searchAsYouTypeFieldType.setPrefixField(prefixField); + searchAsYouTypeFieldType.setShingleFields(shingleFieldTypes); return new SearchAsYouTypeFieldMapper( name, - ft, + searchAsYouTypeField, copyTo.build(), indexAnalyzers, prefixFieldMapper, @@ -293,11 +298,10 @@ private static int countPosition(TokenStream stream) throws IOException { static class SearchAsYouTypeFieldType extends StringFieldType { final FieldType fieldType; - PrefixFieldType prefixField; - ShingleFieldType[] shingleFields = new ShingleFieldType[0]; + MappedField prefixField; + MappedField[] shingleFields = new MappedField[0]; SearchAsYouTypeFieldType( - String name, FieldType fieldType, SimilarityProvider similarity, NamedAnalyzer searchAnalyzer, @@ -305,7 +309,7 @@ static class SearchAsYouTypeFieldType extends StringFieldType { Map meta ) { super( - name, + fieldType.indexOptions() != IndexOptions.NONE, fieldType.stored(), false, @@ -315,11 +319,11 @@ static class SearchAsYouTypeFieldType extends StringFieldType { this.fieldType = fieldType; } - public void setPrefixField(PrefixFieldType prefixField) { + public void setPrefixField(MappedField 
prefixField) { this.prefixField = prefixField; } - public void setShingleFields(ShingleFieldType[] shingleFields) { + public void setShingleFields(MappedField[] shingleFields) { this.shingleFields = shingleFields; } @@ -328,25 +332,26 @@ public String typeName() { return CONTENT_TYPE; } - private ShingleFieldType shingleFieldForPositions(int positions) { + private MappedField shingleFieldForPositions(int positions) { final int indexFromShingleSize = Math.max(positions - 2, 0); return shingleFields[Math.min(indexFromShingleSize, shingleFields.length - 1)]; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.toString(name(), context, format); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return SourceValueFetcher.toString(name, context, format); } @Override public Query prefixQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context ) { - if (prefixField == null || prefixField.termLengthWithinBounds(value.length()) == false) { - return super.prefixQuery(value, method, caseInsensitive, context); + if (prefixField == null || ((PrefixFieldType) prefixField.type()).termLengthWithinBounds(value.length()) == false) { + return super.prefixQuery(name, value, method, caseInsensitive, context); } else { final Query query = prefixField.prefixQuery(value, method, caseInsensitive, context); if (method == null @@ -359,59 +364,75 @@ public Query prefixQuery( } } - private void checkForPositions() { + private void checkForPositions(String name) { if (getTextSearchInfo().hasPositions() == false) { - throw new IllegalStateException("field:[" + name() + "] was indexed without position data; cannot run PhraseQuery"); + throw new IllegalStateException("field:[" + name + "] was indexed without position data; cannot run PhraseQuery"); } } @Override - public Query phraseQuery(TokenStream stream, 
int slop, boolean enablePositionIncrements, SearchExecutionContext context) - throws IOException { - checkForPositions(); + public Query phraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) throws IOException { + checkForPositions(name); int numPos = countPosition(stream); if (shingleFields.length == 0 || slop > 0 || hasGaps(stream) || numPos <= 1) { - return TextFieldMapper.createPhraseQuery(stream, name(), slop, enablePositionIncrements); + return TextFieldMapper.createPhraseQuery(stream, name, slop, enablePositionIncrements); } - final ShingleFieldType shingleField = shingleFieldForPositions(numPos); - stream = new FixedShingleFilter(stream, shingleField.shingleSize); + final MappedField shingleField = shingleFieldForPositions(numPos); + stream = new FixedShingleFilter(stream, ((ShingleFieldType) shingleField.type()).shingleSize); return shingleField.phraseQuery(stream, 0, true, context); } @Override - public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) - throws IOException { - checkForPositions(); + public Query multiPhraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) throws IOException { + checkForPositions(name); int numPos = countPosition(stream); if (shingleFields.length == 0 || slop > 0 || hasGaps(stream) || numPos <= 1) { - return TextFieldMapper.createPhraseQuery(stream, name(), slop, enablePositionIncrements); + return TextFieldMapper.createPhraseQuery(stream, name, slop, enablePositionIncrements); } - final ShingleFieldType shingleField = shingleFieldForPositions(numPos); - stream = new FixedShingleFilter(stream, shingleField.shingleSize); + final MappedField shingleField = shingleFieldForPositions(numPos); + stream = new FixedShingleFilter(stream, ((ShingleFieldType) shingleField.type()).shingleSize); return 
shingleField.multiPhraseQuery(stream, 0, true, context); } @Override - public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) throws IOException { + public Query phrasePrefixQuery(String name, TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) + throws IOException { int numPos = countPosition(stream); if (numPos > 1) { - checkForPositions(); + checkForPositions(name); } if (shingleFields.length == 0 || slop > 0 || hasGaps(stream) || numPos <= 1) { - return TextFieldMapper.createPhrasePrefixQuery(stream, name(), slop, maxExpansions, null, null); + return TextFieldMapper.createPhrasePrefixQuery(stream, name, slop, maxExpansions, null, null); } - final ShingleFieldType shingleField = shingleFieldForPositions(numPos); - stream = new FixedShingleFilter(stream, shingleField.shingleSize); + final MappedField shingleField = shingleFieldForPositions(numPos); + stream = new FixedShingleFilter(stream, ((ShingleFieldType) shingleField.type()).shingleSize); return shingleField.phrasePrefixQuery(stream, 0, maxExpansions, context); } @Override - public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, SearchExecutionContext context) { - if (prefixField != null && prefixField.termLengthWithinBounds(value.length())) { - return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixField.name(), indexedValueForSearch(value))), name()); + public SpanQuery spanPrefixQuery( + String name, + String value, + SpanMultiTermQueryWrapper.SpanRewriteMethod method, + SearchExecutionContext context + ) { + if (prefixField == null || ((PrefixFieldType) prefixField.type()).termLengthWithinBounds(value.length())) { + return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixField.name(), indexedValueForSearch(name, value))), name); } else { SpanMultiTermQueryWrapper spanMulti = new SpanMultiTermQueryWrapper<>( - new PrefixQuery(new Term(name(), 
indexedValueForSearch(value))) + new PrefixQuery(new Term(name, indexedValueForSearch(name, value))) ); spanMulti.setRewriteMethod(method); return spanMulti; @@ -429,8 +450,12 @@ static final class PrefixFieldType extends StringFieldType { final int maxChars; final String parentField; + static MappedField newMappedField(String parentField, PrefixFieldType prefixFieldType) { + return new MappedField(parentField + PREFIX_FIELD_SUFFIX, prefixFieldType); + } + PrefixFieldType(String parentField, TextSearchInfo textSearchInfo, int minChars, int maxChars) { - super(parentField + PREFIX_FIELD_SUFFIX, true, false, false, textSearchInfo, Collections.emptyMap()); + super(true, false, false, textSearchInfo, Collections.emptyMap()); this.minChars = minChars; this.maxChars = maxChars; this.parentField = parentField; @@ -441,12 +466,13 @@ boolean termLengthWithinBounds(int length) { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { + public boolean mayExistInIndex(String name, SearchExecutionContext context) { return false; } @Override public Query prefixQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, @@ -454,9 +480,9 @@ public Query prefixQuery( ) { if (value.length() >= minChars) { if (caseInsensitive) { - return super.termQueryCaseInsensitive(value, context); + return super.termQueryCaseInsensitive(name, value, context); } - return super.termQuery(value, context); + return super.termQuery(name, value, context); } List automata = new ArrayList<>(); automata.add(Automata.makeString(value)); @@ -464,7 +490,7 @@ public Query prefixQuery( automata.add(Automata.makeAnyChar()); } Automaton automaton = Operations.concatenate(automata); - AutomatonQuery query = new AutomatonQuery(new Term(name(), value + "*"), automaton); + AutomatonQuery query = new AutomatonQuery(new Term(name, value + "*"), automaton); query.setRewriteMethod(method); return new BooleanQuery.Builder().add(query, 
BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD) @@ -472,10 +498,10 @@ public Query prefixQuery( } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { // Because this internal field is modelled as a multi-field, SourceValueFetcher will look up its // parent field in _source. So we don't need to use the parent field name here. - return SourceValueFetcher.toString(name(), context, format); + return SourceValueFetcher.toString(name, context, format); } @Override @@ -493,8 +519,8 @@ static final class PrefixFieldMapper extends FieldMapper { final FieldType fieldType; - PrefixFieldMapper(FieldType fieldType, PrefixFieldType mappedFieldType) { - super(mappedFieldType.name(), mappedFieldType, MultiFields.empty(), CopyTo.empty()); + PrefixFieldMapper(FieldType fieldType, MappedField mappedField) { + super(mappedField.name(), mappedField, MultiFields.empty(), CopyTo.empty()); this.fieldType = fieldType; } @@ -532,8 +558,8 @@ static final class ShingleFieldMapper extends FieldMapper { private final FieldType fieldType; - ShingleFieldMapper(FieldType fieldType, ShingleFieldType mappedFieldtype) { - super(mappedFieldtype.name(), mappedFieldtype, MultiFields.empty(), CopyTo.empty()); + ShingleFieldMapper(FieldType fieldType, MappedField mappedField) { + super(mappedField.name(), mappedField, MultiFields.empty(), CopyTo.empty()); this.fieldType = fieldType; } @@ -567,27 +593,27 @@ protected String contentType() { */ static class ShingleFieldType extends StringFieldType { final int shingleSize; - PrefixFieldType prefixFieldType; + MappedField prefixField; - ShingleFieldType(String name, int shingleSize, TextSearchInfo textSearchInfo) { - super(name, true, false, false, textSearchInfo, Collections.emptyMap()); + ShingleFieldType(int shingleSize, TextSearchInfo textSearchInfo) { + 
super(true, false, false, textSearchInfo, Collections.emptyMap()); this.shingleSize = shingleSize; } - void setPrefixFieldType(PrefixFieldType prefixFieldType) { - this.prefixFieldType = prefixFieldType; + void setPrefixField(MappedField prefixField) { + this.prefixField = prefixField; } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { + public boolean mayExistInIndex(String name, SearchExecutionContext context) { return false; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { // Because this internal field is modelled as a multi-field, SourceValueFetcher will look up its // parent field in _source. So we don't need to use the parent field name here. - return SourceValueFetcher.toString(name(), context, format); + return SourceValueFetcher.toString(name, context, format); } @Override @@ -597,15 +623,16 @@ public String typeName() { @Override public Query prefixQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context ) { - if (prefixFieldType == null || prefixFieldType.termLengthWithinBounds(value.length()) == false) { - return super.prefixQuery(value, method, caseInsensitive, context); + if (prefixField == null || ((PrefixFieldType) prefixField.type()).termLengthWithinBounds(value.length()) == false) { + return super.prefixQuery(name, value, method, caseInsensitive, context); } else { - final Query query = prefixFieldType.prefixQuery(value, method, caseInsensitive, context); + final Query query = prefixField.prefixQuery(value, method, caseInsensitive, context); if (method == null || method == MultiTermQuery.CONSTANT_SCORE_REWRITE || method == MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE) { @@ -617,37 +644,53 @@ public Query prefixQuery( } @Override - public Query phraseQuery(TokenStream stream, int slop, boolean 
enablePositionIncrements, SearchExecutionContext context) - throws IOException { - return TextFieldMapper.createPhraseQuery(stream, name(), slop, enablePositionIncrements); + public Query phraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) throws IOException { + return TextFieldMapper.createPhraseQuery(stream, name, slop, enablePositionIncrements); } @Override - public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) - throws IOException { - return TextFieldMapper.createPhraseQuery(stream, name(), slop, enablePositionIncrements); + public Query multiPhraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) throws IOException { + return TextFieldMapper.createPhraseQuery(stream, name, slop, enablePositionIncrements); } @Override - public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) throws IOException { - final String prefixFieldName = slop > 0 ? null : prefixFieldType.name(); + public Query phrasePrefixQuery(String name, TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) + throws IOException { + final String prefixFieldName = slop > 0 ? 
null : prefixField.name(); return TextFieldMapper.createPhrasePrefixQuery( stream, - name(), + name, slop, maxExpansions, prefixFieldName, - prefixFieldType::termLengthWithinBounds + ((PrefixFieldType) prefixField.type())::termLengthWithinBounds ); } @Override - public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, SearchExecutionContext context) { - if (prefixFieldType != null && prefixFieldType.termLengthWithinBounds(value.length())) { - return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixFieldType.name(), indexedValueForSearch(value))), name()); + public SpanQuery spanPrefixQuery( + String name, + String value, + SpanMultiTermQueryWrapper.SpanRewriteMethod method, + SearchExecutionContext context + ) { + if (prefixField != null && ((PrefixFieldType) prefixField.type()).termLengthWithinBounds(value.length())) { + return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixField.name(), indexedValueForSearch(name, value))), name); } else { SpanMultiTermQueryWrapper spanMulti = new SpanMultiTermQueryWrapper<>( - new PrefixQuery(new Term(name(), indexedValueForSearch(value))) + new PrefixQuery(new Term(name, indexedValueForSearch(name, value))) ); spanMulti.setRewriteMethod(method); return spanMulti; @@ -664,7 +707,7 @@ public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRew public SearchAsYouTypeFieldMapper( String simpleName, - SearchAsYouTypeFieldType mappedFieldType, + MappedField mappedField, CopyTo copyTo, Map indexAnalyzers, PrefixFieldMapper prefixField, @@ -672,7 +715,7 @@ public SearchAsYouTypeFieldMapper( MultiFields multiFields, Builder builder ) { - super(simpleName, mappedFieldType, multiFields, copyTo, false, null); + super(simpleName, mappedField, multiFields, copyTo, false, null); this.prefixField = prefixField; this.shingleFields = shingleFields; this.maxShingleSize = builder.maxShingleSize.getValue(); @@ -696,15 +739,15 @@ protected void 
parseCreateField(DocumentParserContext context) throws IOExceptio return; } - context.doc().add(new Field(fieldType().name(), value, fieldType().fieldType)); + context.doc().add(new Field(name(), value, fieldType().fieldType)); if (this.builder.index.get()) { for (ShingleFieldMapper subFieldMapper : shingleFields) { - context.doc().add(new Field(subFieldMapper.fieldType().name(), value, subFieldMapper.getLuceneFieldType())); + context.doc().add(new Field(subFieldMapper.name(), value, subFieldMapper.getLuceneFieldType())); } - context.doc().add(new Field(prefixField.fieldType().name(), value, prefixField.getLuceneFieldType())); + context.doc().add(new Field(prefixField.name(), value, prefixField.getLuceneFieldType())); } if (fieldType().fieldType.omitNorms()) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java index f5c69329e3599..1cb9731cebacb 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.index.mapper.DocValueFetcher; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; @@ -81,48 +82,34 @@ public TokenCountFieldMapper build(MapperBuilderContext context) { throw new MapperParsingException("Analyzer must be set for field [" + name + "] but wasn't."); } MappedFieldType ft = new TokenCountFieldType( - 
context.buildFullName(name), index.getValue(), store.getValue(), hasDocValues.getValue(), nullValue.getValue(), meta.getValue() ); - return new TokenCountFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this); + return new TokenCountFieldMapper( + name, + new MappedField(context.buildFullName(name), ft), + multiFieldsBuilder.build(this, context), + copyTo.build(), + this + ); } } static class TokenCountFieldType extends NumberFieldMapper.NumberFieldType { - TokenCountFieldType( - String name, - boolean isSearchable, - boolean isStored, - boolean hasDocValues, - Number nullValue, - Map meta - ) { - super( - name, - NumberFieldMapper.NumberType.INTEGER, - isSearchable, - isStored, - hasDocValues, - false, - nullValue, - meta, - null, - false, - null - ); + TokenCountFieldType(boolean isSearchable, boolean isStored, boolean hasDocValues, Number nullValue, Map meta) { + super(NumberFieldMapper.NumberType.INTEGER, isSearchable, isStored, hasDocValues, false, nullValue, meta, null, false, null); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (hasDocValues() == false) { return (lookup, ignoredValues) -> List.of(); } - return new DocValueFetcher(docValueFormat(format, null), context.getForField(this)); + return new DocValueFetcher(docValueFormat(name, format, null), context.getForField(new MappedField(name, this))); } } @@ -135,14 +122,8 @@ public ValueFetcher valueFetcher(SearchExecutionContext context, String format) private final boolean enablePositionIncrements; private final Integer nullValue; - protected TokenCountFieldMapper( - String simpleName, - MappedFieldType defaultFieldType, - MultiFields multiFields, - CopyTo copyTo, - Builder builder - ) { - super(simpleName, defaultFieldType, multiFields, copyTo); + protected TokenCountFieldMapper(String simpleName, MappedField mappedField, 
MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo); this.analyzer = builder.analyzer.getValue(); this.enablePositionIncrements = builder.enablePositionIncrements.getValue(); this.nullValue = builder.nullValue.getValue(); @@ -166,7 +147,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio tokenCount = countPositions(analyzer, name(), value, enablePositionIncrements); } - NumberFieldMapper.NumberType.INTEGER.addFields(context.doc(), fieldType().name(), tokenCount, index, hasDocValues, store); + NumberFieldMapper.NumberType.INTEGER.addFields(context.doc(), name(), tokenCount, index, hasDocValues, store); } /** diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index 6a7270fe8f5bc..d39836a73fd2f 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; @@ -143,14 +144,14 @@ public void testDisabledSource() throws IOException { mapping.endObject().endObject(); MapperService mapperService = createMapperService(mapping); - MappedFieldType ft = mapperService.fieldType("foo"); + MappedField mappedField = mapperService.mappedField("foo"); SearchExecutionContext context = createSearchExecutionContext(mapperService); TokenStream ts = new 
CannedTokenStream(new Token("a", 0, 3), new Token("b", 4, 7)); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ft.phraseQuery(ts, 0, true, context)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mappedField.phraseQuery(ts, 0, true, context)); assertThat(e.getMessage(), Matchers.containsString("cannot run positional queries since [_source] is disabled")); // Term queries are ok - ft.termQuery("a", context); // no exception + mappedField.termQuery("a", context); // no exception } @Override diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java index 082ae9624779a..572906dfeb2a2 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldTypeTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.extras.MatchOnlyTextFieldMapper.MatchOnlyTextFieldType; import org.hamcrest.Matchers; @@ -42,29 +43,32 @@ public class MatchOnlyTextFieldTypeTests extends FieldTypeTestCase { public void testTermQuery() { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); - assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field", "foo"))), ft.termQuery("foo", null)); - assertEquals(AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "fOo")), ft.termQueryCaseInsensitive("fOo", null)); + MappedFieldType ft = new MatchOnlyTextFieldType(); + assertEquals(new ConstantScoreQuery(new 
TermQuery(new Term("field", "foo"))), ft.termQuery("field", "foo", null)); + assertEquals( + AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "fOo")), + ft.termQueryCaseInsensitive("field", "fOo", null) + ); } public void testTermsQuery() { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); + MappedFieldType ft = new MatchOnlyTextFieldType(); List terms = new ArrayList<>(); terms.add(new BytesRef("foo")); terms.add(new BytesRef("bar")); - assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), null)); + assertEquals(new TermInSetQuery("field", terms), ft.termsQuery("field", Arrays.asList("foo", "bar"), null)); } public void testRangeQuery() { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); + MappedFieldType ft = new MatchOnlyTextFieldType(); assertEquals( new TermRangeQuery("field", BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"), true, false), - ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "foo", "bar", true, false, null, null, null, MOCK_CONTEXT) ); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.rangeQuery("field", "foo", "bar", true, false, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[range] queries on [text] or [keyword] fields cannot be executed when " + "'search.allow_expensive_queries' is set to false.", @@ -73,26 +77,27 @@ public void testRangeQuery() { } public void testRegexpQuery() { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); - assertEquals(new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("foo.*", 0, 0, 10, null, MOCK_CONTEXT)); + MappedFieldType ft = new MatchOnlyTextFieldType(); + assertEquals(new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("field", "foo.*", 0, 0, 10, null, MOCK_CONTEXT)); ElasticsearchException ee = 
expectThrows( ElasticsearchException.class, - () -> ft.regexpQuery("foo.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.regexpQuery("field", "foo.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testFuzzyQuery() { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); + MappedFieldType ft = new MatchOnlyTextFieldType(); assertEquals( new ConstantScoreQuery(new FuzzyQuery(new Term("field", "foo"), 2, 1, 50, true)), - ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) + ft.fuzzyQuery("field", "foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) ); ElasticsearchException ee = expectThrows( ElasticsearchException.class, () -> ft.fuzzyQuery( + "field", "foo", Fuzziness.AUTO, randomInt(10) + 1, @@ -105,11 +110,11 @@ public void testFuzzyQuery() { } public void testFetchSourceValue() throws IOException { - MatchOnlyTextFieldType fieldType = new MatchOnlyTextFieldType("field"); + MappedField mappedField = new MappedField("field", new MatchOnlyTextFieldType()); - assertEquals(List.of("value"), fetchSourceValue(fieldType, "value")); - assertEquals(List.of("42"), fetchSourceValue(fieldType, 42L)); - assertEquals(List.of("true"), fetchSourceValue(fieldType, true)); + assertEquals(List.of("value"), fetchSourceValue(mappedField, "value")); + assertEquals(List.of("42"), fetchSourceValue(mappedField, 42L)); + assertEquals(List.of("true"), fetchSourceValue(mappedField, true)); } private Query unwrapPositionalQuery(Query query) { @@ -119,18 +124,18 @@ private Query unwrapPositionalQuery(Query query) { } public void testPhraseQuery() throws IOException { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); + MappedFieldType ft = new MatchOnlyTextFieldType(); TokenStream ts = new CannedTokenStream(new Token("a", 0, 3), new Token("b", 
4, 7)); - Query query = ft.phraseQuery(ts, 0, true, MOCK_CONTEXT); + Query query = ft.phraseQuery("field", ts, 0, true, MOCK_CONTEXT); Query delegate = unwrapPositionalQuery(query); assertEquals(new PhraseQuery("field", "a", "b"), delegate); assertNotEquals(new MatchAllDocsQuery(), SourceConfirmedTextQuery.approximate(delegate)); } public void testMultiPhraseQuery() throws IOException { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); + MappedFieldType ft = new MatchOnlyTextFieldType(); TokenStream ts = new CannedTokenStream(new Token("a", 0, 3), new Token("b", 0, 0, 3), new Token("c", 4, 7)); - Query query = ft.multiPhraseQuery(ts, 0, true, MOCK_CONTEXT); + Query query = ft.multiPhraseQuery("field", ts, 0, true, MOCK_CONTEXT); Query delegate = unwrapPositionalQuery(query); MultiPhraseQuery expected = new MultiPhraseQuery.Builder().add(new Term[] { new Term("field", "a"), new Term("field", "b") }) .add(new Term("field", "c")) @@ -140,9 +145,9 @@ public void testMultiPhraseQuery() throws IOException { } public void testPhrasePrefixQuery() throws IOException { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); + MappedFieldType ft = new MatchOnlyTextFieldType(); TokenStream ts = new CannedTokenStream(new Token("a", 0, 3), new Token("b", 0, 0, 3), new Token("c", 4, 7)); - Query query = ft.phrasePrefixQuery(ts, 0, 10, MOCK_CONTEXT); + Query query = ft.phrasePrefixQuery("field", ts, 0, 10, MOCK_CONTEXT); Query delegate = unwrapPositionalQuery(query); MultiPhrasePrefixQuery expected = new MultiPhrasePrefixQuery("field"); expected.add(new Term[] { new Term("field", "a"), new Term("field", "b") }); @@ -152,29 +157,29 @@ public void testPhrasePrefixQuery() throws IOException { } public void testTermIntervals() throws IOException { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); - IntervalsSource termIntervals = ft.termIntervals(new BytesRef("foo"), MOCK_CONTEXT); + MappedFieldType ft = new MatchOnlyTextFieldType(); + IntervalsSource 
termIntervals = ft.termIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertThat(termIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); assertEquals(Intervals.term(new BytesRef("foo")), ((SourceIntervalsSource) termIntervals).getIntervalsSource()); } public void testPrefixIntervals() throws IOException { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); - IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); + MappedFieldType ft = new MatchOnlyTextFieldType(); + IntervalsSource prefixIntervals = ft.prefixIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertThat(prefixIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); assertEquals(Intervals.prefix(new BytesRef("foo")), ((SourceIntervalsSource) prefixIntervals).getIntervalsSource()); } public void testWildcardIntervals() throws IOException { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); - IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); + MappedFieldType ft = new MatchOnlyTextFieldType(); + IntervalsSource wildcardIntervals = ft.wildcardIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertThat(wildcardIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); assertEquals(Intervals.wildcard(new BytesRef("foo")), ((SourceIntervalsSource) wildcardIntervals).getIntervalsSource()); } public void testFuzzyIntervals() throws IOException { - MappedFieldType ft = new MatchOnlyTextFieldType("field"); - IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); + MappedFieldType ft = new MatchOnlyTextFieldType(); + IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("field", "foo", 1, 2, true, MOCK_CONTEXT); assertThat(fuzzyIntervals, Matchers.instanceOf(SourceIntervalsSource.class)); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapperTests.java 
b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapperTests.java index b906803b04c8c..0c57269bc2988 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapperTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperTestCase; @@ -45,7 +46,7 @@ protected void registerParameters(ParameterChecker checker) throws IOException { } @Override - protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { + protected void assertExistsQuery(MappedField mappedField, Query query, LuceneDocument fields) { assertThat(query, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) query; assertEquals("_feature", termQuery.getTerm().field()); @@ -54,9 +55,9 @@ protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneD } @Override - protected void assertSearchable(MappedFieldType fieldType) { + protected void assertSearchable(MappedField mappedField) { // always searchable even if it uses TextSearchInfo.NONE - assertTrue(fieldType.isSearchable()); + assertTrue(mappedField.isSearchable()); } @Override diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldTypeTests.java index 57c931992f5db..b39639c73fc3d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldTypeTests.java +++ 
b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldTypeTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.extras; import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -19,12 +20,12 @@ public class RankFeatureFieldTypeTests extends FieldTypeTestCase { public void testIsNotAggregatable() { - MappedFieldType fieldType = new RankFeatureFieldMapper.RankFeatureFieldType("field", Collections.emptyMap(), true); - assertFalse(fieldType.isAggregatable()); + MappedFieldType fieldType = new RankFeatureFieldMapper.RankFeatureFieldType(Collections.emptyMap(), true); + assertFalse(fieldType.isAggregatable("field")); } public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new RankFeatureFieldMapper.Builder("field").build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new RankFeatureFieldMapper.Builder("field").build(MapperBuilderContext.ROOT).field(); assertEquals(List.of(3.14f), fetchSourceValue(mapper, 3.14)); assertEquals(List.of(42.9f), fetchSourceValue(mapper, "42.9")); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeaturesFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeaturesFieldTypeTests.java index 48a31691ac1bc..4d0cc9a7b0eca 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeaturesFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeaturesFieldTypeTests.java @@ -16,7 +16,7 @@ public class RankFeaturesFieldTypeTests extends FieldTypeTestCase { public void testIsNotAggregatable() { - MappedFieldType fieldType = new RankFeaturesFieldMapper.RankFeaturesFieldType("field", Collections.emptyMap(), true); - 
assertFalse(fieldType.isAggregatable()); + MappedFieldType fieldType = new RankFeaturesFieldMapper.RankFeaturesFieldType(Collections.emptyMap(), true); + assertFalse(fieldType.isAggregatable("field")); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java index 52d524ca0ea02..818e592186024 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; @@ -294,8 +295,8 @@ public void testRejectIndexOptions() { public void testMetricType() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - ScaledFloatFieldMapper.ScaledFloatFieldType ft = (ScaledFloatFieldMapper.ScaledFloatFieldType) mapperService.fieldType("field"); - assertNull(ft.getMetricType()); + MappedField mappedField = mapperService.mappedField("field"); + assertNull(mappedField.getMetricType()); assertMetricType("gauge", ScaledFloatFieldMapper.ScaledFloatFieldType::getMetricType); assertMetricType("counter", ScaledFloatFieldMapper.ScaledFloatFieldType::getMetricType); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java index 
7f1094280bbda..6f16752c1040e 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.fielddata.LeafNumericFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -37,16 +38,16 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { public void testTermQuery() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType( - "scaled_float", - 0.1 + randomDouble() * 100 - ); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(0.1 + randomDouble() * 100); double value = (randomDouble() * 2 - 1) * 10000; long scaledValue = Math.round(value * ft.getScalingFactor()); - assertEquals(LongPoint.newExactQuery("scaled_float", scaledValue), ft.termQuery(value, MOCK_CONTEXT)); + assertEquals(LongPoint.newExactQuery("scaled_float", scaledValue), ft.termQuery("scaled_float", value, MOCK_CONTEXT)); - MappedFieldType ft2 = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 0.1 + randomDouble() * 100, false); - ElasticsearchException e2 = expectThrows(ElasticsearchException.class, () -> ft2.termQuery("42", MOCK_CONTEXT_DISALLOW_EXPENSIVE)); + MappedFieldType ft2 = new ScaledFloatFieldMapper.ScaledFloatFieldType(0.1 + randomDouble() * 100, false); + ElasticsearchException e2 = expectThrows( + ElasticsearchException.class, + () -> ft2.termQuery("scaled_float", "42", MOCK_CONTEXT_DISALLOW_EXPENSIVE) + ); assertEquals( "Cannot search on field [scaled_float] 
since it is not indexed and 'search.allow_expensive_queries' is set to false.", e2.getMessage() @@ -54,23 +55,20 @@ public void testTermQuery() { } public void testTermsQuery() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType( - "scaled_float", - 0.1 + randomDouble() * 100 - ); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(0.1 + randomDouble() * 100); double value1 = (randomDouble() * 2 - 1) * 10000; long scaledValue1 = Math.round(value1 * ft.getScalingFactor()); double value2 = (randomDouble() * 2 - 1) * 10000; long scaledValue2 = Math.round(value2 * ft.getScalingFactor()); assertEquals( LongPoint.newSetQuery("scaled_float", scaledValue1, scaledValue2), - ft.termsQuery(Arrays.asList(value1, value2), MOCK_CONTEXT) + ft.termsQuery("scaled_float", Arrays.asList(value1, value2), MOCK_CONTEXT) ); - MappedFieldType ft2 = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 0.1 + randomDouble() * 100, false); + MappedFieldType ft2 = new ScaledFloatFieldMapper.ScaledFloatFieldType(0.1 + randomDouble() * 100, false); ElasticsearchException e2 = expectThrows( ElasticsearchException.class, - () -> ft2.termsQuery(Arrays.asList(value1, value2), MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft2.termsQuery("scaled_float", Arrays.asList(value1, value2), MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "Cannot search on field [scaled_float] since it is not indexed and 'search.allow_expensive_queries' is set to false.", @@ -83,7 +81,6 @@ public void testRangeQuery() throws IOException { // this test checks that searching scaled floats yields the same results as // searching doubles that are rounded to the closest half float ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType( - "scaled_float", randomBoolean(), false, true, @@ -125,51 +122,48 @@ public void testRangeQuery() throws IOException { MOCK_CONTEXT, randomBoolean() ); - 
Query scaledFloatQ = ft.rangeQuery(l, u, includeLower, includeUpper, MOCK_CONTEXT); + Query scaledFloatQ = ft.rangeQuery("scaled_float", l, u, includeLower, includeUpper, MOCK_CONTEXT); assertEquals(searcher.count(doubleQ), searcher.count(scaledFloatQ)); } IOUtils.close(reader, dir); } public void testRoundsUpperBoundCorrectly() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); - Query scaledFloatQ = ft.rangeQuery(null, 0.1, true, false, MOCK_CONTEXT); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(100); + Query scaledFloatQ = ft.rangeQuery("scaled_float", null, 0.1, true, false, MOCK_CONTEXT); assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(null, 0.1, true, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", null, 0.1, true, true, MOCK_CONTEXT); assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(null, 0.095, true, false, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", null, 0.095, true, false, MOCK_CONTEXT); assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(null, 0.095, true, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", null, 0.095, true, true, MOCK_CONTEXT); assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(null, 0.105, true, false, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", null, 0.105, true, false, MOCK_CONTEXT); assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(null, 0.105, true, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", null, 0.105, true, true, MOCK_CONTEXT); assertEquals("scaled_float:[-9223372036854775808 TO 10]", 
scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(null, 79.99, true, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", null, 79.99, true, true, MOCK_CONTEXT); assertEquals("scaled_float:[-9223372036854775808 TO 7999]", scaledFloatQ.toString()); } public void testRoundsLowerBoundCorrectly() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); - Query scaledFloatQ = ft.rangeQuery(-0.1, null, false, true, MOCK_CONTEXT); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(100); + Query scaledFloatQ = ft.rangeQuery("scaled_float", -0.1, null, false, true, MOCK_CONTEXT); assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(-0.1, null, true, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", -0.1, null, true, true, MOCK_CONTEXT); assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(-0.095, null, false, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", -0.095, null, false, true, MOCK_CONTEXT); assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(-0.095, null, true, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", -0.095, null, true, true, MOCK_CONTEXT); assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(-0.105, null, false, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", -0.105, null, false, true, MOCK_CONTEXT); assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); - scaledFloatQ = ft.rangeQuery(-0.105, null, true, true, MOCK_CONTEXT); + scaledFloatQ = ft.rangeQuery("scaled_float", -0.105, null, true, true, MOCK_CONTEXT); assertEquals("scaled_float:[-10 TO 9223372036854775807]", 
scaledFloatQ.toString()); } public void testValueForSearch() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType( - "scaled_float", - 0.1 + randomDouble() * 100 - ); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(0.1 + randomDouble() * 100); assertNull(ft.valueForDisplay(null)); assertEquals(10 / ft.getScalingFactor(), ft.valueForDisplay(10L)); } @@ -185,11 +179,9 @@ public void testFieldData() throws IOException { w.addDocument(doc); try (DirectoryReader reader = DirectoryReader.open(w)) { // single-valued - ScaledFloatFieldMapper.ScaledFloatFieldType f1 = new ScaledFloatFieldMapper.ScaledFloatFieldType( - "scaled_float1", - scalingFactor - ); + ScaledFloatFieldMapper.ScaledFloatFieldType f1 = new ScaledFloatFieldMapper.ScaledFloatFieldType(scalingFactor); IndexNumericFieldData fielddata = (IndexNumericFieldData) f1.fielddataBuilder( + "scaled_float1", "index", () -> { throw new UnsupportedOperationException(); } ).build(null, null); @@ -201,12 +193,12 @@ public void testFieldData() throws IOException { assertEquals(10 / f1.getScalingFactor(), values.nextValue(), 10e-5); // multi-valued - ScaledFloatFieldMapper.ScaledFloatFieldType f2 = new ScaledFloatFieldMapper.ScaledFloatFieldType( + ScaledFloatFieldMapper.ScaledFloatFieldType f2 = new ScaledFloatFieldMapper.ScaledFloatFieldType(scalingFactor); + fielddata = (IndexNumericFieldData) f2.fielddataBuilder( "scaled_float2", - scalingFactor - ); - fielddata = (IndexNumericFieldData) f2.fielddataBuilder("index", () -> { throw new UnsupportedOperationException(); }) - .build(null, null); + "index", + () -> { throw new UnsupportedOperationException(); } + ).build(null, null); leafFieldData = fielddata.load(reader.leaves().get(0)); values = leafFieldData.getDoubleValues(); assertTrue(values.advanceExact(0)); @@ -218,17 +210,17 @@ public void testFieldData() throws IOException { } public void testFetchSourceValue() throws 
IOException { - MappedFieldType mapper = new ScaledFloatFieldMapper.Builder("field", false, false).scalingFactor(100) + MappedField mapper = new ScaledFloatFieldMapper.Builder("field", false, false).scalingFactor(100) .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); assertEquals(List.of(3.14), fetchSourceValue(mapper, 3.1415926)); assertEquals(List.of(3.14), fetchSourceValue(mapper, "3.1415")); assertEquals(List.of(), fetchSourceValue(mapper, "")); - MappedFieldType nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false).scalingFactor(100) + MappedField nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false).scalingFactor(100) .nullValue(2.71) .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); assertEquals(List.of(2.71), fetchSourceValue(nullValueMapper, "")); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapperTests.java index f63c8784f4c06..0b09851f81955 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapperTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; @@ -178,14 +179,14 @@ public void testDefaultConfiguration() throws IOException { rootMapper.indexAnalyzers(), 2, "default", - prefixFieldMapper.fieldType() + prefixFieldMapper.field() ); assertShingleFieldType( getShingleFieldMapper(defaultMapper, 
"field._3gram"), rootMapper.indexAnalyzers(), 3, "default", - prefixFieldMapper.fieldType() + prefixFieldMapper.field() ); } @@ -209,21 +210,21 @@ public void testConfiguration() throws IOException { rootMapper.indexAnalyzers(), 2, analyzerName, - prefixFieldMapper.fieldType() + prefixFieldMapper.field() ); assertShingleFieldType( getShingleFieldMapper(defaultMapper, "field._3gram"), rootMapper.indexAnalyzers(), 3, analyzerName, - prefixFieldMapper.fieldType() + prefixFieldMapper.field() ); assertShingleFieldType( getShingleFieldMapper(defaultMapper, "field._4gram"), rootMapper.indexAnalyzers(), 4, analyzerName, - prefixFieldMapper.fieldType() + prefixFieldMapper.field() ); } @@ -280,22 +281,23 @@ private void assertMultiField(int shingleSize, MapperService mapperService, Stri List fields = new ArrayList<>(); fields.add(suggestPath); fields.add(textPath); - MappedFieldType fieldType = mapperService.fieldType(suggestPath + "._index_prefix"); - assertThat(fieldType, instanceOf(PrefixFieldType.class)); - PrefixFieldType prefixFieldType = (PrefixFieldType) fieldType; + MappedField mappedField = mapperService.mappedField(suggestPath + "._index_prefix"); + assertThat(mappedField.type(), instanceOf(PrefixFieldType.class)); + MappedField prefixField = mappedField; + PrefixFieldType prefixFieldType = (PrefixFieldType) mappedField.type(); assertEquals(suggestPath, prefixFieldType.parentField); for (int i = 2; i < shingleSize; i++) { String name = suggestPath + "._" + i + "gram"; fields.add(name); - fieldType = mapperService.fieldType(name); - assertThat(fieldType, instanceOf(ShingleFieldType.class)); - ShingleFieldType ft = (ShingleFieldType) fieldType; + mappedField = mapperService.mappedField(name); + assertThat(mappedField.type(), instanceOf(ShingleFieldType.class)); + ShingleFieldType ft = (ShingleFieldType) mappedField.type(); assertEquals(i, ft.shingleSize); - assertSame(prefixFieldType, ft.prefixFieldType); + assertSame(prefixField, ft.prefixField); } - 
MappedFieldType textFieldType = mapperService.fieldType(textPath); - assertThat(textFieldType, instanceOf(TextFieldMapper.TextFieldType.class)); + MappedField textField = mapperService.mappedField(textPath); + assertThat(textField.type(), instanceOf(TextFieldMapper.TextFieldType.class)); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", "new york city"))); for (String field : fields) { @@ -681,7 +683,7 @@ private static void assertRootFieldMapper(SearchAsYouTypeFieldMapper mapper, int assertThat(mapper.maxShingleSize(), equalTo(maxShingleSize)); assertThat(mapper.fieldType(), notNullValue()); - assertSearchAsYouTypeFieldType(mapper, mapper.fieldType(), maxShingleSize, analyzerName, mapper.prefixField().fieldType()); + assertSearchAsYouTypeFieldType(mapper, mapper.field(), maxShingleSize, analyzerName, mapper.prefixField().field()); assertThat(mapper.prefixField(), notNullValue()); assertThat(mapper.prefixField().fieldType().parentField, equalTo(mapper.name())); @@ -690,13 +692,7 @@ private static void assertRootFieldMapper(SearchAsYouTypeFieldMapper mapper, int for (int shingleSize = 2; shingleSize <= maxShingleSize; shingleSize++) { final ShingleFieldMapper shingleFieldMapper = mapper.shingleFields()[shingleSize - 2]; assertThat(shingleFieldMapper, notNullValue()); - assertShingleFieldType( - shingleFieldMapper, - mapper.indexAnalyzers(), - shingleSize, - analyzerName, - mapper.prefixField().fieldType() - ); + assertShingleFieldType(shingleFieldMapper, mapper.indexAnalyzers(), shingleSize, analyzerName, mapper.prefixField().field()); } final int numberOfShingleSubfields = (maxShingleSize - 2) + 1; @@ -705,23 +701,23 @@ private static void assertRootFieldMapper(SearchAsYouTypeFieldMapper mapper, int private static void assertSearchAsYouTypeFieldType( SearchAsYouTypeFieldMapper mapper, - SearchAsYouTypeFieldType fieldType, + MappedField mappedField, int maxShingleSize, String analyzerName, - PrefixFieldType prefixFieldType + 
MappedField prefixField ) { - + SearchAsYouTypeFieldType fieldType = (SearchAsYouTypeFieldType) mappedField.type(); assertThat(fieldType.shingleFields.length, equalTo(maxShingleSize - 1)); - NamedAnalyzer indexAnalyzer = mapper.indexAnalyzers().get(fieldType.name()); - for (NamedAnalyzer analyzer : asList(indexAnalyzer, fieldType.getTextSearchInfo().searchAnalyzer())) { + NamedAnalyzer indexAnalyzer = mapper.indexAnalyzers().get(mappedField.name()); + for (NamedAnalyzer analyzer : asList(indexAnalyzer, mappedField.getTextSearchInfo().searchAnalyzer())) { assertThat(analyzer.name(), equalTo(analyzerName)); } int shingleSize = 2; for (ShingleFieldMapper shingleField : mapper.shingleFields()) { - assertShingleFieldType(shingleField, mapper.indexAnalyzers(), shingleSize++, analyzerName, prefixFieldType); + assertShingleFieldType(shingleField, mapper.indexAnalyzers(), shingleSize++, analyzerName, prefixField); } - assertThat(fieldType.prefixField, equalTo(prefixFieldType)); + assertThat(fieldType.prefixField, equalTo(prefixField)); } private static void assertShingleFieldType( @@ -729,13 +725,14 @@ private static void assertShingleFieldType( Map indexAnalyzers, int shingleSize, String analyzerName, - PrefixFieldType prefixFieldType + MappedField prefixField ) { - ShingleFieldType fieldType = mapper.fieldType(); + MappedField mappedField = mapper.field(); + ShingleFieldType fieldType = (ShingleFieldType) mappedField.type(); assertThat(fieldType.shingleSize, equalTo(shingleSize)); - for (NamedAnalyzer analyzer : asList(indexAnalyzers.get(fieldType.name()), fieldType.getTextSearchInfo().searchAnalyzer())) { + for (NamedAnalyzer analyzer : asList(indexAnalyzers.get(mappedField.name()), mappedField.getTextSearchInfo().searchAnalyzer())) { assertThat(analyzer.name(), equalTo(analyzerName)); if (shingleSize > 1) { final SearchAsYouTypeAnalyzer wrappedAnalyzer = (SearchAsYouTypeAnalyzer) analyzer.analyzer(); @@ -744,7 +741,7 @@ private static void assertShingleFieldType( } } - 
assertThat(fieldType.prefixFieldType, equalTo(prefixFieldType)); + assertThat(fieldType.prefixField, equalTo(prefixField)); } @@ -754,14 +751,14 @@ private static void assertPrefixFieldType( int shingleSize, String analyzerName ) { - PrefixFieldType fieldType = mapper.fieldType(); - NamedAnalyzer indexAnalyzer = indexAnalyzers.get(fieldType.name()); - for (NamedAnalyzer analyzer : asList(indexAnalyzer, fieldType.getTextSearchInfo().searchAnalyzer())) { + MappedField mappedField = mapper.field(); + NamedAnalyzer indexAnalyzer = indexAnalyzers.get(mappedField.name()); + for (NamedAnalyzer analyzer : asList(indexAnalyzer, mappedField.getTextSearchInfo().searchAnalyzer())) { assertThat(analyzer.name(), equalTo(analyzerName)); } final SearchAsYouTypeAnalyzer wrappedIndexAnalyzer = (SearchAsYouTypeAnalyzer) indexAnalyzer.analyzer(); - final SearchAsYouTypeAnalyzer wrappedSearchAnalyzer = (SearchAsYouTypeAnalyzer) fieldType.getTextSearchInfo() + final SearchAsYouTypeAnalyzer wrappedSearchAnalyzer = (SearchAsYouTypeAnalyzer) mappedField.getTextSearchInfo() .searchAnalyzer() .analyzer(); for (SearchAsYouTypeAnalyzer analyzer : asList(wrappedIndexAnalyzer, wrappedSearchAnalyzer)) { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldTypeTests.java index 5ad01f1922e34..26d46e12c9fb5 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldTypeTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import 
org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.mapper.extras.SearchAsYouTypeFieldMapper.Defaults; @@ -51,32 +52,35 @@ public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase { private static SearchAsYouTypeFieldType createFieldType() { final SearchAsYouTypeFieldType fieldType = new SearchAsYouTypeFieldType( - NAME, SEARCHABLE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER, Collections.emptyMap() ); - fieldType.setPrefixField(new PrefixFieldType(NAME, TextSearchInfo.SIMPLE_MATCH_ONLY, Defaults.MIN_GRAM, Defaults.MAX_GRAM)); - fieldType.setShingleFields(new ShingleFieldType[] { new ShingleFieldType(fieldType.name(), 2, TextSearchInfo.SIMPLE_MATCH_ONLY) }); + fieldType.setPrefixField( + PrefixFieldType.newMappedField( + NAME, + new PrefixFieldType(NAME, TextSearchInfo.SIMPLE_MATCH_ONLY, Defaults.MIN_GRAM, Defaults.MAX_GRAM) + ) + ); + fieldType.setShingleFields(new MappedField[] { new MappedField(NAME, new ShingleFieldType(2, TextSearchInfo.SIMPLE_MATCH_ONLY)) }); return fieldType; } public void testTermQuery() { final MappedFieldType fieldType = createFieldType(); - assertThat(fieldType.termQuery("foo", null), equalTo(new TermQuery(new Term(NAME, "foo")))); + assertThat(fieldType.termQuery(NAME, "foo", null), equalTo(new TermQuery(new Term(NAME, "foo")))); SearchAsYouTypeFieldType unsearchable = new SearchAsYouTypeFieldType( - NAME, UNSEARCHABLE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER, Collections.emptyMap() ); - final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("foo", null)); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery(NAME, "foo", null)); assertThat(e.getMessage(), equalTo("Cannot search on field [" + NAME + "] since it is not indexed.")); } @@ -84,12 +88,11 @@ public void testTermsQuery() { final MappedFieldType fieldType = createFieldType(); assertThat( - 
fieldType.termsQuery(asList("foo", "bar"), null), + fieldType.termsQuery(NAME, asList("foo", "bar"), null), equalTo(new TermInSetQuery(NAME, asList(new BytesRef("foo"), new BytesRef("bar")))) ); SearchAsYouTypeFieldType unsearchable = new SearchAsYouTypeFieldType( - NAME, UNSEARCHABLE, null, Lucene.STANDARD_ANALYZER, @@ -98,7 +101,7 @@ public void testTermsQuery() { ); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.termsQuery(asList("foo", "bar"), null) + () -> unsearchable.termsQuery(NAME, asList("foo", "bar"), null) ); assertThat(e.getMessage(), equalTo("Cannot search on field [" + NAME + "] since it is not indexed.")); } @@ -109,7 +112,7 @@ public void testPrefixQuery() { // this term should be a length that can be rewriteable to a term query on the prefix field final String withinBoundsTerm = "foo"; assertThat( - fieldType.prefixQuery(withinBoundsTerm, CONSTANT_SCORE_REWRITE, randomMockContext()), + fieldType.prefixQuery(NAME, withinBoundsTerm, CONSTANT_SCORE_REWRITE, randomMockContext()), equalTo(new ConstantScoreQuery(new TermQuery(new Term(NAME + "._index_prefix", withinBoundsTerm)))) ); @@ -118,13 +121,13 @@ public void testPrefixQuery() { // this term should be too long to be rewriteable to a term query on the prefix field final String longTerm = "toolongforourprefixfieldthistermis"; assertThat( - fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, MOCK_CONTEXT), + fieldType.prefixQuery(NAME, longTerm, CONSTANT_SCORE_REWRITE, MOCK_CONTEXT), equalTo(new PrefixQuery(new Term(NAME, longTerm))) ); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> fieldType.prefixQuery(NAME, longTerm, CONSTANT_SCORE_REWRITE, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. 
" @@ -134,26 +137,23 @@ public void testPrefixQuery() { } public void testFetchSourceValue() throws IOException { - SearchAsYouTypeFieldType fieldType = createFieldType(); + MappedField mappedField = new MappedField(NAME, createFieldType()); - assertEquals(List.of("value"), fetchSourceValue(fieldType, "value")); - assertEquals(List.of("42"), fetchSourceValue(fieldType, 42L)); - assertEquals(List.of("true"), fetchSourceValue(fieldType, true)); + assertEquals(List.of("value"), fetchSourceValue(mappedField, "value")); + assertEquals(List.of("42"), fetchSourceValue(mappedField, 42L)); + assertEquals(List.of("true"), fetchSourceValue(mappedField, true)); - SearchAsYouTypeFieldMapper.PrefixFieldType prefixFieldType = new SearchAsYouTypeFieldMapper.PrefixFieldType( - fieldType.name(), - fieldType.getTextSearchInfo(), - 2, - 10 + MappedField prefixField = PrefixFieldType.newMappedField( + mappedField.name(), + new SearchAsYouTypeFieldMapper.PrefixFieldType(mappedField.name(), mappedField.getTextSearchInfo(), 2, 10) ); - assertEquals(List.of("value"), fetchSourceValue(prefixFieldType, "value")); - assertEquals(List.of("42"), fetchSourceValue(prefixFieldType, 42L)); - assertEquals(List.of("true"), fetchSourceValue(prefixFieldType, true)); - - SearchAsYouTypeFieldMapper.ShingleFieldType shingleFieldType = new SearchAsYouTypeFieldMapper.ShingleFieldType( - fieldType.name(), - 5, - fieldType.getTextSearchInfo() + assertEquals(List.of("value"), fetchSourceValue(prefixField, "value")); + assertEquals(List.of("42"), fetchSourceValue(prefixField, 42L)); + assertEquals(List.of("true"), fetchSourceValue(prefixField, true)); + + MappedField shingleFieldType = new MappedField( + mappedField.name(), + new SearchAsYouTypeFieldMapper.ShingleFieldType(5, mappedField.getTextSearchInfo()) ); assertEquals(List.of("value"), fetchSourceValue(shingleFieldType, "value")); assertEquals(List.of("42"), fetchSourceValue(shingleFieldType, 42L)); diff --git 
a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java index 1594e4e817b96..f9c9c309aea8b 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.join.mapper.Joiner; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -112,8 +112,8 @@ protected ValuesSourceConfig resolveConfig(AggregationContext context) { parentFilter = joiner.parentFilter(childType); childFilter = joiner.filter(childType); - MappedFieldType fieldType = context.getFieldType(joiner.parentJoinField(childType)); - config = ValuesSourceConfig.resolveFieldOnly(fieldType, context); + MappedField mappedField = context.getMappedField(joiner.parentJoinField(childType)); + config = ValuesSourceConfig.resolveFieldOnly(mappedField, context); return config; } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentAggregationBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentAggregationBuilder.java index 1b6cca403abad..5d8716b3f445d 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentAggregationBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentAggregationBuilder.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.ParsingException; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.join.mapper.Joiner; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -106,8 +106,8 @@ protected ValuesSourceConfig resolveConfig(AggregationContext context) { if (joiner != null && joiner.childTypeExists(childType)) { parentFilter = joiner.parentFilter(childType); childFilter = joiner.filter(childType); - MappedFieldType fieldType = context.getFieldType(joiner.parentJoinField(childType)); - config = ValuesSourceConfig.resolveFieldOnly(fieldType, context); + MappedField mappedField = context.getMappedField(joiner.parentJoinField(childType)); + config = ValuesSourceConfig.resolveFieldOnly(mappedField, context); } else { // unmapped case config = ValuesSourceConfig.resolveUnmapped(defaultValueSourceType(), context); diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/Joiner.java b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/Joiner.java index 9a4911fb8c4d0..c5a2168f3351e 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/Joiner.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/Joiner.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.join.mapper.ParentJoinFieldMapper.JoinFieldType; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -37,21 +37,23 @@ public final class Joiner { * Get the Joiner for this context, or {@code null} if 
none is configured */ public static Joiner getJoiner(SearchExecutionContext context) { - return getJoiner(context.getMatchingFieldNames("*").stream().map(context::getFieldType)); + return getJoiner(context.getMatchingFieldNames("*").stream().map(context::getMappedField)); } /** * Get the Joiner for this context, or {@code null} if none is configured */ public static Joiner getJoiner(AggregationContext context) { - return getJoiner(context.getMatchingFieldNames("*").stream().map(context::getFieldType)); + return getJoiner(context.getMatchingFieldNames("*").stream().map(context::getMappedField)); } /** * Get the Joiner for this context, or {@code null} if none is configured */ - static Joiner getJoiner(Stream fieldTypes) { - Optional joinType = fieldTypes.filter(ft -> ft instanceof JoinFieldType).map(ft -> (JoinFieldType) ft).findFirst(); + static Joiner getJoiner(Stream mappedFields) { + Optional joinType = mappedFields.filter(ft -> ft.type() instanceof JoinFieldType) + .map(ft -> (JoinFieldType) ft.type()) + .findFirst(); return joinType.map(JoinFieldType::getJoiner).orElse(null); } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentIdFieldMapper.java b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentIdFieldMapper.java index be7bc800cc3e2..4915a8db2dba9 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentIdFieldMapper.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentIdFieldMapper.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.mapper.ValueFetcher; @@ -56,8 +57,8 @@ public static final class 
ParentIdFieldType extends StringFieldType { private final boolean eagerGlobalOrdinals; - public ParentIdFieldType(String name, boolean eagerGlobalOrdinals) { - super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + public ParentIdFieldType(boolean eagerGlobalOrdinals) { + super(true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); this.eagerGlobalOrdinals = eagerGlobalOrdinals; } @@ -72,10 +73,10 @@ public boolean eagerGlobalOrdinals() { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); return new SortedSetOrdinalsIndexFieldData.Builder( - name(), + name, CoreValuesSourceType.KEYWORD, (dv, n) -> new DelegateDocValuesField( new ScriptDocValues.Strings(new ScriptDocValues.StringsSupplier(FieldData.toString(dv))), @@ -85,7 +86,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { // Although this is an internal field, we return it in the list of all field types. So we // provide an empty value fetcher here instead of throwing an error. 
return (lookup, ignoredValues) -> List.of(); @@ -102,12 +103,12 @@ public Object valueForDisplay(Object value) { } protected ParentIdFieldMapper(String name, boolean eagerGlobalOrdinals) { - super(name, new ParentIdFieldType(name, eagerGlobalOrdinals), MultiFields.empty(), CopyTo.empty(), false, null); + super(name, new MappedField(name, new ParentIdFieldType(eagerGlobalOrdinals)), MultiFields.empty(), CopyTo.empty(), false, null); } @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override @@ -117,9 +118,9 @@ protected void parseCreateField(DocumentParserContext context) { public void indexValue(DocumentParserContext context, String refId) { BytesRef binaryValue = new BytesRef(refId); - Field field = new Field(fieldType().name(), binaryValue, Defaults.FIELD_TYPE); + Field field = new Field(name(), binaryValue, Defaults.FIELD_TYPE); context.doc().add(field); - context.doc().add(new SortedDocValuesField(fieldType().name(), binaryValue)); + context.doc().add(new SortedDocValuesField(name(), binaryValue)); } @Override diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentJoinFieldMapper.java b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentJoinFieldMapper.java index 9ac5ee4938f83..fa51d17843ad5 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentJoinFieldMapper.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/mapper/ParentJoinFieldMapper.java @@ -22,7 +22,7 @@ import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import 
org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MappingLookup; @@ -135,7 +135,7 @@ public ParentJoinFieldMapper build(MapperBuilderContext context) { Joiner joiner = new Joiner(name(), relations.get()); return new ParentJoinFieldMapper( name, - new JoinFieldType(context.buildFullName(name), joiner, meta.get()), + new MappedField(context.buildFullName(name), new JoinFieldType(joiner, meta.get())), Collections.unmodifiableMap(parentIdFields), eagerGlobalOrdinals.get(), relations.get() @@ -152,8 +152,8 @@ public static final class JoinFieldType extends StringFieldType { private final Joiner joiner; - private JoinFieldType(String name, Joiner joiner, Map meta) { - super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); + private JoinFieldType(Joiner joiner, Map meta) { + super(true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); this.joiner = joiner; } @@ -167,9 +167,9 @@ public String typeName() { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { return new SortedSetOrdinalsIndexFieldData.Builder( - name(), + name, CoreValuesSourceType.KEYWORD, (dv, n) -> new DelegateDocValuesField( new ScriptDocValues.Strings(new ScriptDocValues.StringsSupplier(FieldData.toString(dv))), @@ -179,11 +179,11 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - 
return SourceValueFetcher.identity(name(), context, format); + return SourceValueFetcher.identity(name, context, format); } @Override @@ -208,12 +208,12 @@ private static boolean checkRelationsConflicts(List previous, List parentIdFields, boolean eagerGlobalOrdinals, List relations ) { - super(simpleName, mappedFieldType, MultiFields.empty(), CopyTo.empty(), false, null); + super(simpleName, mappedField, MultiFields.empty(), CopyTo.empty(), false, null); this.parentIdFields = parentIdFields; this.eagerGlobalOrdinals = eagerGlobalOrdinals; this.relations = relations; @@ -221,7 +221,7 @@ protected ParentJoinFieldMapper( @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override @@ -304,9 +304,9 @@ public void parse(DocumentParserContext context) throws IOException { } BytesRef binaryValue = new BytesRef(name); - Field field = new Field(fieldType().name(), binaryValue, Defaults.FIELD_TYPE); + Field field = new Field(name(), binaryValue, Defaults.FIELD_TYPE); context.doc().add(field); - context.doc().add(new SortedDocValuesField(fieldType().name(), binaryValue)); + context.doc().add(new SortedDocValuesField(name(), binaryValue)); context.path().remove(); } @@ -334,9 +334,10 @@ public FieldMapper.Builder getMergeBuilder() { protected void doValidate(MappingLookup mappingLookup) { List joinFields = mappingLookup.getMatchingFieldNames("*") .stream() - .map(mappingLookup::getFieldType) - .filter(ft -> ft instanceof JoinFieldType) - .map(MappedFieldType::name) + .map(mappingLookup::getMappedField) + .filter(Objects::nonNull) + .filter(mappedField -> mappedField.type() instanceof JoinFieldType) + .map(MappedField::name) .collect(Collectors.toList()); if (joinFields.size() > 1) { throw new IllegalArgumentException("Only one [parent-join] field can be defined per index, got " + joinFields); diff --git 
a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java index d877db6a7ff3c..cfefa043bc750 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.InnerHitContextBuilder; @@ -343,8 +343,8 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { Query parentFilter = joiner.parentFilter(type); Query childFilter = joiner.filter(type); Query filteredQuery = Queries.filtered(query.toQuery(context), childFilter); - MappedFieldType ft = context.getFieldType(parentJoinField); - final SortedSetOrdinalsIndexFieldData fieldData = context.getForField(ft); + MappedField mappedField = context.getMappedField(parentJoinField); + final SortedSetOrdinalsIndexFieldData fieldData = context.getForField(mappedField); return new LateParsingQuery( parentFilter, filteredQuery, diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java index 64c3e627df75f..b0b21ac179360 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java @@ -17,7 +17,7 @@ import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.InnerHitContextBuilder; @@ -180,14 +180,14 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { Query parentFilter = joiner.filter(parentType); Query innerQuery = Queries.filtered(query.toQuery(context), parentFilter); Query childFilter = joiner.childrenFilter(parentType); - MappedFieldType fieldType = context.getFieldType(joiner.childJoinField(parentType)); - final SortedSetOrdinalsIndexFieldData fieldData = context.getForField(fieldType); + MappedField mappedField = context.getMappedField(joiner.childJoinField(parentType)); + final SortedSetOrdinalsIndexFieldData fieldData = context.getForField(mappedField); return new HasChildQueryBuilder.LateParsingQuery( childFilter, innerQuery, HasChildQueryBuilder.DEFAULT_MIN_CHILDREN, HasChildQueryBuilder.DEFAULT_MAX_CHILDREN, - fieldType.name(), + mappedField.name(), score ? 
ScoreMode.Max : ScoreMode.None, fieldData, context.getSearchSimilarity() diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index 6a644cdcdc5ca..0255289682d7d 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -118,7 +118,7 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { if (parentId == null) { return new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN); } - q = this.context.getSearchExecutionContext().getFieldType(IdFieldMapper.NAME).termQuery(parentId, context); + q = this.context.getSearchExecutionContext().getMappedField(IdFieldMapper.NAME).termQuery(parentId, context); } Weight weight = this.context.searcher().createWeight(this.context.searcher().rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1f); diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index 00028af11a9eb..f9905337e530a 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.Uid; @@ -267,8 
+267,8 @@ private void testCase(Query query, IndexSearcher indexSearcher, Consumer getSearchPlugins() { return Collections.singletonList(new ParentJoinPlugin()); } - static MappedFieldType[] withJoinFields(MappedFieldType... fieldTypes) { - MappedFieldType[] result = new MappedFieldType[fieldTypes.length + 2]; - System.arraycopy(fieldTypes, 0, result, 0, fieldTypes.length); + static MappedField[] withJoinFields(MappedField... mappedFields) { + MappedField[] result = new MappedField[mappedFields.length + 2]; + System.arraycopy(mappedFields, 0, result, 0, mappedFields.length); - int i = fieldTypes.length; + int i = mappedFields.length; result[i++] = new ParentJoinFieldMapper.Builder("join_field").addRelation(PARENT_TYPE, Collections.singleton(CHILD_TYPE)) .build(MapperBuilderContext.ROOT) - .fieldType(); - result[i++] = new ParentIdFieldMapper.ParentIdFieldType("join_field#" + PARENT_TYPE, false); + .field(); + result[i++] = new MappedField("join_field#" + PARENT_TYPE, new ParentIdFieldMapper.ParentIdFieldType(false)); assert i == result.length; return result; } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index df961f90f131e..c1297d8dc57f7 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -27,7 +27,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.shard.ShardId; @@ 
-113,7 +113,7 @@ public void testParentChild() throws IOException { } public void testParentChildAsSubAgg() throws IOException { - MappedFieldType kwd = new KeywordFieldMapper.KeywordFieldType("kwd", randomBoolean(), true, Collections.emptyMap()); + MappedField kwd = new MappedField("kwd", new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap())); try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -220,8 +220,8 @@ private void testCase(Query query, IndexSearcher indexSearcher, Consumer parentValue = Map.of("relation", "parent"); - assertEquals(List.of(parentValue), fetchSourceValue(fieldType, parentValue)); + assertEquals(List.of(parentValue), fetchSourceValue(mappedField, parentValue)); Map childValue = Map.of("relation", "child", "parent", "1"); - assertEquals(List.of(childValue), fetchSourceValue(fieldType, childValue)); + assertEquals(List.of(childValue), fetchSourceValue(mappedField, childValue)); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> fetchSourceValue(fieldType, parentValue, "format")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> fetchSourceValue(mappedField, parentValue, "format") + ); assertEquals("Field [field] of type [join] doesn't support formats.", e.getMessage()); } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentIdFieldTypeTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentIdFieldTypeTests.java index 8771aecca0909..d7c6c04418a8c 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentIdFieldTypeTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentIdFieldTypeTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.join.mapper; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; 
+import org.elasticsearch.index.mapper.MappedField; import java.io.IOException; import java.util.List; @@ -19,12 +19,12 @@ public class ParentIdFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { // The parent join ID is an internal field type and we don't return any values for it. - MappedFieldType fieldType = new ParentIdFieldMapper.ParentIdFieldType("field#parent", true); + MappedField mappedField = new ParentIdFieldMapper("field#parent", true).field(); Map parentValue = Map.of("relation", "parent"); - assertEquals(List.of(), fetchSourceValue(fieldType, parentValue)); + assertEquals(List.of(), fetchSourceValue(mappedField, parentValue)); Map childValue = Map.of("relation", "child", "parent", "1"); - assertEquals(List.of(), fetchSourceValue(fieldType, childValue)); + assertEquals(List.of(), fetchSourceValue(mappedField, childValue)); } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java index cb296f5ecbf3b..81630cafbe6ea 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java @@ -48,7 +48,7 @@ public void testSingleLevel() throws Exception { })); DocumentMapper docMapper = mapperService.documentMapper(); Joiner joiner = Joiner.getJoiner( - mapperService.mappingLookup().getMatchingFieldNames("*").stream().map(mapperService.mappingLookup()::getFieldType) + mapperService.mappingLookup().getMatchingFieldNames("*").stream().map(mapperService.mappingLookup()::getMappedField) ); assertNotNull(joiner); assertEquals("join_field", joiner.getJoinField()); @@ -244,7 +244,7 @@ public void testUpdateRelations() throws Exception { })); Joiner joiner = Joiner.getJoiner( - 
mapperService.mappingLookup().getMatchingFieldNames("*").stream().map(mapperService.mappingLookup()::getFieldType) + mapperService.mappingLookup().getMatchingFieldNames("*").stream().map(mapperService.mappingLookup()::getMappedField) ); assertNotNull(joiner); assertEquals("join_field", joiner.getJoinField()); @@ -272,7 +272,7 @@ public void testUpdateRelations() throws Exception { b.endObject(); })); joiner = Joiner.getJoiner( - mapperService.mappingLookup().getMatchingFieldNames("*").stream().map(mapperService.mappingLookup()::getFieldType) + mapperService.mappingLookup().getMatchingFieldNames("*").stream().map(mapperService.mappingLookup()::getMappedField) ); assertNotNull(joiner); assertEquals("join_field", joiner.getJoinField()); @@ -386,11 +386,11 @@ public void testEagerGlobalOrdinals() throws Exception { .endObject() ) ); - assertFalse(mapperService.fieldType("join_field").eagerGlobalOrdinals()); - assertNotNull(mapperService.fieldType("join_field#parent")); - assertTrue(mapperService.fieldType("join_field#parent").eagerGlobalOrdinals()); - assertNotNull(mapperService.fieldType("join_field#child")); - assertTrue(mapperService.fieldType("join_field#child").eagerGlobalOrdinals()); + assertFalse(mapperService.mappedField("join_field").eagerGlobalOrdinals()); + assertNotNull(mapperService.mappedField("join_field#parent")); + assertTrue(mapperService.mappedField("join_field#parent").eagerGlobalOrdinals()); + assertNotNull(mapperService.mappedField("join_field#child")); + assertTrue(mapperService.mappedField("join_field#child").eagerGlobalOrdinals()); merge( mapperService, @@ -405,11 +405,11 @@ public void testEagerGlobalOrdinals() throws Exception { .endObject() ) ); - assertFalse(mapperService.fieldType("join_field").eagerGlobalOrdinals()); - assertNotNull(mapperService.fieldType("join_field#parent")); - assertFalse(mapperService.fieldType("join_field#parent").eagerGlobalOrdinals()); - assertNotNull(mapperService.fieldType("join_field#child")); - 
assertFalse(mapperService.fieldType("join_field#child").eagerGlobalOrdinals()); + assertFalse(mapperService.mappedField("join_field").eagerGlobalOrdinals()); + assertNotNull(mapperService.mappedField("join_field#parent")); + assertFalse(mapperService.mappedField("join_field#parent").eagerGlobalOrdinals()); + assertNotNull(mapperService.mappedField("join_field#child")); + assertFalse(mapperService.mappedField("join_field#child").eagerGlobalOrdinals()); } public void testSubFields() throws IOException { @@ -425,20 +425,20 @@ public void testSubFields() throws IOException { ) ); ParentJoinFieldMapper mapper = (ParentJoinFieldMapper) mapperService.mappingLookup().getMapper("join_field"); - assertTrue(mapper.fieldType().isSearchable()); - assertTrue(mapper.fieldType().isAggregatable()); + assertTrue(mapper.field().isSearchable()); + assertTrue(mapper.field().isAggregatable()); Iterator it = mapper.iterator(); FieldMapper next = (FieldMapper) it.next(); assertThat(next.name(), equalTo("join_field#parent")); - assertTrue(next.fieldType().isSearchable()); - assertTrue(next.fieldType().isAggregatable()); + assertTrue(next.field().isSearchable()); + assertTrue(next.field().isAggregatable()); assertTrue(it.hasNext()); next = (FieldMapper) it.next(); assertThat(next.name(), equalTo("join_field#child")); - assertTrue(next.fieldType().isSearchable()); - assertTrue(next.fieldType().isAggregatable()); + assertTrue(next.field().isSearchable()); + assertTrue(next.field().isAggregatable()); assertFalse(it.hasNext()); } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index ee8e3370ef6cd..a626d35bcea93 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -49,7 +49,7 @@ import 
org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.ParsedDocument; @@ -487,15 +487,15 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { throw new IllegalStateException("no document to percolate"); } - MappedFieldType fieldType = context.getFieldType(field); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(field); + if (mappedField == null) { throw new QueryShardException(context, "field [" + field + "] does not exist"); } - if ((fieldType instanceof PercolatorFieldMapper.PercolatorFieldType) == false) { + if ((mappedField.type() instanceof PercolatorFieldMapper.PercolatorFieldType) == false) { throw new QueryShardException( context, - "expected field [" + field + "] to be of type [percolator], but is of type [" + fieldType.typeName() + "]" + "expected field [" + field + "] to be of type [percolator], but is of type [" + mappedField.typeName() + "]" ); } @@ -526,11 +526,10 @@ protected Analyzer getWrappedAnalyzer(String fieldName) { excludeNestedDocuments = false; } - PercolatorFieldMapper.PercolatorFieldType pft = (PercolatorFieldMapper.PercolatorFieldType) fieldType; - String queryName = this.name != null ? this.name : pft.name(); + PercolatorFieldMapper.PercolatorFieldType pft = (PercolatorFieldMapper.PercolatorFieldType) mappedField.type(); + String queryName = this.name != null ? 
this.name : mappedField.name(); SearchExecutionContext percolateShardContext = wrap(context); PercolatorFieldMapper.configureContext(percolateShardContext, pft.mapUnmappedFieldsAsText); - ; PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext); return pft.percolateQuery(queryName, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated()); @@ -570,12 +569,12 @@ static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection

{ LeafReader leafReader = ctx.reader(); - BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(queryBuilderFieldType.name()); + BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(queryBuilderField.name()); if (binaryDocValues == null) { return docId -> null; } @@ -640,8 +639,11 @@ public BitSetProducer bitsetFilter(Query query) { @Override @SuppressWarnings("unchecked") - public > IFD getForField(MappedFieldType fieldType) { - IndexFieldData.Builder builder = fieldType.fielddataBuilder(delegate.getFullyQualifiedIndex().getName(), delegate::lookup); + public > IFD getForField(MappedField mappedField) { + IndexFieldData.Builder builder = mappedField.fielddataBuilder( + delegate.getFullyQualifiedIndex().getName(), + delegate::lookup + ); IndexFieldDataCache cache = new IndexFieldDataCache.None(); CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService(); return (IFD) builder.build(cache, circuitBreaker); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 61a15d3a21185..30b6fd79739b5 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -134,7 +135,8 @@ protected Parameter[] getParameters() { @Override public PercolatorFieldMapper build(MapperBuilderContext context) { - PercolatorFieldType fieldType = new PercolatorFieldType(context.buildFullName(name), 
meta.getValue()); + String fullName = context.buildFullName(name); + PercolatorFieldType fieldType = new PercolatorFieldType(meta.getValue()); // TODO should percolator even allow multifields? MultiFields multiFields = multiFieldsBuilder.build(this, context); context = context.createChildContext(name); @@ -143,26 +145,26 @@ public PercolatorFieldMapper build(MapperBuilderContext context) { context, indexCreatedVersion ); - fieldType.queryTermsField = extractedTermsField.fieldType(); + fieldType.queryTermsField = extractedTermsField.field(); KeywordFieldMapper extractionResultField = createExtractQueryFieldBuilder( EXTRACTION_RESULT_FIELD_NAME, context, indexCreatedVersion ); - fieldType.extractionResultField = extractionResultField.fieldType(); + fieldType.extractionResultField = extractionResultField.field(); BinaryFieldMapper queryBuilderField = createQueryBuilderFieldBuilder(context); - fieldType.queryBuilderField = queryBuilderField.fieldType(); + fieldType.queryBuilderField = queryBuilderField.field(); // Range field is of type ip, because that matches closest with BinaryRange field. Otherwise we would // have to introduce a new field type... 
RangeFieldMapper rangeFieldMapper = createExtractedRangeFieldBuilder(RANGE_FIELD_NAME, RangeType.IP, context); - fieldType.rangeField = rangeFieldMapper.fieldType(); + fieldType.rangeField = rangeFieldMapper.field(); NumberFieldMapper minimumShouldMatchFieldMapper = createMinimumShouldMatchField(context, indexCreatedVersion); - fieldType.minimumShouldMatchField = minimumShouldMatchFieldMapper.fieldType(); + fieldType.minimumShouldMatchField = minimumShouldMatchFieldMapper.field(); fieldType.mapUnmappedFieldsAsText = mapUnmappedFieldsAsText; return new PercolatorFieldMapper( name(), - fieldType, + new MappedField(fullName, fieldType), multiFields, copyTo.build(), searchExecutionContext, @@ -224,16 +226,16 @@ private static boolean getMapUnmappedFieldAsText(Settings indexSettings) { static class PercolatorFieldType extends MappedFieldType { - MappedFieldType queryTermsField; - MappedFieldType extractionResultField; - MappedFieldType queryBuilderField; - MappedFieldType minimumShouldMatchField; + MappedField queryTermsField; + MappedField extractionResultField; + MappedField queryBuilderField; + MappedField minimumShouldMatchField; - RangeFieldMapper.RangeFieldType rangeField; + MappedField rangeField; boolean mapUnmappedFieldsAsText; - private PercolatorFieldType(String name, Map meta) { - super(name, false, false, false, TextSearchInfo.NONE, meta); + private PercolatorFieldType(Map meta) { + super(false, false, false, TextSearchInfo.NONE, meta); } @Override @@ -242,13 +244,13 @@ public String typeName() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new QueryShardException(context, "Percolator fields are not searchable directly, use a percolate query instead"); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.identity(name(), context, format); + public 
ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return SourceValueFetcher.identity(name, context, format); } Query percolateQuery( @@ -364,7 +366,7 @@ Tuple, Map>> extractTermsAndRanges(IndexRead PercolatorFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Supplier searchExecutionContext, @@ -376,7 +378,7 @@ Tuple, Map>> extractTermsAndRanges(IndexRead boolean mapUnmappedFieldsAsText, Version indexCreatedVersion ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + super(simpleName, mappedField, multiFields, copyTo); this.searchExecutionContext = searchExecutionContext; this.queryTermsField = queryTermsField; this.extractionResultField = extractionResultField; @@ -472,7 +474,7 @@ void processQuery(Query query, DocumentParserContext context) { doc.add(new Field(extractionResultField.name(), EXTRACTION_PARTIAL, INDEXED_KEYWORD)); } - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch)); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 52422a35a4079..084ae6ed71b5a 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -77,7 +77,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; import 
org.elasticsearch.index.mapper.TestDocumentParserContext; @@ -216,7 +216,7 @@ public void testDuel() throws Exception { Collections.sort(intValues); SearchExecutionContext context = createSearchContext(indexService).getSearchExecutionContext(); - MappedFieldType intFieldType = mapperService.fieldType("int_field"); + MappedField intField = mapperService.mappedField("int_field"); List> queryFunctions = new ArrayList<>(); queryFunctions.add(MatchNoDocsQuery::new); @@ -226,10 +226,10 @@ public void testDuel() throws Exception { queryFunctions.add(() -> new TermQuery(new Term(field1, randomFrom(stringContent.get(field1))))); String field2 = randomFrom(stringFields); queryFunctions.add(() -> new TermQuery(new Term(field2, randomFrom(stringContent.get(field2))))); - queryFunctions.add(() -> intFieldType.termQuery(randomFrom(intValues), context)); - queryFunctions.add(() -> intFieldType.termsQuery(Arrays.asList(randomFrom(intValues), randomFrom(intValues)), context)); + queryFunctions.add(() -> intField.termQuery(randomFrom(intValues), context)); + queryFunctions.add(() -> intField.termsQuery(Arrays.asList(randomFrom(intValues), randomFrom(intValues)), context)); queryFunctions.add( - () -> intFieldType.rangeQuery( + () -> intField.rangeQuery( intValues.get(4), intValues.get(intValues.size() - 4), true, @@ -257,7 +257,7 @@ public void testDuel() throws Exception { // many iterations with boolean queries, which are the most complex queries to deal with when nested int numRandomBoolQueries = 1000; for (int i = 0; i < numRandomBoolQueries; i++) { - queryFunctions.add(() -> createRandomBooleanQuery(1, stringFields, stringContent, intFieldType, intValues, context)); + queryFunctions.add(() -> createRandomBooleanQuery(1, stringFields, stringContent, intField, intValues, context)); } queryFunctions.add(() -> { int numClauses = randomIntBetween(1, 1 << randomIntBetween(2, 4)); @@ -308,7 +308,7 @@ private BooleanQuery createRandomBooleanQuery( int depth, List fields, Map> content, 
- MappedFieldType intFieldType, + MappedField intField, List intValues, SearchExecutionContext context ) { @@ -324,24 +324,24 @@ private BooleanQuery createRandomBooleanQuery( String field = randomFrom(fields); builder.add(new TermQuery(new Term(field, randomFrom(content.get(field)))), occur); } else { - builder.add(intFieldType.termQuery(randomFrom(intValues), context), occur); + builder.add(intField.termQuery(randomFrom(intValues), context), occur); } } else if (rarely() && depth <= 3) { occur = randomFrom(Arrays.asList(Occur.FILTER, Occur.MUST, Occur.SHOULD)); - builder.add(createRandomBooleanQuery(depth + 1, fields, content, intFieldType, intValues, context), occur); + builder.add(createRandomBooleanQuery(depth + 1, fields, content, intField, intValues, context), occur); } else if (rarely()) { if (randomBoolean()) { occur = randomFrom(Arrays.asList(Occur.FILTER, Occur.MUST, Occur.SHOULD)); if (randomBoolean()) { builder.add(new TermQuery(new Term("unknown_field", randomAlphaOfLength(8))), occur); } else { - builder.add(intFieldType.termQuery(randomFrom(intValues), context), occur); + builder.add(intField.termQuery(randomFrom(intValues), context), occur); } } else if (randomBoolean()) { String field = randomFrom(fields); builder.add(new TermQuery(new Term(field, randomFrom(content.get(field)))), occur = Occur.MUST_NOT); } else { - builder.add(intFieldType.termQuery(randomFrom(intValues), context), occur = Occur.MUST_NOT); + builder.add(intField.termQuery(randomFrom(intValues), context), occur = Occur.MUST_NOT); } } else { if (randomBoolean()) { @@ -350,7 +350,7 @@ private BooleanQuery createRandomBooleanQuery( String field = randomFrom(fields); builder.add(new TermQuery(new Term(field, randomFrom(content.get(field)))), occur); } else { - builder.add(intFieldType.termQuery(randomFrom(intValues), context), occur); + builder.add(intField.termQuery(randomFrom(intValues), context), occur); } } else { builder.add(new TermQuery(new Term("unknown_field", 
randomAlphaOfLength(8))), occur = Occur.MUST_NOT); @@ -370,7 +370,7 @@ public void testDuel2() throws Exception { stringValues.add("value2"); stringValues.add("value3"); - MappedFieldType intFieldType = mapperService.fieldType("int_field"); + MappedField intField = mapperService.mappedField("int_field"); List ranges = new ArrayList<>(); ranges.add(new int[] { -5, 5 }); ranges.add(new int[] { 0, 10 }); @@ -386,13 +386,13 @@ public void testDuel2() throws Exception { } { int[] range = randomFrom(ranges); - Query rangeQuery = intFieldType.rangeQuery(range[0], range[1], true, true, null, null, null, context); + Query rangeQuery = intField.rangeQuery(range[0], range[1], true, true, null, null, null, context); addQuery(rangeQuery, documents); } { int numBooleanQueries = randomIntBetween(1, 5); for (int i = 0; i < numBooleanQueries; i++) { - Query randomBQ = randomBQ(1, stringValues, ranges, intFieldType, context); + Query randomBQ = randomBQ(1, stringValues, ranges, intField, context); addQuery(randomBQ, documents); } } @@ -430,7 +430,7 @@ private BooleanQuery randomBQ( int depth, List stringValues, List ranges, - MappedFieldType intFieldType, + MappedField intField, SearchExecutionContext context ) { final int numClauses = randomIntBetween(1, 4); @@ -441,10 +441,10 @@ private BooleanQuery randomBQ( for (int i = 0; i < numClauses; i++) { Query subQuery; if (randomBoolean() && depth <= 3) { - subQuery = randomBQ(depth + 1, stringValues, ranges, intFieldType, context); + subQuery = randomBQ(depth + 1, stringValues, ranges, intField, context); } else if (randomBoolean()) { int[] range = randomFrom(ranges); - subQuery = intFieldType.rangeQuery(range[0], range[1], true, true, null, null, null, context); + subQuery = intField.rangeQuery(range[0], range[1], true, true, null, null, null, context); } else { subQuery = new TermQuery(new Term("string_field", randomFrom(stringValues))); } diff --git 
a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 16300ae51d81b..7ebbd447ce56e 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -200,7 +200,7 @@ private void addQueryFieldMappings() throws Exception { .endObject() ); mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); - fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName); + fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.mappedField(fieldName).type(); } public void testExtractTerms() throws Exception { @@ -254,9 +254,9 @@ public void testExtractRanges() throws Exception { SearchExecutionContext context = createSearchContext(indexService).getSearchExecutionContext(); addQueryFieldMappings(); BooleanQuery.Builder bq = new BooleanQuery.Builder(); - Query rangeQuery1 = mapperService.fieldType("number_field1").rangeQuery(10, 20, true, true, null, null, null, context); + Query rangeQuery1 = mapperService.mappedField("number_field1").rangeQuery(10, 20, true, true, null, null, null, context); bq.add(rangeQuery1, Occur.MUST); - Query rangeQuery2 = mapperService.fieldType("number_field1").rangeQuery(15, 20, true, true, null, null, null, context); + Query rangeQuery2 = mapperService.mappedField("number_field1").rangeQuery(15, 20, true, true, null, null, null, context); bq.add(rangeQuery2, Occur.MUST); DocumentMapper documentMapper = mapperService.documentMapper(); @@ -282,7 +282,7 @@ public void testExtractRanges() throws Exception { // Range queries on different fields: bq = new BooleanQuery.Builder(); bq.add(rangeQuery1, Occur.MUST); - rangeQuery2 = 
mapperService.fieldType("number_field2").rangeQuery(15, 20, true, true, null, null, null, context); + rangeQuery2 = mapperService.mappedField("number_field2").rangeQuery(15, 20, true, true, null, null, null, context); bq.add(rangeQuery2, Occur.MUST); documentParserContext = new TestDocumentParserContext(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java index e2d3ffe259e47..ab3317bec04e9 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.TestDocumentParserContext; import org.elasticsearch.index.query.SearchExecutionContext; @@ -76,14 +77,14 @@ public void testStoringQueryBuilders() throws IOException { when(searchExecutionContext.indexVersionCreated()).thenReturn(version); when(searchExecutionContext.getWriteableRegistry()).thenReturn(writableRegistry()); when(searchExecutionContext.getParserConfig()).thenReturn(parserConfig()); - when(searchExecutionContext.getForField(fieldMapper.fieldType())).thenReturn( + when(searchExecutionContext.getForField(fieldMapper.field())).thenReturn( new BytesBinaryIndexFieldData(fieldMapper.name(), CoreValuesSourceType.KEYWORD) ); - when(searchExecutionContext.getFieldType(Mockito.anyString())).thenAnswer(invocation -> { + when(searchExecutionContext.getMappedField(Mockito.anyString())).thenAnswer(invocation -> { final String fieldName = (String) invocation.getArguments()[0]; - return new 
KeywordFieldMapper.KeywordFieldType(fieldName); + return new MappedField(fieldName, new KeywordFieldMapper.KeywordFieldType()); }); - PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(fieldMapper.fieldType(), searchExecutionContext); + PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(fieldMapper.field(), searchExecutionContext); try (IndexReader indexReader = DirectoryReader.open(directory)) { LeafReaderContext leafContext = indexReader.leaves().get(0); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldMapper.java index 94b6e32995688..67a1f177073cb 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldMapper.java @@ -31,7 +31,7 @@ import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.StringFieldType; @@ -61,7 +61,6 @@ public static final class CollationFieldType extends StringFieldType { private final int ignoreAbove; public CollationFieldType( - String name, boolean isSearchable, boolean isStored, boolean hasDocValues, @@ -70,18 +69,18 @@ public CollationFieldType( int ignoreAbove, Map meta ) { - super(name, isSearchable, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); + super(isSearchable, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); this.collator = 
collator; this.nullValue = nullValue; this.ignoreAbove = ignoreAbove; } - public CollationFieldType(String name, boolean searchable, Collator collator) { - this(name, searchable, false, true, collator, null, Integer.MAX_VALUE, Collections.emptyMap()); + public CollationFieldType(boolean searchable, Collator collator) { + this(searchable, false, true, collator, null, Integer.MAX_VALUE, Collections.emptyMap()); } - public CollationFieldType(String name, Collator collator) { - this(name, true, false, true, collator, null, Integer.MAX_VALUE, Collections.emptyMap()); + public CollationFieldType(Collator collator) { + this(true, false, true, collator, null, Integer.MAX_VALUE, Collections.emptyMap()); } @Override @@ -90,12 +89,12 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new SourceValueFetcher(name(), context, nullValue) { + return new SourceValueFetcher(name, context, nullValue) { @Override protected String parseSourceValue(Object value) { String keywordValue = value.toString(); @@ -108,10 +107,10 @@ protected String parseSourceValue(Object value) { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); return new SortedSetOrdinalsIndexFieldData.Builder( - name(), + name, CoreValuesSourceType.KEYWORD, (dv, n) -> new DelegateDocValuesField( new ScriptDocValues.Strings(new 
ScriptDocValues.StringsSupplier(FieldData.toString(dv))), @@ -121,7 +120,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } @Override - protected BytesRef indexedValueForSearch(Object value) { + protected BytesRef indexedValueForSearch(String name, Object value) { if (value == null) { return null; } @@ -139,6 +138,7 @@ protected BytesRef indexedValueForSearch(Object value) { @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -151,6 +151,7 @@ public Query fuzzyQuery( @Override public Query prefixQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, @@ -161,6 +162,7 @@ public Query prefixQuery( @Override public Query wildcardQuery( + String name, String value, @Nullable MultiTermQuery.RewriteMethod method, boolean caseInsensitive, @@ -171,6 +173,7 @@ public Query wildcardQuery( @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -209,7 +212,7 @@ public BytesRef parseBytesRef(Object value) { }; @Override - public DocValueFormat docValueFormat(final String format, final ZoneId timeZone) { + public DocValueFormat docValueFormat(String name, final String format, final ZoneId timeZone) { return COLLATE_FORMAT; } } @@ -327,7 +330,6 @@ public ICUCollationKeywordFieldMapper build(MapperBuilderContext context) { final CollatorParams params = collatorParams(); final Collator collator = params.buildCollator(); CollationFieldType ft = new CollationFieldType( - context.buildFullName(name), indexed.getValue(), stored.getValue(), hasDocValues.getValue(), @@ -339,7 +341,7 @@ public ICUCollationKeywordFieldMapper build(MapperBuilderContext context) { return new ICUCollationKeywordFieldMapper( name, buildFieldType(), - ft, + new MappedField(context.buildFullName(name), ft), multiFieldsBuilder.build(this, context), copyTo.build(), collator, @@ -473,13 +475,13 @@ public Collator buildCollator() { 
protected ICUCollationKeywordFieldMapper( String simpleName, FieldType fieldType, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Collator collator, Builder builder ) { - super(simpleName, mappedFieldType, multiFields, copyTo, false, null); + super(simpleName, mappedField, multiFields, copyTo, false, null); assert collator.isFrozen(); this.fieldType = fieldType; this.params = builder.collatorParams(); @@ -493,7 +495,7 @@ protected ICUCollationKeywordFieldMapper( @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override @@ -534,14 +536,14 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio final BytesRef binaryValue = new BytesRef(key.bytes, 0, key.size); if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - Field field = new Field(mappedFieldType.name(), binaryValue, fieldType); + Field field = new Field(mappedField.name(), binaryValue, fieldType); context.doc().add(field); } if (hasDocValues) { - context.doc().add(new SortedSetDocValuesField(fieldType().name(), binaryValue)); + context.doc().add(new SortedSetDocValuesField(name(), binaryValue)); } else if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/CollationFieldTypeTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/CollationFieldTypeTests.java index 5a03632b0052e..2cec7e52788bd 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/CollationFieldTypeTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/CollationFieldTypeTests.java @@ -34,7 +34,7 @@ public class CollationFieldTypeTests 
extends FieldTypeTestCase { private static final Collator DEFAULT_COLLATOR = Collator.getInstance(ULocale.ROOT).freeze(); private static CollationFieldType createFieldType() { - return new CollationFieldType("field", DEFAULT_COLLATOR); + return new CollationFieldType(DEFAULT_COLLATOR); } public void testIsFieldWithinQuery() throws IOException { @@ -43,6 +43,7 @@ public void testIsFieldWithinQuery() throws IOException { assertEquals( Relation.INTERSECTS, ft.isFieldWithinQuery( + "field", null, RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5), RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5), @@ -59,21 +60,21 @@ public void testTermQuery() { Collator collator = Collator.getInstance(new ULocale("tr")); collator.setStrength(Collator.PRIMARY); collator.freeze(); - MappedFieldType ft = new CollationFieldType("field", collator); + MappedFieldType ft = new CollationFieldType(collator); RawCollationKey key = collator.getRawCollationKey("ı will use turkish casıng", null); BytesRef expected = new BytesRef(key.bytes, 0, key.size); - assertEquals(new TermQuery(new Term("field", expected)), ft.termQuery("I WİLL USE TURKİSH CASING", null)); + assertEquals(new TermQuery(new Term("field", expected)), ft.termQuery("field", "I WİLL USE TURKİSH CASING", null)); - MappedFieldType unsearchable = new CollationFieldType("field", false, collator); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("bar", null)); + MappedFieldType unsearchable = new CollationFieldType(false, collator); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("field", "bar", null)); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } public void testTermsQuery() { Collator collator = DEFAULT_COLLATOR; - MappedFieldType ft = new CollationFieldType("field", collator); + MappedFieldType ft = new CollationFieldType(collator); RawCollationKey fooKey = 
collator.getRawCollationKey("foo", null); RawCollationKey barKey = collator.getRawCollationKey("bar", null); @@ -82,12 +83,12 @@ public void testTermsQuery() { terms.add(new BytesRef(fooKey.bytes, 0, fooKey.size)); terms.add(new BytesRef(barKey.bytes, 0, barKey.size)); - assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), null)); + assertEquals(new TermInSetQuery("field", terms), ft.termsQuery("field", Arrays.asList("foo", "bar"), null)); - MappedFieldType unsearchable = new CollationFieldType("field", false, collator); + MappedFieldType unsearchable = new CollationFieldType(false, collator); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.termsQuery(Arrays.asList("foo", "bar"), null) + () -> unsearchable.termsQuery("field", Arrays.asList("foo", "bar"), null) ); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } @@ -96,7 +97,7 @@ public void testRegexpQuery() { MappedFieldType ft = createFieldType(); UnsupportedOperationException e = expectThrows( UnsupportedOperationException.class, - () -> ft.regexpQuery("foo.*", 0, 0, 10, null, randomMockContext()) + () -> ft.regexpQuery("field", "foo.*", 0, 0, 10, null, randomMockContext()) ); assertEquals("[regexp] queries are not supported on [icu_collation_keyword] fields.", e.getMessage()); } @@ -105,7 +106,7 @@ public void testFuzzyQuery() { MappedFieldType ft = createFieldType(); UnsupportedOperationException e = expectThrows( UnsupportedOperationException.class, - () -> ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, randomMockContext()) + () -> ft.fuzzyQuery("field", "foo", Fuzziness.fromEdits(2), 1, 50, true, randomMockContext()) ); assertEquals("[fuzzy] queries are not supported on [icu_collation_keyword] fields.", e.getMessage()); } @@ -114,7 +115,7 @@ public void testPrefixQuery() { MappedFieldType ft = createFieldType(); UnsupportedOperationException e = expectThrows( 
UnsupportedOperationException.class, - () -> ft.prefixQuery("prefix", null, randomMockContext()) + () -> ft.prefixQuery("field", "prefix", null, randomMockContext()) ); assertEquals("[prefix] queries are not supported on [icu_collation_keyword] fields.", e.getMessage()); } @@ -123,7 +124,7 @@ public void testWildcardQuery() { MappedFieldType ft = createFieldType(); UnsupportedOperationException e = expectThrows( UnsupportedOperationException.class, - () -> ft.wildcardQuery("foo*", null, randomMockContext()) + () -> ft.wildcardQuery("field", "foo*", null, randomMockContext()) ); assertEquals("[wildcard] queries are not supported on [icu_collation_keyword] fields.", e.getMessage()); } @@ -141,21 +142,21 @@ public void testRangeQuery() { false ); - assertEquals(expected, ft.rangeQuery("a", "b", false, false, null, null, null, MOCK_CONTEXT)); + assertEquals(expected, ft.rangeQuery("field", "a", "b", false, false, null, null, null, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.rangeQuery("a", "b", true, true, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.rangeQuery("field", "a", "b", true, true, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[range] queries on [text] or [keyword] fields cannot be executed when " + "'search.allow_expensive_queries' is set to false.", ee.getMessage() ); - MappedFieldType unsearchable = new CollationFieldType("field", false, DEFAULT_COLLATOR); + MappedFieldType unsearchable = new CollationFieldType(false, DEFAULT_COLLATOR); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.rangeQuery("a", "b", false, false, null, null, null, MOCK_CONTEXT) + () -> unsearchable.rangeQuery("field", "a", "b", false, false, null, null, null, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } diff --git 
a/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldTypeTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldTypeTests.java index 6dfa74225a16a..c7c1e3276dd69 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldTypeTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/plugin/analysis/icu/ICUCollationKeywordFieldTypeTests.java @@ -19,17 +19,17 @@ public class ICUCollationKeywordFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { ICUCollationKeywordFieldMapper mapper = new ICUCollationKeywordFieldMapper.Builder("field").build(MapperBuilderContext.ROOT); - assertEquals(List.of("42"), fetchSourceValue(mapper.fieldType(), 42L)); - assertEquals(List.of("true"), fetchSourceValue(mapper.fieldType(), true)); + assertEquals(List.of("42"), fetchSourceValue(mapper.field(), 42L)); + assertEquals(List.of("true"), fetchSourceValue(mapper.field(), true)); ICUCollationKeywordFieldMapper ignoreAboveMapper = new ICUCollationKeywordFieldMapper.Builder("field").ignoreAbove(4) .build(MapperBuilderContext.ROOT); - assertEquals(List.of(), fetchSourceValue(ignoreAboveMapper.fieldType(), "value")); - assertEquals(List.of("42"), fetchSourceValue(ignoreAboveMapper.fieldType(), 42L)); - assertEquals(List.of("true"), fetchSourceValue(ignoreAboveMapper.fieldType(), true)); + assertEquals(List.of(), fetchSourceValue(ignoreAboveMapper.field(), "value")); + assertEquals(List.of("42"), fetchSourceValue(ignoreAboveMapper.field(), 42L)); + assertEquals(List.of("true"), fetchSourceValue(ignoreAboveMapper.field(), true)); ICUCollationKeywordFieldMapper nullValueMapper = new ICUCollationKeywordFieldMapper.Builder("field").nullValue("NULL") .build(MapperBuilderContext.ROOT); - assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper.fieldType(), null)); + 
assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper.field(), null)); } } diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java index 02854917e4ae3..12b885f8400ba 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextParams; @@ -114,14 +115,14 @@ protected Parameter[] getParameters() { meta }; } - private AnnotatedTextFieldType buildFieldType(FieldType fieldType, MapperBuilderContext context) { + private AnnotatedTextFieldType buildFieldType(FieldType fieldType) { TextSearchInfo tsi = new TextSearchInfo( fieldType, similarity.get(), wrapAnalyzer(analyzers.getSearchAnalyzer()), wrapAnalyzer(analyzers.getSearchQuoteAnalyzer()) ); - return new AnnotatedTextFieldType(context.buildFullName(name), store.getValue(), tsi, meta.getValue()); + return new AnnotatedTextFieldType(store.getValue(), tsi, meta.getValue()); } @Override @@ -140,7 +141,7 @@ public AnnotatedTextFieldMapper build(MapperBuilderContext context) { return new AnnotatedTextFieldMapper( name, fieldType, - buildFieldType(fieldType, context), + new MappedField(context.buildFullName(name), buildFieldType(fieldType)), multiFieldsBuilder.build(this, context), copyTo.build(), this @@ -467,12 +468,12 @@ private void 
emitAnnotation(int firstSpannedTextPosInc, int annotationPosLen) th public static final class AnnotatedTextFieldType extends TextFieldMapper.TextFieldType { - private AnnotatedTextFieldType(String name, boolean store, TextSearchInfo tsi, Map meta) { - super(name, true, store, tsi, meta); + private AnnotatedTextFieldType(boolean store, TextSearchInfo tsi, Map meta) { + super(true, store, tsi, meta); } public AnnotatedTextFieldType(String name, Map meta) { - super(name, true, false, meta); + super(true, false, meta); } @Override @@ -489,12 +490,12 @@ public String typeName() { protected AnnotatedTextFieldMapper( String simpleName, FieldType fieldType, - AnnotatedTextFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + super(simpleName, mappedField, multiFields, copyTo); assert fieldType.tokenized(); this.fieldType = fieldType; this.builder = builder; @@ -503,7 +504,7 @@ protected AnnotatedTextFieldMapper( @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), indexAnalyzer); + return Map.of(mappedField.name(), indexAnalyzer); } @Override @@ -515,10 +516,10 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - Field field = new Field(mappedFieldType.name(), value, fieldType); + Field field = new Field(mappedField.name(), value, fieldType); context.doc().add(field); if (fieldType.omitNorms()) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } } diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighter.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighter.java index 090df42126706..ff43b4e840372 100644 --- 
a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighter.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighter.java @@ -11,7 +11,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.uhighlight.PassageFormatter; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer; import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText; import org.elasticsearch.index.query.SearchExecutionContext; @@ -33,11 +33,11 @@ public class AnnotatedTextHighlighter extends UnifiedHighlighter { protected List loadFieldValues( CustomUnifiedHighlighter highlighter, SearchExecutionContext searchContext, - MappedFieldType fieldType, + MappedField mappedField, HitContext hitContext, boolean forceSource ) throws IOException { - List fieldValues = super.loadFieldValues(highlighter, searchContext, fieldType, hitContext, forceSource); + List fieldValues = super.loadFieldValues(highlighter, searchContext, mappedField, hitContext, forceSource); List strings = new ArrayList<>(fieldValues.size()); AnnotatedText[] annotations = new AnnotatedText[fieldValues.size()]; diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java index 0ead11b1e2ae9..f7135a0852c47 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java +++ 
b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -24,17 +25,17 @@ public class AnnotatedTextFieldTypeTests extends FieldTypeTestCase { public void testIntervals() throws IOException { MappedFieldType ft = new AnnotatedTextFieldMapper.AnnotatedTextFieldType("field", Collections.emptyMap()); - IntervalsSource source = ft.termIntervals(new BytesRef("donald"), null); + IntervalsSource source = ft.termIntervals("field", new BytesRef("donald"), null); assertEquals(Intervals.term("donald"), source); } public void testFetchSourceValue() throws IOException { - MappedFieldType fieldType = new AnnotatedTextFieldMapper.Builder("field", Version.CURRENT, createDefaultIndexAnalyzers()).build( + MappedField mappedField = new AnnotatedTextFieldMapper.Builder("field", Version.CURRENT, createDefaultIndexAnalyzers()).build( MapperBuilderContext.ROOT - ).fieldType(); + ).field(); - assertEquals(List.of("value"), fetchSourceValue(fieldType, "value")); - assertEquals(List.of("42"), fetchSourceValue(fieldType, 42L)); - assertEquals(List.of("true"), fetchSourceValue(fieldType, true)); + assertEquals(List.of("value"), fetchSourceValue(mappedField, "value")); + assertEquals(List.of("42"), fetchSourceValue(mappedField, 42L)); + assertEquals(List.of("true"), fetchSourceValue(mappedField, true)); } } diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index c33aad0c3d42c..dc49a66537d00 100644 --- 
a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -20,6 +20,7 @@ import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; @@ -67,7 +68,7 @@ protected Parameter[] getParameters() { public Murmur3FieldMapper build(MapperBuilderContext context) { return new Murmur3FieldMapper( name, - new Murmur3FieldType(context.buildFullName(name), stored.getValue(), meta.getValue()), + new MappedField(context.buildFullName(name), new Murmur3FieldType(stored.getValue(), meta.getValue())), multiFieldsBuilder.build(this, context), copyTo.build() ); @@ -79,8 +80,8 @@ public Murmur3FieldMapper build(MapperBuilderContext context) { // this only exists so a check can be done to match the field type to using murmur3 hashing... 
public static class Murmur3FieldType extends MappedFieldType { - private Murmur3FieldType(String name, boolean isStored, Map meta) { - super(name, false, isStored, true, TextSearchInfo.NONE, meta); + private Murmur3FieldType(boolean isStored, Map meta) { + super(false, isStored, true, TextSearchInfo.NONE, meta); } @Override @@ -89,24 +90,24 @@ public String typeName() { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, Murmur3DocValueField::new); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new SortedNumericIndexFieldData.Builder(name, NumericType.LONG, Murmur3DocValueField::new); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.toString(name(), context, format); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return SourceValueFetcher.toString(name, context, format); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new IllegalArgumentException("Murmur3 fields are not searchable: [" + name() + "]"); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + throw new IllegalArgumentException("Murmur3 fields are not searchable: [" + name + "]"); } } - protected Murmur3FieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, mappedFieldType, multiFields, copyTo); + protected Murmur3FieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo) { + super(simpleName, mappedField, multiFields, copyTo); } @Override @@ -125,7 +126,7 @@ protected void parseCreateField(DocumentParserContext context) throws 
IOExceptio if (value != null) { final BytesRef bytes = new BytesRef(value.toString()); final long hash = MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, new MurmurHash3.Hash128()).h1; - context.doc().add(new SortedNumericDocValuesField(fieldType().name(), hash)); + context.doc().add(new SortedNumericDocValuesField(name(), hash)); if (fieldType().isStored()) { context.doc().add(new StoredField(name(), hash)); } diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 385efcb0ebaee..02dc458c08355 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -12,7 +12,7 @@ import org.elasticsearch.index.mapper.DocValueFetcher; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; @@ -43,33 +43,33 @@ protected Parameter[] getParameters() { @Override public SizeFieldMapper build() { - return new SizeFieldMapper(enabled.getValue(), new SizeFieldType()); + return new SizeFieldMapper(enabled.getValue(), new MappedField(NAME, new SizeFieldType())); } } private static class SizeFieldType extends NumberFieldType { SizeFieldType() { - super(NAME, NumberType.INTEGER); + super(NumberType.INTEGER); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (hasDocValues() == false) { 
return (lookup, ignoredValues) -> List.of(); } - return new DocValueFetcher(docValueFormat(format, null), context.getForField(this)); + return new DocValueFetcher(docValueFormat(name, format, null), context.getForField(new MappedField(name, this))); } } public static final TypeParser PARSER = new ConfigurableTypeParser( - c -> new SizeFieldMapper(Explicit.IMPLICIT_FALSE, new SizeFieldType()), + c -> new SizeFieldMapper(Explicit.IMPLICIT_FALSE, new MappedField(NAME, new SizeFieldType())), c -> new Builder() ); private final Explicit enabled; - private SizeFieldMapper(Explicit enabled, MappedFieldType mappedFieldType) { - super(mappedFieldType); + private SizeFieldMapper(Explicit enabled, MappedField mappedField) { + super(mappedField); this.enabled = enabled; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 55ff39715faed..5cbbf690061a2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -21,7 +21,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; @@ -326,8 +326,8 @@ private void assertConcreteMappingsOnAll(final String index, final String... 
fie assertThat("index service doesn't exists on " + node, indexService, notNullValue()); MapperService mapperService = indexService.mapperService(); for (String fieldName : fieldNames) { - MappedFieldType fieldType = mapperService.fieldType(fieldName); - assertNotNull("field " + fieldName + " doesn't exists on " + node, fieldType); + MappedField mappedField = mapperService.mappedField(fieldName); + assertNotNull("field " + fieldName + " doesn't exists on " + node, mappedField); } } assertMappingOnMaster(index, fieldNames); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index a52940ae9a413..cd9a1521543c1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.index.query.QueryBuilder; @@ -633,7 +634,7 @@ private static final class TestMetadataMapper extends MetadataFieldMapper { private static final String FIELD_NAME = "_test"; protected TestMetadataMapper() { - super(new KeywordFieldMapper.KeywordFieldType(FIELD_NAME)); + super(new MappedField(FIELD_NAME, new KeywordFieldMapper.KeywordFieldType())); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 3eaa88570d8eb..98e0c407ec36e 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -38,7 +38,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; @@ -185,13 +185,13 @@ private static Analyzer getAnalyzer(AnalyzeAction.Request request, AnalysisRegis if (indexService == null) { throw new IllegalArgumentException("analysis based on a specific field requires an index"); } - MappedFieldType fieldType = indexService.mapperService().fieldType(request.field()); - if (fieldType != null) { - if (fieldType instanceof StringFieldType) { + MappedField mappedField = indexService.mapperService().mappedField(request.field()); + if (mappedField != null) { + if (mappedField.type() instanceof StringFieldType) { return indexService.mapperService() .indexAnalyzer( - fieldType.name(), - f -> { throw new IllegalArgumentException("No analyzer configured for field " + fieldType.name()); } + mappedField.name(), + f -> { throw new IllegalArgumentException("No analyzer configured for field " + mappedField.name()); } ); } else { throw new IllegalArgumentException( diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java index 08486b6a4aca3..960932166b6e9 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java @@ -11,7 +11,7 @@ import 
org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.RuntimeField; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -109,20 +109,20 @@ static Map retrieveFieldCaps( boolean includeParentObjects = checkIncludeParents(filters); - Predicate filter = buildFilter(indexFieldfilter, filters, types, context); + Predicate filter = buildFilter(indexFieldfilter, filters, types, context); Map responseMap = new HashMap<>(); for (String field : fieldNames) { - MappedFieldType ft = context.getFieldType(field); - if (filter.test(ft)) { + MappedField mappedField = context.getMappedField(field); + if (filter.test(mappedField)) { IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( field, - ft.familyTypeName(), + mappedField.familyTypeName(), context.isMetadataField(field), - ft.isSearchable(), - ft.isAggregatable(), - ft.isDimension(), - ft.getMetricType(), - ft.meta() + mappedField.isSearchable(), + mappedField.isAggregatable(), + mappedField.isDimension(), + mappedField.getMetricType(), + mappedField.meta() ); responseMap.put(field, fieldCap); } else { @@ -132,16 +132,16 @@ static Map retrieveFieldCaps( // Check the ancestor of the field to find nested and object fields. // Runtime fields are excluded since they can override any path. 
// TODO find a way to do this that does not require an instanceof check - if (ft instanceof RuntimeField == false && includeParentObjects) { - int dotIndex = ft.name().lastIndexOf('.'); + if (mappedField instanceof RuntimeField == false && includeParentObjects) { + int dotIndex = mappedField.name().lastIndexOf('.'); while (dotIndex > -1) { - String parentField = ft.name().substring(0, dotIndex); + String parentField = mappedField.name().substring(0, dotIndex); if (responseMap.containsKey(parentField)) { // we added this path on another field already break; } // checks if the parent field contains sub-fields - if (context.getFieldType(parentField) == null) { + if (context.getMappedField(parentField) == null) { // no field type, it must be an object field String type = context.nestedLookup().getNestedMappers().get(parentField) != null ? "nested" : "object"; IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( @@ -187,14 +187,14 @@ private static boolean canMatchShard( return SearchService.queryStillMatchesAfterRewrite(searchRequest, searchExecutionContext); } - private static Predicate buildFilter( + private static Predicate buildFilter( Predicate fieldFilter, String[] filters, String[] fieldTypes, SearchExecutionContext context ) { // security filters don't exclude metadata fields - Predicate fcf = ft -> fieldFilter.test(ft.name()) || context.isMetadataField(ft.name()); + Predicate fcf = mappedField -> fieldFilter.test(mappedField.name()) || context.isMetadataField(mappedField.name()); if (fieldTypes.length > 0) { Set acceptedTypes = Set.of(fieldTypes); fcf = fcf.and(ft -> acceptedTypes.contains(ft.familyTypeName())); @@ -203,11 +203,11 @@ private static Predicate buildFilter( if ("parent".equals(filter) || "-parent".equals(filter)) { continue; } - Predicate next = switch (filter) { - case "+metadata" -> ft -> context.isMetadataField(ft.name()); - case "-metadata" -> ft -> context.isMetadataField(ft.name()) == false; - case "-nested" -> ft -> 
context.nestedLookup().getNestedParent(ft.name()) == null; - case "-multifield" -> ft -> context.isMultiField(ft.name()) == false; + Predicate next = switch (filter) { + case "+metadata" -> mappedField -> context.isMetadataField(mappedField.name()); + case "-metadata" -> mappedField -> context.isMetadataField(mappedField.name()) == false; + case "-nested" -> mappedField -> context.nestedLookup().getNestedParent(mappedField.name()) == null; + case "-multifield" -> mappedField -> context.isMultiField(mappedField.name()) == false; default -> throw new IllegalArgumentException("Unknown field caps filter [" + filter + "]"); }; fcf = fcf.and(next); diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index e651b6b7a4da9..197f17f670742 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -205,7 +205,7 @@ public IndexService( // The sort order is validated right after the merge of the mapping later in the process. 
this.indexSortSupplier = () -> indexSettings.getIndexSortConfig() .buildIndexSort( - mapperService::fieldType, + mapperService::mappedField, (fieldType, searchLookup) -> indexFieldData.getForField(fieldType, indexFieldData.index().getName(), searchLookup) ); } else { diff --git a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java index 0cf09d0d49f77..03ab762adbba8 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.lookup.SearchLookup; @@ -204,8 +204,8 @@ public boolean hasPrimarySortOnField(String field) { * or returns null if this index has no sort. 
*/ public Sort buildIndexSort( - Function fieldTypeLookup, - BiFunction, IndexFieldData> fieldDataLookup + Function fieldTypeLookup, + BiFunction, IndexFieldData> fieldDataLookup ) { if (hasIndexSort() == false) { return null; @@ -214,15 +214,15 @@ public Sort buildIndexSort( final SortField[] sortFields = new SortField[sortSpecs.length]; for (int i = 0; i < sortSpecs.length; i++) { FieldSortSpec sortSpec = sortSpecs[i]; - final MappedFieldType ft = fieldTypeLookup.apply(sortSpec.field); - if (ft == null) { + final MappedField mappedField = fieldTypeLookup.apply(sortSpec.field); + if (mappedField == null) { String err = "unknown index sort field:[" + sortSpec.field + "]"; if (this.indexMode == IndexMode.TIME_SERIES) { err += " required by [" + IndexSettings.MODE.getKey() + "=time_series]"; } throw new IllegalArgumentException(err); } - if (Objects.equals(ft.name(), sortSpec.field) == false) { + if (Objects.equals(mappedField.name(), sortSpec.field) == false) { if (this.indexCreatedVersion.onOrAfter(Version.V_7_13_0)) { throw new IllegalArgumentException("Cannot use alias [" + sortSpec.field + "] as an index sort field"); } else { @@ -234,7 +234,7 @@ public Sort buildIndexSort( + "] defined on field [" + sortSpec.field + "] which resolves to field [" - + ft.name() + + mappedField.name() + "]. 
" + "You will not be able to define an index sort over aliased fields in new indexes" ); @@ -248,8 +248,12 @@ public Sort buildIndexSort( IndexFieldData fieldData; try { fieldData = fieldDataLookup.apply( - ft, - () -> { throw new UnsupportedOperationException("index sorting not supported on runtime field [" + ft.name() + "]"); } + mappedField, + () -> { + throw new UnsupportedOperationException( + "index sorting not supported on runtime field [" + mappedField.name() + "]" + ); + } ); } catch (Exception e) { throw new IllegalArgumentException("docvalues not found for index sort field:[" + sortSpec.field + "]", e); diff --git a/server/src/main/java/org/elasticsearch/index/IndexWarmer.java b/server/src/main/java/org/elasticsearch/index/IndexWarmer.java index 48d1037df9e6b..e402e9b6f23ec 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexWarmer.java +++ b/server/src/main/java/org/elasticsearch/index/IndexWarmer.java @@ -14,7 +14,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardState; @@ -106,18 +106,18 @@ private static class FieldDataWarmer implements IndexWarmer.Listener { @Override public TerminationHandle warmReader(final IndexShard indexShard, final ElasticsearchDirectoryReader reader) { final MapperService mapperService = indexShard.mapperService(); - final Map warmUpGlobalOrdinals = new HashMap<>(); - for (MappedFieldType fieldType : mapperService.getEagerGlobalOrdinalsFields()) { - final String indexName = fieldType.name(); - warmUpGlobalOrdinals.put(indexName, fieldType); + final Map warmUpGlobalOrdinals = new HashMap<>(); + for (MappedField mappedField : 
mapperService.getEagerGlobalOrdinalsFields()) { + final String indexName = mappedField.name(); + warmUpGlobalOrdinals.put(indexName, mappedField); } final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size()); - for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) { + for (final MappedField mappedField : warmUpGlobalOrdinals.values()) { executor.execute(() -> { try { final long start = System.nanoTime(); IndexFieldData.Global ifd = indexFieldDataService.getForField( - fieldType, + mappedField, indexFieldDataService.index().getName(), () -> { throw new UnsupportedOperationException("search lookup not available when warming an index"); } ); @@ -131,14 +131,14 @@ public TerminationHandle warmReader(final IndexShard indexShard, final Elasticse .logger() .trace( "warmed global ordinals for [{}], took [{}]", - fieldType.name(), + mappedField.name(), TimeValue.timeValueNanos(System.nanoTime() - start) ); } } catch (Exception e) { indexShard.warmerService() .logger() - .warn(() -> "failed to warm-up global ordinals for [" + fieldType.name() + "]", e); + .warn(() -> "failed to warm-up global ordinals for [" + mappedField.name() + "]", e); } finally { latch.countDown(); } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 7836efe3fda27..b809b1b62d883 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.shard.ShardId; import 
org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; @@ -99,12 +99,12 @@ public synchronized void clearField(final String fieldName) { */ @SuppressWarnings("unchecked") public > IFD getForField( - MappedFieldType fieldType, + MappedField mappedField, String fullyQualifiedIndexName, Supplier searchLookup ) { - final String fieldName = fieldType.name(); - IndexFieldData.Builder builder = fieldType.fielddataBuilder(fullyQualifiedIndexName, searchLookup); + final String fieldName = mappedField.name(); + IndexFieldData.Builder builder = mappedField.fielddataBuilder(fullyQualifiedIndexName, searchLookup); IndexFieldDataCache cache; synchronized (this) { diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java index b537c35001f20..21b041377ac21 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.LegacyTypeFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.Uid; @@ -81,12 +81,12 @@ public Set getFieldNames() { return requiredFields; } - public final void postProcess(Function fieldTypeLookup) { + public final void postProcess(Function mappedFieldLookup) { for (Map.Entry> entry : fields().entrySet()) { - MappedFieldType fieldType = fieldTypeLookup.apply(entry.getKey()); + MappedField mappedField = mappedFieldLookup.apply(entry.getKey()); List fieldValues = entry.getValue(); for (int i = 0; 
i < fieldValues.size(); i++) { - fieldValues.set(i, fieldType.valueForDisplay(fieldValues.get(i))); + fieldValues.set(i, mappedField.valueForDisplay(fieldValues.get(i))); } } } diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java index febb0decd97bb..2d0702aa3b81e 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -11,7 +11,7 @@ import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Uid; import java.util.List; @@ -20,7 +20,7 @@ * {@linkplain StoredFieldVisitor} that loads a single field value. */ public final class SingleFieldsVisitor extends StoredFieldVisitor { - private final MappedFieldType field; + private final MappedField field; private final List destination; /** @@ -28,7 +28,7 @@ public final class SingleFieldsVisitor extends StoredFieldVisitor { * @param field the name of the field to load * @param destination where to put the field's values */ - public SingleFieldsVisitor(MappedFieldType field, List destination) { + public SingleFieldsVisitor(MappedField field, List destination) { this.field = field; this.destination = destination; } diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index df7d454ace53a..d2b247aca6c00 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -261,7 +261,7 @@ private GetResult innerGetFetch( // put stored fields into result 
objects if (fieldVisitor.fields().isEmpty() == false) { - fieldVisitor.postProcess(mapperService::fieldType); + fieldVisitor.postProcess(mapperService::mappedField); documentFields = new HashMap<>(); metadataFields = new HashMap<>(); for (Map.Entry> entry : fieldVisitor.fields().entrySet()) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index 04a6d62576073..4465a9413868b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -88,21 +88,20 @@ public abstract static class AbstractGeometryFieldType extends MappedFieldTyp protected final Parser geometryParser; protected AbstractGeometryFieldType( - String name, boolean indexed, boolean stored, boolean hasDocValues, Parser geometryParser, Map meta ) { - super(name, indexed, stored, hasDocValues, TextSearchInfo.NONE, meta); + super(indexed, stored, hasDocValues, TextSearchInfo.NONE, meta); this.geometryParser = geometryParser; } @Override - public final Query termQuery(Object value, SearchExecutionContext context) { + public final Query termQuery(String name, Object value, SearchExecutionContext context) { throw new IllegalArgumentException( - "Geometry fields do not support exact searching, use dedicated geometry queries instead: [" + name() + "]" + "Geometry fields do not support exact searching, use dedicated geometry queries instead: [" + name + "]" ); } @@ -112,9 +111,9 @@ public final Query termQuery(Object value, SearchExecutionContext context) { protected abstract Function, List> getFormatter(String format); @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { Function, List> formatter = 
getFormatter(format != null ? format : GeometryFormatterFactory.GEOJSON); - return new ArraySourceValueFetcher(name(), context) { + return new ArraySourceValueFetcher(name, context) { @Override protected Object parseSourceValue(Object value) { final List values = new ArrayList<>(); @@ -131,14 +130,14 @@ protected Object parseSourceValue(Object value) { protected AbstractGeometryFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, Explicit ignoreMalformed, Explicit ignoreZValue, MultiFields multiFields, CopyTo copyTo, Parser parser ) { - super(simpleName, mappedFieldType, multiFields, copyTo, false, null); + super(simpleName, mappedField, multiFields, copyTo, false, null); this.ignoreMalformed = ignoreMalformed; this.ignoreZValue = ignoreZValue; this.parser = parser; @@ -146,13 +145,13 @@ protected AbstractGeometryFieldMapper( protected AbstractGeometryFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Parser parser, String onScriptError ) { - super(simpleName, mappedFieldType, multiFields, copyTo, true, onScriptError); + super(simpleName, mappedField, multiFields, copyTo, true, onScriptError); this.ignoreMalformed = Explicit.EXPLICIT_FALSE; this.ignoreZValue = Explicit.EXPLICIT_FALSE; this.parser = parser; @@ -161,7 +160,7 @@ protected AbstractGeometryFieldMapper( @Override @SuppressWarnings("unchecked") public AbstractGeometryFieldType fieldType() { - return (AbstractGeometryFieldType) mappedFieldType; + return (AbstractGeometryFieldType) mappedField.type(); } @Override @@ -180,15 +179,15 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio public final void parse(DocumentParserContext context) throws IOException { if (hasScript) { throw new MapperParsingException( - "failed to parse field [" + fieldType().name() + "] of type + " + contentType() + "]", + "failed to parse field [" + name() + "] of type + " + 
contentType() + "]", new IllegalArgumentException("Cannot index data directly into a field with a [script] parameter") ); } parser.parse(context.parser(), v -> index(context, v), e -> { if (ignoreMalformed()) { - context.addIgnoredField(fieldType().name()); + context.addIgnoredField(name()); } else { - throw new MapperParsingException("failed to parse field [" + fieldType().name() + "] of type [" + contentType() + "]", e); + throw new MapperParsingException("failed to parse field [" + name() + "] of type [" + contentType() + "]", e); } }); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java index 571f0d59a9f20..ad88eac786228 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java @@ -36,7 +36,7 @@ public static Parameter nullValueParam( protected AbstractPointGeometryFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, Explicit ignoreMalformed, Explicit ignoreZValue, @@ -44,19 +44,19 @@ protected AbstractPointGeometryFieldMapper( CopyTo copyTo, Parser parser ) { - super(simpleName, mappedFieldType, ignoreMalformed, ignoreZValue, multiFields, copyTo, parser); + super(simpleName, mappedField, ignoreMalformed, ignoreZValue, multiFields, copyTo, parser); this.nullValue = nullValue; } protected AbstractPointGeometryFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Parser parser, String onScriptError ) { - super(simpleName, mappedFieldType, multiFields, copyTo, parser, onScriptError); + super(simpleName, mappedField, multiFields, copyTo, parser, onScriptError); this.nullValue = null; } diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java index 66d1390ea59ff..50dd5aebf373e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java @@ -31,6 +31,7 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.function.BiFunction; import java.util.function.Function; import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; @@ -41,17 +42,16 @@ abstract class AbstractScriptFieldType extends MappedFieldType { protected final Script script; - private final Function factory; + private final BiFunction factory; private final boolean isResultDeterministic; AbstractScriptFieldType( - String name, - Function factory, + BiFunction factory, Script script, boolean isResultDeterministic, Map meta ) { - super(name, false, false, false, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(false, false, false, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.factory = factory; this.script = Objects.requireNonNull(script); this.isResultDeterministic = isResultDeterministic; @@ -63,12 +63,13 @@ public final boolean isSearchable() { } @Override - public final boolean isAggregatable() { + public final boolean isAggregatable(String name) { return true; } @Override public final Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -80,12 +81,13 @@ public final Query rangeQuery( ) { if (relation == ShapeRelation.DISJOINT) { String message = "Runtime field [%s] of type [%s] does not support DISJOINT ranges"; - throw new IllegalArgumentException(String.format(Locale.ROOT, message, name(), typeName())); + throw new IllegalArgumentException(String.format(Locale.ROOT, message, name, typeName())); } - return rangeQuery(lowerTerm, upperTerm, 
includeLower, includeUpper, timeZone, parser, context); + return rangeQuery(name, lowerTerm, upperTerm, includeLower, includeUpper, timeZone, parser, context); } protected abstract Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -97,6 +99,7 @@ protected abstract Query rangeQuery( @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -104,21 +107,34 @@ public Query fuzzyQuery( boolean transpositions, SearchExecutionContext context ) { - throw new IllegalArgumentException(unsupported("fuzzy", "keyword and text")); + throw new IllegalArgumentException(unsupported(name, "fuzzy", "keyword and text")); } @Override - public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { - throw new IllegalArgumentException(unsupported("prefix", "keyword, text and wildcard")); + public Query prefixQuery( + String name, + String value, + MultiTermQuery.RewriteMethod method, + boolean caseInsensitive, + SearchExecutionContext context + ) { + throw new IllegalArgumentException(unsupported(name, "prefix", "keyword, text and wildcard")); } @Override - public Query wildcardQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { - throw new IllegalArgumentException(unsupported("wildcard", "keyword, text and wildcard")); + public Query wildcardQuery( + String name, + String value, + MultiTermQuery.RewriteMethod method, + boolean caseInsensitive, + SearchExecutionContext context + ) { + throw new IllegalArgumentException(unsupported(name, "wildcard", "keyword, text and wildcard")); } @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -126,36 +142,47 @@ public Query regexpQuery( MultiTermQuery.RewriteMethod method, SearchExecutionContext context ) { - throw new IllegalArgumentException(unsupported("regexp", "keyword 
and text")); + throw new IllegalArgumentException(unsupported(name, "regexp", "keyword and text")); } @Override - public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) { - throw new IllegalArgumentException(unsupported("phrase", "text")); + public Query phraseQuery(String name, TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) { + throw new IllegalArgumentException(unsupported(name, "phrase", "text")); } @Override - public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) { - throw new IllegalArgumentException(unsupported("phrase", "text")); + public Query multiPhraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) { + throw new IllegalArgumentException(unsupported(name, "phrase", "text")); } @Override - public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) { - throw new IllegalArgumentException(unsupported("phrase prefix", "text")); + public Query phrasePrefixQuery(String name, TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) { + throw new IllegalArgumentException(unsupported(name, "phrase prefix", "text")); } @Override - public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, SearchExecutionContext context) { - throw new IllegalArgumentException(unsupported("span prefix", "text")); + public SpanQuery spanPrefixQuery( + String name, + String value, + SpanMultiTermQueryWrapper.SpanRewriteMethod method, + SearchExecutionContext context + ) { + throw new IllegalArgumentException(unsupported(name, "span prefix", "text")); } - private String unsupported(String query, String supported) { + private String unsupported(String name, String query, String supported) { return 
String.format( Locale.ROOT, "Can only use %s queries on %s fields - not on [%s] which is a runtime field of type [%s]", query, supported, - name(), + name, typeName() ); } @@ -172,35 +199,35 @@ protected final void applyScriptContext(SearchExecutionContext context) { } @Override - public final ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return new DocValueFetcher(docValueFormat(format, null), context.getForField(this)); + public final ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return new DocValueFetcher(docValueFormat(name, format, null), context.getForField(new MappedField(name, this))); } /** * Create a script leaf factory. */ - protected final LeafFactory leafFactory(SearchLookup searchLookup) { - return factory.apply(searchLookup); + protected final LeafFactory leafFactory(String name, SearchLookup searchLookup) { + return factory.apply(name, searchLookup); } /** * Create a script leaf factory for queries. */ - protected final LeafFactory leafFactory(SearchExecutionContext context) { + protected final LeafFactory leafFactory(String name, SearchExecutionContext context) { /* * Forking here causes us to count this field in the field data loop * detection code as though we were resolving field data for this field. * We're not, but running the query is close enough. */ - return leafFactory(context.lookup().forkAndTrackFieldReferences(name())); + return leafFactory(name, context.lookup().forkAndTrackFieldReferences(name)); } @Override - public void validateMatchedRoutingPath() { + public void validateMatchedRoutingPath(String name) { throw new IllegalArgumentException( "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "and without the [script] parameter. [" - + name() + + name + "] was a runtime [" + typeName() + "]." @@ -256,17 +283,17 @@ protected final RuntimeField createChildRuntimeField( String fullName = parent + "." 
+ name; return new LeafRuntimeField( name, - createFieldType(fullName, getCompositeLeafFactory(parentScriptFactory), getScript(), meta()), + new MappedField(fullName, createFieldType(getCompositeLeafFactory(parentScriptFactory), getScript(), meta())), getParameters() ); } final RuntimeField createRuntimeField(Factory scriptFactory) { - AbstractScriptFieldType fieldType = createFieldType(name, scriptFactory, getScript(), meta()); - return new LeafRuntimeField(name, fieldType, getParameters()); + AbstractScriptFieldType fieldType = createFieldType(scriptFactory, getScript(), meta()); + return new LeafRuntimeField(name, new MappedField(name, fieldType), getParameters()); } - abstract AbstractScriptFieldType createFieldType(String name, Factory factory, Script script, Map meta); + abstract AbstractScriptFieldType createFieldType(Factory factory, Script script, Map meta); @Override protected List> getParameters() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java index 814bf8622c33d..ff7c60911626f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java @@ -41,7 +41,6 @@ public abstract static class AbstractShapeGeometryFieldType extends AbstractG private final Orientation orientation; protected AbstractShapeGeometryFieldType( - String name, boolean isSearchable, boolean isStored, boolean hasDocValues, @@ -49,7 +48,7 @@ protected AbstractShapeGeometryFieldType( Orientation orientation, Map meta ) { - super(name, isSearchable, isStored, hasDocValues, parser, meta); + super(isSearchable, isStored, hasDocValues, parser, meta); this.orientation = orientation; } @@ -63,7 +62,7 @@ public Orientation orientation() { protected AbstractShapeGeometryFieldMapper( String simpleName, - 
MappedFieldType mappedFieldType, + MappedField mappedField, Explicit ignoreMalformed, Explicit coerce, Explicit ignoreZValue, @@ -72,7 +71,7 @@ protected AbstractShapeGeometryFieldMapper( CopyTo copyTo, Parser parser ) { - super(simpleName, mappedFieldType, ignoreMalformed, ignoreZValue, multiFields, copyTo, parser); + super(simpleName, mappedField, ignoreMalformed, ignoreZValue, multiFields, copyTo, parser); this.coerce = coerce; this.orientation = orientation; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index eb6a1d611b11d..8db8db50ec3f5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -66,7 +66,10 @@ public Parameter[] getParameters() { public BinaryFieldMapper build(MapperBuilderContext context) { return new BinaryFieldMapper( name, - new BinaryFieldType(context.buildFullName(name), stored.getValue(), hasDocValues.getValue(), meta.getValue()), + new MappedField( + context.buildFullName(name), + new BinaryFieldType(stored.getValue(), hasDocValues.getValue(), meta.getValue()) + ), multiFieldsBuilder.build(this, context), copyTo.build(), this @@ -78,12 +81,12 @@ public BinaryFieldMapper build(MapperBuilderContext context) { public static final class BinaryFieldType extends MappedFieldType { - private BinaryFieldType(String name, boolean isStored, boolean hasDocValues, Map meta) { - super(name, false, isStored, hasDocValues, TextSearchInfo.NONE, meta); + private BinaryFieldType(boolean isStored, boolean hasDocValues, Map meta) { + super(false, isStored, hasDocValues, TextSearchInfo.NONE, meta); } - public BinaryFieldType(String name) { - this(name, false, true, Collections.emptyMap()); + public BinaryFieldType() { + this(false, true, Collections.emptyMap()); } @Override @@ -92,12 +95,12 @@ public String typeName() { } 
@Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.identity(name(), context, format); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return SourceValueFetcher.identity(name, context, format); } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { return DocValueFormat.BINARY; } @@ -121,13 +124,13 @@ public BytesReference valueForDisplay(Object value) { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new BytesBinaryIndexFieldData.Builder(name(), CoreValuesSourceType.KEYWORD); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new BytesBinaryIndexFieldData.Builder(name, CoreValuesSourceType.KEYWORD); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new IllegalArgumentException("Binary fields do not support searching"); } } @@ -135,14 +138,8 @@ public Query termQuery(Object value, SearchExecutionContext context) { private final boolean stored; private final boolean hasDocValues; - protected BinaryFieldMapper( - String simpleName, - MappedFieldType mappedFieldType, - MultiFields multiFields, - CopyTo copyTo, - Builder builder - ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + protected BinaryFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo); this.stored = builder.stored.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); } @@ -163,14 +160,14 @@ public void 
indexValue(DocumentParserContext context, byte[] value) { return; } if (stored) { - context.doc().add(new StoredField(fieldType().name(), value)); + context.doc().add(new StoredField(name(), value)); } if (hasDocValues) { - CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().name()); + CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(name()); if (field == null) { - field = new CustomBinaryDocValuesField(fieldType().name(), value); - context.doc().addWithKey(fieldType().name(), field); + field = new CustomBinaryDocValuesField(name(), value); + context.doc().addWithKey(name(), field); } else { field.add(value); } @@ -178,7 +175,7 @@ public void indexValue(DocumentParserContext context, byte[] value) { // Only add an entry to the field names field if the field is stored // but has no doc values so exists query will work on a field with // no doc values - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 9736898a22d09..fdc1f2d4db7ec 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -116,8 +116,7 @@ protected Parameter[] getParameters() { @Override public BooleanFieldMapper build(MapperBuilderContext context) { - MappedFieldType ft = new BooleanFieldType( - context.buildFullName(name), + BooleanFieldType ft = new BooleanFieldType( indexed.getValue() && indexCreatedVersion.isLegacyIndexVersion() == false, stored.getValue(), docValues.getValue(), @@ -126,7 +125,13 @@ public BooleanFieldMapper build(MapperBuilderContext context) { meta.getValue() ); - return new BooleanFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this); + return 
new BooleanFieldMapper( + name, + new MappedField(context.buildFullName(name), ft), + multiFieldsBuilder.build(this, context), + copyTo.build(), + this + ); } private FieldValues scriptValues() { @@ -155,7 +160,6 @@ public static final class BooleanFieldType extends TermBasedFieldType { private final FieldValues scriptValues; public BooleanFieldType( - String name, boolean isIndexed, boolean isStored, boolean hasDocValues, @@ -163,21 +167,21 @@ public BooleanFieldType( FieldValues scriptValues, Map meta ) { - super(name, isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); + super(isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); this.nullValue = nullValue; this.scriptValues = scriptValues; } - public BooleanFieldType(String name) { - this(name, true); + public BooleanFieldType() { + this(true); } - public BooleanFieldType(String name, boolean isIndexed) { - this(name, isIndexed, true); + public BooleanFieldType(boolean isIndexed) { + this(isIndexed, true); } - public BooleanFieldType(String name, boolean isIndexed, boolean hasDocValues) { - this(name, isIndexed, isIndexed, hasDocValues, false, null, Collections.emptyMap()); + public BooleanFieldType(boolean isIndexed, boolean hasDocValues) { + this(isIndexed, isIndexed, hasDocValues, false, null, Collections.emptyMap()); } @Override @@ -191,14 +195,14 @@ public boolean isSearchable() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } if (this.scriptValues != null) { return FieldValues.valueFetcher(this.scriptValues, context); } - return new SourceValueFetcher(name(), 
context, nullValue) { + return new SourceValueFetcher(name, context, nullValue) { @Override protected Boolean parseSourceValue(Object value) { if (value instanceof Boolean) { @@ -212,7 +216,7 @@ protected Boolean parseSourceValue(Object value) { } @Override - public BytesRef indexedValueForSearch(Object value) { + public BytesRef indexedValueForSearch(String name, Object value) { if (value == null) { return Values.FALSE; } @@ -232,8 +236,8 @@ public BytesRef indexedValueForSearch(Object value) { }; } - private long docValueForSearch(Object value) { - BytesRef ref = indexedValueForSearch(value); + private long docValueForSearch(String name, Object value) { + BytesRef ref = indexedValueForSearch(name, value); if (Values.TRUE.equals(ref)) { return 1; } else { @@ -254,37 +258,37 @@ public Boolean valueForDisplay(Object value) { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.BOOLEAN, BooleanDocValuesField::new); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new SortedNumericIndexFieldData.Builder(name, NumericType.BOOLEAN, BooleanDocValuesField::new); } @Override - public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { - checkNoFormat(format); - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, @Nullable String format, ZoneId timeZone) { + checkNoFormat(name, format); + checkNoTimeZone(name, timeZone); return DocValueFormat.BOOLEAN; } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return 
super.termQuery(value, context); + return super.termQuery(name, value, context); } else { - return SortedNumericDocValuesField.newSlowExactQuery(name(), docValueForSearch(value)); + return SortedNumericDocValuesField.newSlowExactQuery(name, docValueForSearch(name, value)); } } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.termsQuery(values, context); + return super.termsQuery(name, values, context); } else { BooleanQuery.Builder builder = new BooleanQuery.Builder(); for (Object value : values) { - builder.add(termQuery(value, context), BooleanClause.Occur.SHOULD); + builder.add(termQuery(name, value, context), BooleanClause.Occur.SHOULD); } return new ConstantScoreQuery(builder.build()); } @@ -292,18 +296,19 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { return new TermRangeQuery( - name(), - lowerTerm == null ? null : indexedValueForSearch(lowerTerm), - upperTerm == null ? null : indexedValueForSearch(upperTerm), + name, + lowerTerm == null ? null : indexedValueForSearch(name, lowerTerm), + upperTerm == null ? 
null : indexedValueForSearch(name, upperTerm), includeLower, includeUpper ); @@ -311,13 +316,13 @@ public Query rangeQuery( long l = 0; long u = 1; if (lowerTerm != null) { - l = docValueForSearch(lowerTerm); + l = docValueForSearch(name, lowerTerm); if (includeLower == false) { l = Math.max(1, l + 1); } } if (upperTerm != null) { - u = docValueForSearch(upperTerm); + u = docValueForSearch(name, upperTerm); if (includeUpper == false) { l = Math.min(0, l - 1); } @@ -325,7 +330,7 @@ public Query rangeQuery( if (l > u) { return new MatchNoDocsQuery(); } - return SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u); + return SortedNumericDocValuesField.newSlowRangeQuery(name, l, u); } } } @@ -339,14 +344,8 @@ public Query rangeQuery( private final ScriptCompiler scriptCompiler; private final Version indexCreatedVersion; - protected BooleanFieldMapper( - String simpleName, - MappedFieldType mappedFieldType, - MultiFields multiFields, - CopyTo copyTo, - Builder builder - ) { - super(simpleName, mappedFieldType, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.getValue()); + protected BooleanFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.getValue()); this.nullValue = builder.nullValue.getValue(); this.stored = builder.stored.getValue(); this.indexed = builder.indexed.getValue(); @@ -359,7 +358,7 @@ protected BooleanFieldMapper( @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override @@ -390,15 +389,15 @@ private void indexValue(DocumentParserContext context, Boolean value) { return; } if (indexed) { - context.doc().add(new Field(fieldType().name(), value ? "T" : "F", Defaults.FIELD_TYPE)); + context.doc().add(new Field(name(), value ? 
"T" : "F", Defaults.FIELD_TYPE)); } if (stored) { - context.doc().add(new StoredField(fieldType().name(), value ? "T" : "F")); + context.doc().add(new StoredField(name(), value ? "T" : "F")); } if (hasDocValues) { - context.doc().add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0)); + context.doc().add(new SortedNumericDocValuesField(name(), value ? 1 : 0)); } else { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java index 2e837d0f5d54f..cfd07127b968e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java @@ -42,13 +42,8 @@ private static class Builder extends AbstractScriptFieldType.Builder createFieldType( - String name, - BooleanFieldScript.Factory factory, - Script script, - Map meta - ) { - return new BooleanScriptFieldType(name, factory, script, meta); + AbstractScriptFieldType createFieldType(BooleanFieldScript.Factory factory, Script script, Map meta) { + return new BooleanScriptFieldType(factory, script, meta); } @Override @@ -67,10 +62,9 @@ public static RuntimeField sourceOnly(String name) { return new Builder(name).createRuntimeField(BooleanFieldScript.PARSE_FROM_SOURCE); } - BooleanScriptFieldType(String name, BooleanFieldScript.Factory scriptFactory, Script script, Map meta) { + BooleanScriptFieldType(BooleanFieldScript.Factory scriptFactory, Script script, Map meta) { super( - name, - searchLookup -> scriptFactory.newFactory(name, script.getParams(), searchLookup), + (name, searchLookup) -> scriptFactory.newFactory(name, script.getParams(), searchLookup), script, scriptFactory.isResultDeterministic(), meta @@ -95,25 +89,30 @@ public Object valueForDisplay(Object value) { } @Override - public 
DocValueFormat docValueFormat(String format, ZoneId timeZone) { - checkNoFormat(format); - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { + checkNoFormat(name, format); + checkNoTimeZone(name, timeZone); return DocValueFormat.BOOLEAN; } @Override - public BooleanScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - return new BooleanScriptFieldData.Builder(name(), leafFactory(searchLookup.get()), BooleanDocValuesField::new); + public BooleanScriptFieldData.Builder fielddataBuilder( + String name, + String fullyQualifiedIndexName, + Supplier searchLookup + ) { + return new BooleanScriptFieldData.Builder(name, leafFactory(name, searchLookup.get()), BooleanDocValuesField::new); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { applyScriptContext(context); - return new BooleanScriptFieldExistsQuery(script, leafFactory(context), name()); + return new BooleanScriptFieldExistsQuery(script, leafFactory(name, context), name); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -174,23 +173,23 @@ public Query rangeQuery( } } - return termsQuery(trueAllowed, falseAllowed, context); + return termsQuery(name, trueAllowed, falseAllowed, context); } @Override - public Query termQueryCaseInsensitive(Object value, SearchExecutionContext context) { + public Query termQueryCaseInsensitive(String name, Object value, SearchExecutionContext context) { applyScriptContext(context); - return new BooleanScriptFieldTermQuery(script, leafFactory(context.lookup()), name(), toBoolean(value, true)); + return new BooleanScriptFieldTermQuery(script, leafFactory(name, context.lookup()), name, toBoolean(value, true)); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String 
name, Object value, SearchExecutionContext context) { applyScriptContext(context); - return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), toBoolean(value, false)); + return new BooleanScriptFieldTermQuery(script, leafFactory(name, context), name, toBoolean(value, false)); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { if (values.isEmpty()) { return Queries.newMatchNoDocsQuery("Empty terms query"); } @@ -203,21 +202,21 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { falseAllowed = true; } } - return termsQuery(trueAllowed, falseAllowed, context); + return termsQuery(name, trueAllowed, falseAllowed, context); } - private Query termsQuery(boolean trueAllowed, boolean falseAllowed, SearchExecutionContext context) { + private Query termsQuery(String name, boolean trueAllowed, boolean falseAllowed, SearchExecutionContext context) { if (trueAllowed) { if (falseAllowed) { // Either true or false - return existsQuery(context); + return existsQuery(name, context); } applyScriptContext(context); - return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), true); + return new BooleanScriptFieldTermQuery(script, leafFactory(name, context), name, true); } if (falseAllowed) { applyScriptContext(context); - return new BooleanScriptFieldTermQuery(script, leafFactory(context), name(), false); + return new BooleanScriptFieldTermQuery(script, leafFactory(name, context), name, false); } return new MatchNoDocsQuery("neither true nor false allowed"); } @@ -227,7 +226,7 @@ private static boolean toBoolean(Object value) { } /** - * Convert the term into a boolean. Inspired by {@link BooleanFieldMapper.BooleanFieldType#indexedValueForSearch(Object)}. + * Convert the term into a boolean. Inspired by {@link BooleanFieldMapper.BooleanFieldType#indexedValueForSearch(String, Object)}. 
*/ private static boolean toBoolean(Object value, boolean caseInsensitive) { if (value == null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index fffff331cf44b..584d30a83a2b9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -203,9 +203,15 @@ public CompletionFieldMapper build(MapperBuilderContext context) { new CompletionAnalyzer(this.searchAnalyzer.getValue(), preserveSeparators.getValue(), preservePosInc.getValue()) ); - CompletionFieldType ft = new CompletionFieldType(context.buildFullName(name), completionAnalyzer, meta.getValue()); + CompletionFieldType ft = new CompletionFieldType(completionAnalyzer, meta.getValue()); ft.setContextMappings(contexts.getValue()); - return new CompletionFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this); + return new CompletionFieldMapper( + name, + new MappedField(context.buildFullName(name), ft), + multiFieldsBuilder.build(this, context), + copyTo.build(), + this + ); } private void checkCompletionContextsLimit() { @@ -249,8 +255,8 @@ public static final class CompletionFieldType extends TermBasedFieldType { private ContextMappings contextMappings = null; - public CompletionFieldType(String name, NamedAnalyzer searchAnalyzer, Map meta) { - super(name, true, false, false, new TextSearchInfo(Defaults.FIELD_TYPE, null, searchAnalyzer, searchAnalyzer), meta); + public CompletionFieldType(NamedAnalyzer searchAnalyzer, Map meta) { + super(true, false, false, new TextSearchInfo(Defaults.FIELD_TYPE, null, searchAnalyzer, searchAnalyzer), meta); } public void setContextMappings(ContextMappings contextMappings) { @@ -275,24 +281,25 @@ public ContextMappings getContextMappings() { /** * Completion prefix query */ - public CompletionQuery 
prefixQuery(Object value) { + public CompletionQuery prefixQuery(String name, Object value) { return new PrefixCompletionQuery( getTextSearchInfo().searchAnalyzer().analyzer(), - new Term(name(), indexedValueForSearch(value)) + new Term(name, indexedValueForSearch(name, value)) ); } /** * Completion prefix regular expression query */ - public CompletionQuery regexpQuery(Object value, int flags, int maxDeterminizedStates) { - return new RegexCompletionQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates); + public CompletionQuery regexpQuery(String name, Object value, int flags, int maxDeterminizedStates) { + return new RegexCompletionQuery(new Term(name, indexedValueForSearch(name, value)), flags, maxDeterminizedStates); } /** * Completion prefix fuzzy query */ public CompletionQuery fuzzyQuery( + String name, String value, Fuzziness fuzziness, int nonFuzzyPrefixLength, @@ -303,7 +310,7 @@ public CompletionQuery fuzzyQuery( ) { return new FuzzyCompletionQuery( getTextSearchInfo().searchAnalyzer().analyzer(), - new Term(name(), indexedValueForSearch(value)), + new Term(name, indexedValueForSearch(name, value)), null, fuzziness.asDistance(), transpositions, @@ -320,12 +327,12 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new ArraySourceValueFetcher(name(), context) { + return new ArraySourceValueFetcher(name, context) { @Override protected List parseSourceValue(Object value) { if (value instanceof List) { @@ -344,14 +351,8 @@ protected List parseSourceValue(Object value) { private final NamedAnalyzer 
indexAnalyzer; - public CompletionFieldMapper( - String simpleName, - MappedFieldType mappedFieldType, - MultiFields multiFields, - CopyTo copyTo, - Builder builder - ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + public CompletionFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo); this.builder = builder; this.maxInputLength = builder.maxInputLength.getValue(); this.indexAnalyzer = builder.buildAnalyzer(); @@ -359,7 +360,7 @@ public CompletionFieldMapper( @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), indexAnalyzer); + return Map.of(mappedField.name(), indexAnalyzer); } @Override @@ -414,7 +415,7 @@ public void parse(DocumentParserContext context) throws IOException { for (Map.Entry completionInput : inputMap.entrySet()) { String input = completionInput.getKey(); if (input.trim().isEmpty()) { - context.addIgnoredField(mappedFieldType.name()); + context.addIgnoredField(mappedField.name()); continue; } // truncate input @@ -428,18 +429,18 @@ public void parse(DocumentParserContext context) throws IOException { } CompletionInputMetadata metadata = completionInput.getValue(); if (fieldType().hasContextMappings()) { - fieldType().getContextMappings().addField(context.doc(), fieldType().name(), input, metadata.weight, metadata.contexts); + fieldType().getContextMappings().addField(context.doc(), name(), input, metadata.weight, metadata.contexts); } else { - context.doc().add(new SuggestField(fieldType().name(), input, metadata.weight)); + context.doc().add(new SuggestField(name(), input, metadata.weight)); } } - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); for (CompletionInputMetadata metadata : inputMap.values()) { multiFields.parse( this, context, - () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation())) + () 
-> context.switchParser(new MultiFieldParser(metadata, name(), context.parser().getTokenLocation())) ); } } @@ -513,7 +514,7 @@ private void parse( weight = weightValue.intValue(); } else if (Fields.CONTENT_FIELD_NAME_CONTEXTS.equals(currentFieldName)) { if (fieldType().hasContextMappings() == false) { - throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().name() + "]"); + throw new IllegalArgumentException("contexts field is not supported for field: [" + name() + "]"); } ContextMappings contextMappings = fieldType().getContextMappings(); XContentParser.Token currentToken = parser.currentToken(); @@ -595,7 +596,7 @@ protected String contentType() { public void doValidate(MappingLookup mappers) { if (fieldType().hasContextMappings()) { for (ContextMapping contextMapping : fieldType().getContextMappings()) { - contextMapping.validateReferences(builder.indexVersionCreated, s -> mappers.fieldTypesLookup().get(s)); + contextMapping.validateReferences(builder.indexVersionCreated, s -> mappers.mappedFieldsLookup().get(s)); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java b/server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java index a8e9b487160de..8ba4016978d73 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java @@ -106,8 +106,8 @@ public String name() { } @Override - public Stream asMappedFieldTypes() { - return subfields.stream().flatMap(RuntimeField::asMappedFieldTypes); + public Stream asMappedFields() { + return subfields.stream().flatMap(RuntimeField::asMappedFields); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java index 1f8e7fc77b072..a86f355786b80 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java @@ -30,13 +30,13 @@ */ public abstract class ConstantFieldType extends MappedFieldType { - public ConstantFieldType(String name, Map meta) { - super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + public ConstantFieldType(Map meta) { + super(true, false, true, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); assert isSearchable(); } @Override - public final boolean isAggregatable() { + public final boolean isAggregatable(String name) { return true; } @@ -51,7 +51,7 @@ private static String valueToString(Object value) { } @Override - public final Query termQuery(Object value, SearchExecutionContext context) { + public final Query termQuery(String name, Object value, SearchExecutionContext context) { String pattern = valueToString(value); if (matches(pattern, false, context)) { return Queries.newMatchAllQuery(); @@ -61,7 +61,7 @@ public final Query termQuery(Object value, SearchExecutionContext context) { } @Override - public final Query termQueryCaseInsensitive(Object value, SearchExecutionContext context) { + public final Query termQueryCaseInsensitive(String name, Object value, SearchExecutionContext context) { String pattern = valueToString(value); if (matches(pattern, true, context)) { return Queries.newMatchAllQuery(); @@ -71,7 +71,7 @@ public final Query termQueryCaseInsensitive(Object value, SearchExecutionContext } @Override - public final Query termsQuery(Collection values, SearchExecutionContext context) { + public final Query termsQuery(String name, Collection values, SearchExecutionContext context) { for (Object value : values) { String pattern = valueToString(value); if (matches(pattern, false, context)) { @@ -84,6 +84,7 @@ public final Query termsQuery(Collection values, SearchExecutionContext conte @Override public final Query prefixQuery( + String name, String 
prefix, @Nullable MultiTermQuery.RewriteMethod method, boolean caseInsensitive, @@ -99,6 +100,7 @@ public final Query prefixQuery( @Override public final Query wildcardQuery( + String name, String value, @Nullable MultiTermQuery.RewriteMethod method, boolean caseInsensitive, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DataStreamTimestampFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DataStreamTimestampFieldMapper.java index 5766cf2bb71dd..ac89ae9c23278 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DataStreamTimestampFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DataStreamTimestampFieldMapper.java @@ -47,7 +47,7 @@ public static final class TimestampFieldType extends MappedFieldType { static final TimestampFieldType INSTANCE = new TimestampFieldType(); private TimestampFieldType() { - super(NAME, false, false, false, TextSearchInfo.NONE, Map.of()); + super(false, false, false, TextSearchInfo.NONE, Map.of()); } @Override @@ -56,17 +56,17 @@ public String typeName() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support term queries"); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support term queries"); } @Override - public Query existsQuery(SearchExecutionContext context) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support exists queries"); + public Query existsQuery(String name, SearchExecutionContext context) { + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support exists queries"); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public 
ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(); } } @@ -101,7 +101,7 @@ public MetadataFieldMapper build() { private final boolean enabled; private DataStreamTimestampFieldMapper(boolean enabled) { - super(TimestampFieldType.INSTANCE); + super(new MappedField(NAME, TimestampFieldType.INSTANCE)); this.enabled = enabled; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index a7cadc7e40a8b..ced83162630fe 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -338,7 +338,6 @@ private Long parseNullValue(DateFieldType fieldType) { @Override public DateFieldMapper build(MapperBuilderContext context) { DateFieldType ft = new DateFieldType( - context.buildFullName(name()), index.getValue() && indexCreatedVersion.isLegacyIndexVersion() == false, index.getValue(), store.getValue(), @@ -351,7 +350,15 @@ public DateFieldMapper build(MapperBuilderContext context) { ); Long nullTimestamp = parseNullValue(ft); - return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), nullTimestamp, resolution, this); + return new DateFieldMapper( + name, + new MappedField(context.buildFullName(name()), ft), + multiFieldsBuilder.build(this, context), + copyTo.build(), + nullTimestamp, + resolution, + this + ); } } @@ -390,7 +397,6 @@ public static final class DateFieldType extends MappedFieldType { private final boolean pointsMetadataAvailable; public DateFieldType( - String name, boolean isIndexed, boolean pointsMetadataAvailable, boolean isStored, @@ -401,7 +407,7 @@ public DateFieldType( FieldValues scriptValues, Map meta ) { - super(name, isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(isIndexed, isStored, 
hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.dateTimeFormatter = dateTimeFormatter; this.dateMathParser = dateTimeFormatter.toDateMathParser(); this.resolution = resolution; @@ -411,7 +417,6 @@ public DateFieldType( } public DateFieldType( - String name, boolean isIndexed, boolean isStored, boolean hasDocValues, @@ -421,16 +426,15 @@ public DateFieldType( FieldValues scriptValues, Map meta ) { - this(name, isIndexed, isIndexed, isStored, hasDocValues, dateTimeFormatter, resolution, nullValue, scriptValues, meta); + this(isIndexed, isIndexed, isStored, hasDocValues, dateTimeFormatter, resolution, nullValue, scriptValues, meta); } - public DateFieldType(String name) { - this(name, true, true, false, true, DEFAULT_DATE_TIME_FORMATTER, Resolution.MILLISECONDS, null, null, Collections.emptyMap()); + public DateFieldType() { + this(true, true, false, true, DEFAULT_DATE_TIME_FORMATTER, Resolution.MILLISECONDS, null, null, Collections.emptyMap()); } - public DateFieldType(String name, boolean isIndexed) { + public DateFieldType(boolean isIndexed) { this( - name, isIndexed, isIndexed, false, @@ -443,16 +447,16 @@ public DateFieldType(String name, boolean isIndexed) { ); } - public DateFieldType(String name, DateFormatter dateFormatter) { - this(name, true, true, false, true, dateFormatter, Resolution.MILLISECONDS, null, null, Collections.emptyMap()); + public DateFieldType(DateFormatter dateFormatter) { + this(true, true, false, true, dateFormatter, Resolution.MILLISECONDS, null, null, Collections.emptyMap()); } - public DateFieldType(String name, Resolution resolution) { - this(name, true, true, false, true, DEFAULT_DATE_TIME_FORMATTER, resolution, null, null, Collections.emptyMap()); + public DateFieldType(Resolution resolution) { + this(true, true, false, true, DEFAULT_DATE_TIME_FORMATTER, resolution, null, null, Collections.emptyMap()); } - public DateFieldType(String name, Resolution resolution, DateFormatter dateFormatter) { - this(name, true, 
true, false, true, dateFormatter, resolution, null, null, Collections.emptyMap()); + public DateFieldType(Resolution resolution, DateFormatter dateFormatter) { + this(true, true, false, true, dateFormatter, resolution, null, null, Collections.emptyMap()); } @Override @@ -496,12 +500,12 @@ public long parse(String value) { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(this.name()); + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return context.fieldExistsInIndex(name); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { DateFormatter defaultFormatter = dateTimeFormatter(); DateFormatter formatter = format != null ? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale()) @@ -509,7 +513,7 @@ public ValueFetcher valueFetcher(SearchExecutionContext context, String format) if (scriptValues != null) { return FieldValues.valueFetcher(scriptValues, v -> format((long) v, formatter), context); } - return new SourceValueFetcher(name(), context, nullValue) { + return new SourceValueFetcher(name, context, nullValue) { @Override public String parseSourceValue(Object value) { String date = value instanceof Number ? 
NUMBER_FORMAT.format(value) : value.toString(); @@ -530,12 +534,13 @@ public boolean isSearchable() { } @Override - public Query termQuery(Object value, @Nullable SearchExecutionContext context) { - return rangeQuery(value, value, true, true, ShapeRelation.INTERSECTS, null, null, context); + public Query termQuery(String name, Object value, @Nullable SearchExecutionContext context) { + return rangeQuery(name, value, value, true, true, ShapeRelation.INTERSECTS, null, null, context); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -545,9 +550,9 @@ public Query rangeQuery( @Nullable DateMathParser forcedDateParser, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); + failIfNotIndexedNorDocValuesFallback(name, context); if (relation == ShapeRelation.DISJOINT) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support DISJOINT ranges"); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support DISJOINT ranges"); } DateMathParser parser; if (forcedDateParser == null) { @@ -563,16 +568,16 @@ public Query rangeQuery( return dateRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, parser, context, resolution, (l, u) -> { Query query; if (isIndexed()) { - query = LongPoint.newRangeQuery(name(), l, u); + query = LongPoint.newRangeQuery(name, l, u); if (hasDocValues()) { - Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u); + Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(name, l, u); query = new IndexOrDocValuesQuery(query, dvQuery); } } else { - query = SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u); + query = SortedNumericDocValuesField.newSlowRangeQuery(name, l, u); } - if (hasDocValues() && context.indexSortedOnField(name())) { - query = new IndexSortSortedNumericDocValuesRangeQuery(name(), l, u, query); + if 
(hasDocValues() && context.indexSortedOnField(name)) { + query = new IndexSortSortedNumericDocValuesRangeQuery(name, l, u, query); } return query; }); @@ -644,19 +649,19 @@ public static long parseToLong( } @Override - public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query distanceFeatureQuery(String name, Object origin, String pivot, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); long originLong = parseToLong(origin, true, null, null, context::nowInMillis); TimeValue pivotTime = TimeValue.parseTimeValue(pivot, "distance_feature.pivot"); long pivotLong = resolution.convert(pivotTime); // As we already apply boost in AbstractQueryBuilder::toQuery, we always passing a boost of 1.0 to distanceFeatureQuery if (isIndexed()) { - return LongPoint.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); + return LongPoint.newDistanceFeatureQuery(name, 1.0f, originLong, pivotLong); } else { return new LongScriptFieldDistanceFeatureQuery( new Script(""), - ctx -> new SortedNumericDocValuesLongFieldScript(name(), context.lookup(), ctx), - name(), + ctx -> new SortedNumericDocValuesLongFieldScript(name, context.lookup(), ctx), + name, originLong, pivotLong ); @@ -665,6 +670,7 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo @Override public Relation isFieldWithinQuery( + String name, IndexReader reader, Object from, Object to, @@ -678,13 +684,13 @@ public Relation isFieldWithinQuery( // we don't have a quick way to run this check on doc values, so fall back to default assuming we are within bounds return Relation.INTERSECTS; } - byte[] minPackedValue = PointValues.getMinPackedValue(reader, name()); + byte[] minPackedValue = PointValues.getMinPackedValue(reader, name); if (minPackedValue == null) { // no points, so nothing matches return Relation.DISJOINT; } long minValue = 
LongPoint.decodeDimension(minPackedValue, 0); - long maxValue = LongPoint.decodeDimension(PointValues.getMaxPackedValue(reader, name()), 0); + long maxValue = LongPoint.decodeDimension(PointValues.getMaxPackedValue(reader, name), 0); return isFieldWithinQuery(minValue, maxValue, from, to, includeLower, includeUpper, timeZone, dateParser, context); } @@ -749,9 +755,9 @@ public Function pointReaderIfPossible() { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), resolution.numericType(), resolution.getDefaultToScriptFieldFactory()); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new SortedNumericIndexFieldData.Builder(name, resolution.numericType(), resolution.getDefaultToScriptFieldFactory()); } @Override @@ -764,7 +770,7 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { + public DocValueFormat docValueFormat(String name, @Nullable String format, ZoneId timeZone) { DateFormatter dateTimeFormatter = this.dateTimeFormatter; if (format != null) { dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale()); @@ -798,14 +804,14 @@ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { private DateFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Long nullValue, Resolution resolution, Builder builder ) { - super(simpleName, mappedFieldType, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.get()); + super(simpleName, mappedField, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.get()); this.store = builder.store.getValue(); this.indexed = 
builder.index.getValue(); this.hasDocValues = builder.docValues.getValue(); @@ -852,7 +858,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio timestamp = fieldType().parse(dateAsString); } catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) { if (ignoreMalformed) { - context.addIgnoredField(mappedFieldType.name()); + context.addIgnoredField(mappedField.name()); return; } else { throw e; @@ -865,15 +871,15 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio private void indexValue(DocumentParserContext context, long timestamp) { if (indexed) { - context.doc().add(new LongPoint(fieldType().name(), timestamp)); + context.doc().add(new LongPoint(name(), timestamp)); } if (hasDocValues) { - context.doc().add(new SortedNumericDocValuesField(fieldType().name(), timestamp)); + context.doc().add(new SortedNumericDocValuesField(name(), timestamp)); } else if (store || indexed) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } if (store) { - context.doc().add(new StoredField(fieldType().name(), timestamp)); + context.doc().add(new StoredField(name(), timestamp)); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java index 7bd9ce529fe59..599eee6907b80 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java @@ -87,11 +87,11 @@ protected List> getParameters() { } @Override - AbstractScriptFieldType createFieldType(String name, DateFieldScript.Factory factory, Script script, Map meta) { + AbstractScriptFieldType createFieldType(DateFieldScript.Factory factory, Script script, Map meta) { String pattern = format.getValue() == null ? 
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern() : format.getValue(); Locale locale = this.locale.getValue() == null ? Locale.ROOT : this.locale.getValue(); DateFormatter dateTimeFormatter = DateFormatter.forPattern(pattern).withLocale(locale); - return new DateScriptFieldType(name, factory, dateTimeFormatter, script, meta); + return new DateScriptFieldType(factory, dateTimeFormatter, script, meta); } @Override @@ -114,16 +114,9 @@ public static RuntimeField sourceOnly(String name, DateFormatter dateTimeFormatt private final DateFormatter dateTimeFormatter; private final DateMathParser dateMathParser; - DateScriptFieldType( - String name, - DateFieldScript.Factory scriptFactory, - DateFormatter dateTimeFormatter, - Script script, - Map meta - ) { + DateScriptFieldType(DateFieldScript.Factory scriptFactory, DateFormatter dateTimeFormatter, Script script, Map meta) { super( - name, - searchLookup -> scriptFactory.newFactory(name, script.getParams(), searchLookup, dateTimeFormatter), + (name, searchLookup) -> scriptFactory.newFactory(name, script.getParams(), searchLookup, dateTimeFormatter), script, scriptFactory.isResultDeterministic(), meta @@ -147,7 +140,7 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { + public DocValueFormat docValueFormat(String name, @Nullable String format, ZoneId timeZone) { DateFormatter dateTimeFormatter = this.dateTimeFormatter; if (format != null) { dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale()); @@ -159,12 +152,16 @@ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { } @Override - public DateScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier lookup) { - return new DateScriptFieldData.Builder(name(), leafFactory(lookup.get()), Resolution.MILLISECONDS.getDefaultToScriptFieldFactory()); + public DateScriptFieldData.Builder 
fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier lookup) { + return new DateScriptFieldData.Builder( + name, + leafFactory(name, lookup.get()), + Resolution.MILLISECONDS.getDefaultToScriptFieldFactory() + ); } @Override - public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) { + public Query distanceFeatureQuery(String name, Object origin, String pivot, SearchExecutionContext context) { applyScriptContext(context); return DateFieldType.handleNow(context, now -> { long originLong = DateFieldType.parseToLong( @@ -178,8 +175,8 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo TimeValue pivotTime = TimeValue.parseTimeValue(pivot, "distance_feature.pivot"); return new LongScriptFieldDistanceFeatureQuery( script, - leafFactory(context)::newInstance, - name(), + leafFactory(name, context)::newInstance, + name, originLong, pivotTime.getMillis() ); @@ -187,13 +184,14 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { applyScriptContext(context); - return new LongScriptFieldExistsQuery(script, leafFactory(context)::newInstance, name()); + return new LongScriptFieldExistsQuery(script, leafFactory(name, context)::newInstance, name); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -213,21 +211,21 @@ public Query rangeQuery( parser, context, DateFieldMapper.Resolution.MILLISECONDS, - (l, u) -> new LongScriptFieldRangeQuery(script, leafFactory(context)::newInstance, name(), l, u) + (l, u) -> new LongScriptFieldRangeQuery(script, leafFactory(name, context)::newInstance, name, l, u) ); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext 
context) { return DateFieldType.handleNow(context, now -> { long l = DateFieldType.parseToLong(value, false, null, this.dateMathParser, now, DateFieldMapper.Resolution.MILLISECONDS); applyScriptContext(context); - return new LongScriptFieldTermQuery(script, leafFactory(context)::newInstance, name(), l); + return new LongScriptFieldTermQuery(script, leafFactory(name, context)::newInstance, name, l); }); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { if (values.isEmpty()) { return Queries.newMatchAllQuery(); } @@ -237,7 +235,7 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { terms.add(DateFieldType.parseToLong(value, false, null, this.dateMathParser, now, DateFieldMapper.Resolution.MILLISECONDS)); } applyScriptContext(context); - return new LongScriptFieldTermsQuery(script, leafFactory(context)::newInstance, name(), terms); + return new LongScriptFieldTermsQuery(script, leafFactory(name, context)::newInstance, name, terms); }); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java index 907a74c0ad259..02ba3f0e7f7c5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java @@ -34,7 +34,7 @@ public static final class DocCountFieldType extends MappedFieldType { public static final int DEFAULT_VALUE = 1; public DocCountFieldType() { - super(NAME, false, false, false, TextSearchInfo.NONE, Collections.emptyMap()); + super(false, false, false, TextSearchInfo.NONE, Collections.emptyMap()); } @Override @@ -48,22 +48,22 @@ public String familyTypeName() { } @Override - public Query existsQuery(SearchExecutionContext context) { - throw new QueryShardException(context, "Field [" + 
name() + "] of type [" + typeName() + "] does not support exists queries"); + public Query existsQuery(String name, SearchExecutionContext context) { + throw new QueryShardException(context, "Field [" + name + "] of type [" + typeName() + "] does not support exists queries"); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new QueryShardException(context, "Field [" + name() + "] of type [" + typeName() + "] is not searchable"); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + throw new QueryShardException(context, "Field [" + name + "] of type [" + typeName() + "] is not searchable"); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new SourceValueFetcher(name(), context, DEFAULT_VALUE) { + return new SourceValueFetcher(name, context, DEFAULT_VALUE) { @Override protected Object parseSourceValue(Object value) { if ("".equals(value)) { @@ -77,7 +77,7 @@ protected Object parseSourceValue(Object value) { } private DocCountFieldMapper() { - super(DocCountFieldType.INSTANCE); + super(new MappedField(NAME, DocCountFieldType.INSTANCE)); } @Override @@ -87,14 +87,12 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio // Check that _doc_count is a single value and not an array if (context.doc().getByKey(NAME) != null) { - throw new IllegalArgumentException("Arrays are not allowed for field [" + fieldType().name() + "]."); + throw new IllegalArgumentException("Arrays are not allowed for field [" + name() + "]."); } int value = parser.intValue(false); if 
(value <= 0) { - throw new IllegalArgumentException( - "Field [" + fieldType().name() + "] must be a positive integer. Value [" + value + "] is not allowed." - ); + throw new IllegalArgumentException("Field [" + name() + "] must be a positive integer. Value [" + value + "] is not allowed."); } context.doc().addWithKey(NAME, field(value)); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index e0aa3a4715219..a376ccd0a87fc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -106,7 +106,7 @@ public void validate(IndexSettings settings, boolean checkLimits) { List routingPaths = settings.getIndexMetadata().getRoutingPaths(); for (String path : routingPaths) { for (String match : mappingLookup.getMatchingFieldNames(path)) { - mappingLookup.getFieldType(match).validateMatchedRoutingPath(); + mappingLookup.getMappedField(match).type().validateMatchedRoutingPath(match); } for (String objectName : mappingLookup.objectMappers().keySet()) { // object type is not allowed in the routing paths diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 067061443d885..79ee8e9ac9b5b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -741,10 +741,10 @@ private static Mapper getLeafMapper(final DocumentParserContext context, ObjectM // if a leaf field is not mapped, and is defined as a runtime field, then we // don't create a dynamic mapping for it and don't index it. 
String fieldPath = context.path().pathAsText(fieldName); - MappedFieldType fieldType = context.mappingLookup().getFieldType(fieldPath); - if (fieldType != null) { + MappedField mappedField = context.mappingLookup().getMappedField(fieldPath); + if (mappedField != null) { // we haven't found a mapper with this name above, which means if a field type is found it is for sure a runtime field. - assert fieldType.hasDocValues() == false && fieldType.isAggregatable() && fieldType.isSearchable(); + assert mappedField.hasDocValues() == false && mappedField.isAggregatable() && mappedField.isSearchable(); return NO_OP_FIELDMAPPER; } return null; @@ -752,9 +752,9 @@ private static Mapper getLeafMapper(final DocumentParserContext context, ObjectM private static final FieldMapper NO_OP_FIELDMAPPER = new FieldMapper( "no-op", - new MappedFieldType("no-op", false, false, false, TextSearchInfo.NONE, Collections.emptyMap()) { + new MappedField("no-op", new MappedFieldType(false, false, false, TextSearchInfo.NONE, Collections.emptyMap()) { @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(); } @@ -764,10 +764,10 @@ public String typeName() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new UnsupportedOperationException(); } - }, + }), FieldMapper.MultiFields.empty(), FieldMapper.CopyTo.empty() ) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java index a542a299df73e..ed38268734613 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java @@ 
-43,13 +43,8 @@ private static class Builder extends AbstractScriptFieldType.Builder createFieldType( - String name, - DoubleFieldScript.Factory factory, - Script script, - Map meta - ) { - return new DoubleScriptFieldType(name, factory, script, meta); + AbstractScriptFieldType createFieldType(DoubleFieldScript.Factory factory, Script script, Map meta) { + return new DoubleScriptFieldType(factory, script, meta); } @Override @@ -67,10 +62,9 @@ public static RuntimeField sourceOnly(String name) { return new Builder(name).createRuntimeField(DoubleFieldScript.PARSE_FROM_SOURCE); } - DoubleScriptFieldType(String name, DoubleFieldScript.Factory scriptFactory, Script script, Map meta) { + DoubleScriptFieldType(DoubleFieldScript.Factory scriptFactory, Script script, Map meta) { super( - name, - searchLookup -> scriptFactory.newFactory(name, script.getParams(), searchLookup), + (name, searchLookup) -> scriptFactory.newFactory(name, script.getParams(), searchLookup), script, scriptFactory.isResultDeterministic(), meta @@ -88,8 +82,8 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { + checkNoTimeZone(name, timeZone); if (format == null) { return DocValueFormat.RAW; } @@ -97,18 +91,23 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) { } @Override - public DoubleScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - return new DoubleScriptFieldData.Builder(name(), leafFactory(searchLookup.get()), DoubleDocValuesField::new); + public DoubleScriptFieldData.Builder fielddataBuilder( + String name, + String fullyQualifiedIndexName, + Supplier searchLookup + ) { + return new DoubleScriptFieldData.Builder(name, leafFactory(name, searchLookup.get()), DoubleDocValuesField::new); } @Override - public Query 
existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { applyScriptContext(context); - return new DoubleScriptFieldExistsQuery(script, leafFactory(context), name()); + return new DoubleScriptFieldExistsQuery(script, leafFactory(name, context), name); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -123,18 +122,18 @@ public Query rangeQuery( upperTerm, includeLower, includeUpper, - (l, u) -> new DoubleScriptFieldRangeQuery(script, leafFactory(context), name(), l, u) + (l, u) -> new DoubleScriptFieldRangeQuery(script, leafFactory(name, context), name, l, u) ); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { applyScriptContext(context); - return new DoubleScriptFieldTermQuery(script, leafFactory(context), name(), NumberType.objectToDouble(value)); + return new DoubleScriptFieldTermQuery(script, leafFactory(name, context), name, NumberType.objectToDouble(value)); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { if (values.isEmpty()) { return Queries.newMatchAllQuery(); } @@ -143,6 +142,6 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { terms.add(Double.doubleToLongBits(NumberType.objectToDouble(value))); } applyScriptContext(context); - return new DoubleScriptFieldTermsQuery(script, leafFactory(context), name(), terms); + return new DoubleScriptFieldTermsQuery(script, leafFactory(name, context), name, terms); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicMappedField.java similarity index 70% rename from 
server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldType.java rename to server/src/main/java/org/elasticsearch/index/mapper/DynamicMappedField.java index 7ebd539d8ceba..ee5644a770d12 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicMappedField.java @@ -9,14 +9,14 @@ package org.elasticsearch.index.mapper; /** - * Defines a MappedFieldType that exposes dynamic child field types + * Defines a MappedField that exposes dynamic child field types * * If the field is named 'my_field', then a user is able to search on * the field in both of the following ways: * - Using the field name 'my_field', which will delegate to the field type * as usual. * - Using any sub-key, for example 'my_field.some_key'. In this case, the - * search is delegated to {@link #getChildFieldType(String)}, with 'some_key' + * search is delegated to {@link #getChildField(String)}, with 'some_key' * passed as the argument. The field may create a new field type dynamically * in order to handle the search. * @@ -24,10 +24,14 @@ * field mappers generating field types that implement this interface should * explicitly disallow multi-fields. 
*/ -public interface DynamicFieldType { +public abstract class DynamicMappedField extends MappedField { + + public DynamicMappedField(String name, MappedFieldType type) { + super(name, type); + } /** - * Returns a dynamic MappedFieldType for the given path + * Returns a dynamic MappedField for the given path */ - MappedFieldType getChildFieldType(String path); + public abstract MappedField getChildField(String path); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java index 0fc712badf14d..098d05936e218 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java @@ -81,7 +81,7 @@ public void validate(MappingLookup mappers) { "Invalid [path] value [" + path + "] for field alias [" + name() + "]: an alias cannot refer to itself." ); } - if (mappers.fieldTypesLookup().get(path) == null) { + if (mappers.mappedFieldsLookup().get(path) == null) { throw new MapperParsingException( "Invalid [path] value [" + path diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index acf5053fd6a4e..4ab0d3798ee63 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -66,7 +66,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { @SuppressWarnings("rawtypes") static final Parameter[] EMPTY_PARAMETERS = new Parameter[0]; - protected final MappedFieldType mappedFieldType; + protected final MappedField mappedField; protected final MultiFields multiFields; protected final CopyTo copyTo; protected final boolean hasScript; @@ -74,17 +74,17 @@ public abstract class FieldMapper extends Mapper implements Cloneable { /** * @param simpleName the leaf name of 
the mapper - * @param mappedFieldType the MappedFieldType associated with this mapper + * @param mappedField the MappedFieldType associated with this mapper * @param multiFields sub fields of this mapper * @param copyTo copyTo fields of this mapper */ - protected FieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiFields multiFields, CopyTo copyTo) { - this(simpleName, mappedFieldType, multiFields, copyTo, false, null); + protected FieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo) { + this(simpleName, mappedField, multiFields, copyTo, false, null); } /** * @param simpleName the leaf name of the mapper - * @param mappedFieldType the MappedFieldType associated with this mapper + * @param mappedField the MappedField associated with this mapper * @param multiFields sub fields of this mapper * @param copyTo copyTo fields of this mapper * @param hasScript whether a script is defined for the field @@ -92,17 +92,17 @@ protected FieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiF */ protected FieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, boolean hasScript, String onScriptError ) { super(simpleName); - if (mappedFieldType.name().isEmpty()) { + if (mappedField.name().isEmpty()) { throw new IllegalArgumentException("name cannot be empty string"); } - this.mappedFieldType = mappedFieldType; + this.mappedField = mappedField; this.multiFields = multiFields; this.copyTo = Objects.requireNonNull(copyTo); this.hasScript = hasScript; @@ -111,16 +111,20 @@ protected FieldMapper( @Override public String name() { - return fieldType().name(); + return mappedField.name(); } @Override public String typeName() { - return mappedFieldType.typeName(); + return mappedField.typeName(); } public MappedFieldType fieldType() { - return mappedFieldType; + return mappedField.type(); + } + + public final MappedField field() { + return 
mappedField; } /** @@ -189,7 +193,7 @@ private void rethrowAsMapperParsingException(DocumentParserContext context, Exce throw new MapperParsingException( "failed to parse field [{}] of type [{}] in {}. Could not parse field value preview,", e, - fieldType().name(), + mappedField.name(), fieldType().typeName(), context.documentDescription() ); @@ -198,7 +202,7 @@ private void rethrowAsMapperParsingException(DocumentParserContext context, Exce throw new MapperParsingException( "failed to parse field [{}] of type [{}] in {}. Preview of field's value: '{}'", e, - fieldType().name(), + mappedField.name(), fieldType().typeName(), context.documentDescription(), valuePreview diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index 3d841977d3b0b..72b221ba3f67b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -127,7 +127,7 @@ public static FieldNamesFieldType get(boolean enabled) { } private FieldNamesFieldType(boolean enabled) { - super(Defaults.NAME, true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + super(true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); this.enabled = enabled; } @@ -141,17 +141,17 @@ public boolean isEnabled() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "]."); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name + "]."); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { 
throw new UnsupportedOperationException("Cannot run exists query on _field_names"); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { if (isEnabled() == false) { throw new IllegalStateException("Cannot run [exists] queries if the [_field_names] field is disabled"); } @@ -160,7 +160,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { "terms_query_on_field_names", "terms query on the _field_names field is deprecated and will be removed, use exists query instead" ); - return super.termQuery(value, context); + return super.termQuery(name, value, context); } } @@ -168,7 +168,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { private final boolean createdOnOrAfterV8; private FieldNamesFieldMapper(Explicit enabled, boolean createdOnOrAfterV8) { - super(FieldNamesFieldType.get(enabled.value())); + super(new MappedField(NAME, FieldNamesFieldType.get(enabled.value()))); this.enabled = enabled; this.createdOnOrAfterV8 = createdOnOrAfterV8; } @@ -187,8 +187,8 @@ public void addFieldNames(DocumentParserContext context, String field) { } private static boolean noDocValues(String field, DocumentParserContext context) { - MappedFieldType ft = context.mappingLookup().getFieldType(field); - return ft == null || ft.hasDocValues() == false; + MappedField mappedField = context.mappingLookup().getMappedField(field); + return mappedField == null || mappedField.hasDocValues() == false; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index d34a7edf67e22..aaa41a38cd347 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -156,7 +156,6 @@ public FieldMapper 
build(MapperBuilderContext context) { ignoreMalformed.get().value() ); GeoPointFieldType ft = new GeoPointFieldType( - context.buildFullName(name), indexed.get() && indexCreatedVersion.isLegacyIndexVersion() == false, stored.get(), hasDocValues.get(), @@ -164,10 +163,11 @@ public FieldMapper build(MapperBuilderContext context) { scriptValues(), meta.get() ); + MappedField mappedField = new MappedField(context.buildFullName(name), ft); if (this.script.get() == null) { - return new GeoPointFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), geoParser, this); + return new GeoPointFieldMapper(name, mappedField, multiFieldsBuilder.build(this, context), copyTo.build(), geoParser, this); } - return new GeoPointFieldMapper(name, ft, geoParser, this); + return new GeoPointFieldMapper(name, mappedField, geoParser, this); } } @@ -185,7 +185,7 @@ public FieldMapper build(MapperBuilderContext context) { public GeoPointFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Parser parser, @@ -193,7 +193,7 @@ public GeoPointFieldMapper( ) { super( simpleName, - mappedFieldType, + mappedField, multiFields, builder.ignoreMalformed.get(), builder.ignoreZValue.get(), @@ -206,8 +206,8 @@ public GeoPointFieldMapper( this.indexCreatedVersion = builder.indexCreatedVersion; } - public GeoPointFieldMapper(String simpleName, MappedFieldType mappedFieldType, Parser parser, Builder builder) { - super(simpleName, mappedFieldType, MultiFields.empty(), CopyTo.empty(), parser, builder.onScriptError.get()); + public GeoPointFieldMapper(String simpleName, MappedField mappedField, Parser parser, Builder builder) { + super(simpleName, mappedField, MultiFields.empty(), CopyTo.empty(), parser, builder.onScriptError.get()); this.builder = builder; this.scriptValues = builder.scriptValues(); this.indexCreatedVersion = builder.indexCreatedVersion; @@ -222,15 +222,15 @@ public FieldMapper.Builder 
getMergeBuilder() { @Override protected void index(DocumentParserContext context, GeoPoint geometry) throws IOException { if (fieldType().isIndexed()) { - context.doc().add(new LatLonPoint(fieldType().name(), geometry.lat(), geometry.lon())); + context.doc().add(new LatLonPoint(name(), geometry.lat(), geometry.lon())); } if (fieldType().hasDocValues()) { - context.doc().add(new LatLonDocValuesField(fieldType().name(), geometry.lat(), geometry.lon())); + context.doc().add(new LatLonDocValuesField(name(), geometry.lat(), geometry.lon())); } else if (fieldType().isStored() || fieldType().isIndexed()) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } if (fieldType().isStored()) { - context.doc().add(new StoredField(fieldType().name(), geometry.toString())); + context.doc().add(new StoredField(name(), geometry.toString())); } // TODO phase out geohash (which is currently used in the CompletionSuggester) // we only expose the geohash value and disallow advancing tokens, hence we can reuse the same parser throughout multiple sub-fields @@ -297,7 +297,6 @@ public static class GeoPointFieldType extends AbstractGeometryFieldType scriptValues; private GeoPointFieldType( - String name, boolean indexed, boolean stored, boolean hasDocValues, @@ -305,13 +304,13 @@ private GeoPointFieldType( FieldValues scriptValues, Map meta ) { - super(name, indexed, stored, hasDocValues, parser, meta); + super(indexed, stored, hasDocValues, parser, meta); this.scriptValues = scriptValues; } // only used in test - public GeoPointFieldType(String name) { - this(name, true, false, true, null, null, Collections.emptyMap()); + public GeoPointFieldType() { + this(true, false, true, null, null, Collections.emptyMap()); } @Override @@ -330,17 +329,23 @@ protected Function, List> getFormatter(String format) { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, 
SearchExecutionContext context, String format) { if (scriptValues == null) { - return super.valueFetcher(context, format); + return super.valueFetcher(name, context, format); } Function, List> formatter = getFormatter(format != null ? format : GeometryFormatterFactory.GEOJSON); return FieldValues.valueListFetcher(scriptValues, formatter, context); } @Override - public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... geometries) { - failIfNotIndexedNorDocValuesFallback(context); + public Query geoShapeQuery( + String name, + SearchExecutionContext context, + String fieldName, + ShapeRelation relation, + LatLonGeometry... geometries + ) { + failIfNotIndexedNorDocValuesFallback(name, context); final ShapeField.QueryRelation luceneRelation; if (relation == ShapeRelation.INTERSECTS && isPointGeometry(geometries)) { // For point queries and intersects, lucene does not match points that are encoded @@ -368,14 +373,14 @@ private boolean isPointGeometry(LatLonGeometry[] geometries) { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new AbstractLatLonPointIndexFieldData.Builder(name(), CoreValuesSourceType.GEOPOINT, GeoPointDocValuesField::new); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new AbstractLatLonPointIndexFieldData.Builder(name, CoreValuesSourceType.GEOPOINT, GeoPointDocValuesField::new); } @Override - public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query distanceFeatureQuery(String name, Object origin, String pivot, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); GeoPoint originGeoPoint; if (origin instanceof GeoPoint) { originGeoPoint = (GeoPoint) 
origin; @@ -392,12 +397,12 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo double pivotDouble = DistanceUnit.DEFAULT.parse(pivot, DistanceUnit.DEFAULT); if (isIndexed()) { // As we already apply boost in AbstractQueryBuilder::toQuery, we always passing a boost of 1.0 to distanceFeatureQuery - return LatLonPoint.newDistanceFeatureQuery(name(), 1.0f, originGeoPoint.lat(), originGeoPoint.lon(), pivotDouble); + return LatLonPoint.newDistanceFeatureQuery(name, 1.0f, originGeoPoint.lat(), originGeoPoint.lon(), pivotDouble); } else { return new GeoPointScriptFieldDistanceFeatureQuery( new Script(""), - ctx -> new SortedNumericDocValuesLongFieldScript(name(), context.lookup(), ctx), - name(), + ctx -> new SortedNumericDocValuesLongFieldScript(name, context.lookup(), ctx), + name, originGeoPoint.lat(), originGeoPoint.lon(), pivotDouble diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointScriptFieldType.java index e6a5bcc82d86c..867efae1dcdd3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointScriptFieldType.java @@ -38,13 +38,8 @@ public final class GeoPointScriptFieldType extends AbstractScriptFieldType new Builder<>(name, GeoPointFieldScript.CONTEXT) { @Override - AbstractScriptFieldType createFieldType( - String name, - GeoPointFieldScript.Factory factory, - Script script, - Map meta - ) { - return new GeoPointScriptFieldType(name, factory, getScript(), meta()); + AbstractScriptFieldType createFieldType(GeoPointFieldScript.Factory factory, Script script, Map meta) { + return new GeoPointScriptFieldType(factory, getScript(), meta()); } @Override @@ -58,10 +53,9 @@ GeoPointFieldScript.Factory getCompositeLeafFactory(Function meta) { + GeoPointScriptFieldType(GeoPointFieldScript.Factory scriptFactory, Script script, Map meta) { 
super( - name, - searchLookup -> scriptFactory.newFactory(name, script.getParams(), searchLookup), + (name, searchLookup) -> scriptFactory.newFactory(name, script.getParams(), searchLookup), script, scriptFactory.isResultDeterministic(), meta @@ -75,6 +69,7 @@ public String typeName() { @Override protected Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -83,37 +78,47 @@ protected Query rangeQuery( DateMathParser parser, SearchExecutionContext context ) { - throw new IllegalArgumentException("Runtime field [" + name() + "] of type [" + typeName() + "] does not support range queries"); + throw new IllegalArgumentException("Runtime field [" + name + "] of type [" + typeName() + "] does not support range queries"); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new IllegalArgumentException( - "Geometry fields do not support exact searching, use dedicated geometry queries instead: [" + name() + "]" + "Geometry fields do not support exact searching, use dedicated geometry queries instead: [" + name + "]" ); } @Override - public GeoPointScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - return new GeoPointScriptFieldData.Builder(name(), leafFactory(searchLookup.get()), GeoPointDocValuesField::new); + public GeoPointScriptFieldData.Builder fielddataBuilder( + String name, + String fullyQualifiedIndexName, + Supplier searchLookup + ) { + return new GeoPointScriptFieldData.Builder(name, leafFactory(name, searchLookup.get()), GeoPointDocValuesField::new); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { applyScriptContext(context); - return new GeoPointScriptFieldExistsQuery(script, leafFactory(context), name()); + return new 
GeoPointScriptFieldExistsQuery(script, leafFactory(name, context), name); } @Override - public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... geometries) { + public Query geoShapeQuery( + String name, + SearchExecutionContext context, + String fieldName, + ShapeRelation relation, + LatLonGeometry... geometries + ) { if (relation == ShapeRelation.CONTAINS && Arrays.stream(geometries).anyMatch(g -> (g instanceof Point) == false)) { return new MatchNoDocsQuery(); } - return new GeoPointScriptFieldGeoShapeQuery(script, leafFactory(context), fieldName, relation, geometries); + return new GeoPointScriptFieldGeoShapeQuery(script, leafFactory(name, context), fieldName, relation, geometries); } @Override - public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) { + public Query distanceFeatureQuery(String name, Object origin, String pivot, SearchExecutionContext context) { GeoPoint originGeoPoint; if (origin instanceof GeoPoint) { originGeoPoint = (GeoPoint) origin; @@ -127,8 +132,8 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo double pivotDouble = DistanceUnit.DEFAULT.parse(pivot, DistanceUnit.DEFAULT); return new GeoPointScriptFieldDistanceFeatureQuery( script, - leafFactory(context)::newInstance, - name(), + leafFactory(name, context)::newInstance, + name, originGeoPoint.lat(), originGeoPoint.lon(), pivotDouble diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index a774fa95a7d66..2bc71f75f1a68 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -99,16 +99,10 @@ public GeoShapeFieldMapper build(MapperBuilderContext context) { ignoreZValue.get().value() ); GeoShapeParser geoShapeParser = new 
GeoShapeParser(geometryParser, orientation.get().value()); - GeoShapeFieldType ft = new GeoShapeFieldType( - context.buildFullName(name), - indexed.get(), - orientation.get().value(), - geoShapeParser, - meta.get() - ); + GeoShapeFieldType ft = new GeoShapeFieldType(indexed.get(), orientation.get().value(), geoShapeParser, meta.get()); return new GeoShapeFieldMapper( name, - ft, + new MappedField(context.buildFullName(name), ft), multiFieldsBuilder.build(this, context), copyTo.build(), new GeoShapeIndexer(orientation.get().value(), context.buildFullName(name)), @@ -120,8 +114,8 @@ public GeoShapeFieldMapper build(MapperBuilderContext context) { public static class GeoShapeFieldType extends AbstractShapeGeometryFieldType implements GeoShapeQueryable { - public GeoShapeFieldType(String name, boolean indexed, Orientation orientation, Parser parser, Map meta) { - super(name, indexed, false, false, parser, orientation, meta); + public GeoShapeFieldType(boolean indexed, Orientation orientation, Parser parser, Map meta) { + super(indexed, false, false, parser, orientation, meta); } @Override @@ -130,7 +124,13 @@ public String typeName() { } @Override - public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... geometries) { + public Query geoShapeQuery( + String name, + SearchExecutionContext context, + String fieldName, + ShapeRelation relation, + LatLonGeometry... 
geometries + ) { // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0) if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(Version.V_7_5_0)) { throw new QueryShardException( @@ -161,7 +161,7 @@ protected Function, List> getFormatter(String format) { public GeoShapeFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, GeoShapeIndexer indexer, @@ -170,7 +170,7 @@ public GeoShapeFieldMapper( ) { super( simpleName, - mappedFieldType, + mappedField, builder.ignoreMalformed.get(), builder.coerce.get(), builder.ignoreZValue.get(), @@ -196,7 +196,7 @@ protected void index(DocumentParserContext context, Geometry geometry) throws IO return; } context.doc().addAll(indexer.indexShape(geometry)); - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeQueryable.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeQueryable.java index beb594d9e9936..67f780c493d96 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeQueryable.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeQueryable.java @@ -41,9 +41,15 @@ */ public interface GeoShapeQueryable { - Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... luceneGeometries); + Query geoShapeQuery( + String name, + SearchExecutionContext context, + String fieldName, + ShapeRelation relation, + LatLonGeometry... 
luceneGeometries + ); - default Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, Geometry shape) { + default Query geoShapeQuery(String name, SearchExecutionContext context, String fieldName, ShapeRelation relation, Geometry shape) { final LatLonGeometry[] luceneGeometries; try { luceneGeometries = toQuantizeLuceneGeometry(shape, relation); @@ -53,18 +59,19 @@ default Query geoShapeQuery(SearchExecutionContext context, String fieldName, Sh if (luceneGeometries.length == 0) { return new MatchNoDocsQuery(); } - return geoShapeQuery(context, fieldName, relation, luceneGeometries); + return geoShapeQuery(name, context, fieldName, relation, luceneGeometries); } @Deprecated default Query geoShapeQuery( + String name, SearchExecutionContext context, String fieldName, SpatialStrategy strategy, ShapeRelation relation, Geometry shape ) { - return geoShapeQuery(context, fieldName, relation, shape); + return geoShapeQuery(name, context, fieldName, relation, shape); } private static double quantizeLat(double lat) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index 8a921366556b8..3bfe4599d5997 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -26,9 +26,9 @@ public abstract class IdFieldMapper extends MetadataFieldMapper { private static final Map ANALYZERS = Map.of(NAME, Lucene.KEYWORD_ANALYZER); - protected IdFieldMapper(MappedFieldType mappedFieldType) { - super(mappedFieldType); - assert mappedFieldType.isSearchable(); + protected IdFieldMapper(MappedField mappedField) { + super(mappedField); + assert mappedField.isSearchable(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java 
index d65c5897e54fd..aae1f435e6538 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldMapper.java @@ -49,7 +49,7 @@ public static final class IgnoredFieldType extends StringFieldType { public static final IgnoredFieldType INSTANCE = new IgnoredFieldType(); private IgnoredFieldType() { - super(NAME, true, true, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + super(true, true, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); } @Override @@ -58,22 +58,22 @@ public String typeName() { } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { // This query is not performance sensitive, it only helps assess // quality of the data, so we may use a slow query. It shouldn't // be too slow in practice since the number of unique terms in this // field is bounded by the number of fields in the mappings. 
- return new TermRangeQuery(name(), null, null, true, true); + return new TermRangeQuery(name, null, null, true, true); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { return new StoredValueFetcher(context.lookup(), NAME); } } private IgnoredFieldMapper() { - super(IgnoredFieldType.INSTANCE); + super(new MappedField(NAME, IgnoredFieldType.INSTANCE)); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index cac0594899573..278f3b1482717 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -40,7 +40,7 @@ static final class IndexFieldType extends ConstantFieldType { static final IndexFieldType INSTANCE = new IndexFieldType(); private IndexFieldType() { - super(NAME, Collections.emptyMap()); + super(Collections.emptyMap()); } @Override @@ -59,15 +59,15 @@ protected boolean matches(String pattern, boolean caseInsensitive, SearchExecuti } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { return new MatchAllDocsQuery(); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { return new ConstantIndexFieldData.Builder( fullyQualifiedIndexName, - name(), + name, CoreValuesSourceType.KEYWORD, (dv, n) -> new DelegateDocValuesField( new ScriptDocValues.Strings(new ScriptDocValues.StringsSupplier(FieldData.toString(dv))), @@ -77,7 +77,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } @Override 
- public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { return new ValueFetcher() { private final List indexName = List.of(context.getFullyQualifiedIndex().getName()); @@ -91,7 +91,7 @@ public List fetchValues(SourceLookup lookup, List ignoredValues) } public IndexFieldMapper() { - super(IndexFieldType.INSTANCE); + super(new MappedField(NAME, IndexFieldType.INSTANCE)); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index c45840e8d0830..a3d45c01a52b9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -158,15 +158,17 @@ protected Parameter[] getParameters() { public IpFieldMapper build(MapperBuilderContext context) { return new IpFieldMapper( name, - new IpFieldType( + new MappedField( context.buildFullName(name), - indexed.getValue() && indexCreatedVersion.isLegacyIndexVersion() == false, - stored.getValue(), - hasDocValues.getValue(), - parseNullValue(), - scriptValues(), - meta.getValue(), - dimension.getValue() + new IpFieldType( + indexed.getValue() && indexCreatedVersion.isLegacyIndexVersion() == false, + stored.getValue(), + hasDocValues.getValue(), + parseNullValue(), + scriptValues(), + meta.getValue(), + dimension.getValue() + ) ), multiFieldsBuilder.build(this, context), copyTo.build(), @@ -190,7 +192,6 @@ public static final class IpFieldType extends SimpleMappedFieldType { private final boolean isDimension; public IpFieldType( - String name, boolean indexed, boolean stored, boolean hasDocValues, @@ -199,22 +200,22 @@ public IpFieldType( Map meta, boolean isDimension ) { - super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(indexed, stored, hasDocValues, 
TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.nullValue = nullValue; this.scriptValues = scriptValues; this.isDimension = isDimension; } - public IpFieldType(String name) { - this(name, true, true); + public IpFieldType() { + this(true, true); } - public IpFieldType(String name, boolean isIndexed) { - this(name, isIndexed, true); + public IpFieldType(boolean isIndexed) { + this(isIndexed, true); } - public IpFieldType(String name, boolean isIndexed, boolean hasDocValues) { - this(name, isIndexed, false, hasDocValues, null, null, Collections.emptyMap(), false); + public IpFieldType(boolean isIndexed, boolean hasDocValues) { + this(isIndexed, false, hasDocValues, null, null, Collections.emptyMap(), false); } @Override @@ -228,8 +229,8 @@ public boolean isSearchable() { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(name()); + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return context.fieldExistsInIndex(name); } private static InetAddress parse(Object value) { @@ -244,14 +245,14 @@ private static InetAddress parse(Object value) { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } if (scriptValues != null) { return FieldValues.valueFetcher(scriptValues, v -> InetAddresses.toAddrString((InetAddress) v), context); } - return new SourceValueFetcher(name(), context, nullValue) { + return new SourceValueFetcher(name, context, nullValue) { @Override protected Object parseSourceValue(Object value) { InetAddress address; @@ -266,11 +267,11 @@ protected Object 
parseSourceValue(Object value) { } @Override - public Query termQuery(Object value, @Nullable SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termQuery(String name, Object value, @Nullable SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); Query query; if (value instanceof InetAddress) { - query = InetAddressPoint.newExactQuery(name(), (InetAddress) value); + query = InetAddressPoint.newExactQuery(name, (InetAddress) value); } else { if (value instanceof BytesRef) { value = ((BytesRef) value).utf8ToString(); @@ -278,10 +279,10 @@ public Query termQuery(Object value, @Nullable SearchExecutionContext context) { String term = value.toString(); if (term.contains("/")) { final Tuple cidr = InetAddresses.parseCidr(term); - query = InetAddressPoint.newPrefixQuery(name(), cidr.v1(), cidr.v2()); + query = InetAddressPoint.newPrefixQuery(name, cidr.v1(), cidr.v2()); } else { InetAddress address = InetAddresses.forString(term); - query = InetAddressPoint.newExactQuery(name(), address); + query = InetAddressPoint.newExactQuery(name, address); } } if (isIndexed()) { @@ -304,10 +305,10 @@ static Query convertToDocValuesQuery(Query query) { } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed() == false) { - return super.termsQuery(values, context); + return super.termsQuery(name, values, context); } InetAddress[] addresses = new InetAddress[values.size()]; int i = 0; @@ -322,26 +323,27 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (value.toString().contains("/")) { // the `terms` query contains some prefix queries, so we cannot create a set query // and need to fall back to a disjunction of `term` queries - 
return super.termsQuery(values, context); + return super.termsQuery(name, values, context); } address = InetAddresses.forString(value.toString()); } addresses[i++] = address; } - return InetAddressPoint.newSetQuery(name(), addresses); + return InetAddressPoint.newSetQuery(name, addresses); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); - return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, (lower, upper) -> { - Query query = InetAddressPoint.newRangeQuery(name(), lower, upper); + failIfNotIndexedNorDocValuesFallback(name, context); + return rangeQuery(name, lowerTerm, upperTerm, includeLower, includeUpper, (lower, upper) -> { + Query query = InetAddressPoint.newRangeQuery(name, lower, upper); if (isIndexed()) { return query; } else { @@ -355,6 +357,7 @@ public Query rangeQuery( * provided {@code builder} to build a range query. 
*/ public static Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -391,9 +394,9 @@ public static Query rangeQuery( } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.IP, IpDocValuesField::new); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new SortedSetOrdinalsIndexFieldData.Builder(name, CoreValuesSourceType.IP, IpDocValuesField::new); } @Override @@ -405,9 +408,9 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { - checkNoFormat(format); - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, @Nullable String format, ZoneId timeZone) { + checkNoFormat(name, format); + checkNoTimeZone(name, timeZone); return DocValueFormat.IP; } @@ -435,8 +438,8 @@ public boolean isDimension() { private final FieldValues scriptValues; private final ScriptCompiler scriptCompiler; - private IpFieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiFields multiFields, CopyTo copyTo, Builder builder) { - super(simpleName, mappedFieldType, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.get()); + private IpFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.get()); this.ignoreMalformedByDefault = builder.ignoreMalformedByDefault; this.indexed = builder.indexed.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); @@ -472,7 +475,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio address = 
value(context.parser(), nullValue); } catch (IllegalArgumentException e) { if (ignoreMalformed) { - context.addIgnoredField(fieldType().name()); + context.addIgnoredField(name()); return; } else { throw e; @@ -493,19 +496,19 @@ private static InetAddress value(XContentParser parser, InetAddress nullValue) t private void indexValue(DocumentParserContext context, InetAddress address) { if (dimension) { - context.getDimensions().addIp(fieldType().name(), address); + context.getDimensions().addIp(name(), address); } if (indexed) { - Field field = new InetAddressPoint(fieldType().name(), address); + Field field = new InetAddressPoint(name(), address); context.doc().add(field); } if (hasDocValues) { - context.doc().add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(InetAddressPoint.encode(address)))); + context.doc().add(new SortedSetDocValuesField(name(), new BytesRef(InetAddressPoint.encode(address)))); } else if (stored || indexed) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } if (stored) { - context.doc().add(new StoredField(fieldType().name(), new BytesRef(InetAddressPoint.encode(address)))); + context.doc().add(new StoredField(name(), new BytesRef(InetAddressPoint.encode(address)))); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java index b8acd5b0cc953..ae8ecb13908b8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java @@ -45,8 +45,8 @@ public final class IpScriptFieldType extends AbstractScriptFieldType new Builder<>(name, IpFieldScript.CONTEXT) { @Override - AbstractScriptFieldType createFieldType(String name, IpFieldScript.Factory factory, Script script, Map meta) { - return new IpScriptFieldType(name, factory, getScript(), meta()); + AbstractScriptFieldType 
createFieldType(IpFieldScript.Factory factory, Script script, Map meta) { + return new IpScriptFieldType(factory, getScript(), meta()); } @Override @@ -60,10 +60,9 @@ IpFieldScript.Factory getCompositeLeafFactory(Function meta) { + IpScriptFieldType(IpFieldScript.Factory scriptFactory, Script script, Map meta) { super( - name, - searchLookup -> scriptFactory.newFactory(name, script.getParams(), searchLookup), + (name, searchLookup) -> scriptFactory.newFactory(name, script.getParams(), searchLookup), script, scriptFactory.isResultDeterministic(), meta @@ -84,25 +83,26 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - checkNoFormat(format); - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { + checkNoFormat(name, format); + checkNoTimeZone(name, timeZone); return DocValueFormat.IP; } @Override - public IpScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - return new IpScriptFieldData.Builder(name(), leafFactory(searchLookup.get()), IpDocValuesField::new); + public IpScriptFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + return new IpScriptFieldData.Builder(name, leafFactory(name, searchLookup.get()), IpDocValuesField::new); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { applyScriptContext(context); - return new IpScriptFieldExistsQuery(script, leafFactory(context), name()); + return new IpScriptFieldExistsQuery(script, leafFactory(name, context), name); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -113,14 +113,15 @@ public Query rangeQuery( ) { applyScriptContext(context); return IpFieldMapper.IpFieldType.rangeQuery( + name, lowerTerm, upperTerm, 
includeLower, includeUpper, (lower, upper) -> new IpScriptFieldRangeQuery( script, - leafFactory(context), - name(), + leafFactory(name, context), + name, new BytesRef(InetAddressPoint.encode(lower)), new BytesRef(InetAddressPoint.encode(upper)) ) @@ -128,25 +129,25 @@ public Query rangeQuery( } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { applyScriptContext(context); if (value instanceof InetAddress) { - return inetAddressQuery((InetAddress) value, context); + return inetAddressQuery(name, (InetAddress) value, context); } String term = BytesRefs.toString(value); if (term.contains("/")) { - return cidrQuery(term, context); + return cidrQuery(name, term, context); } InetAddress address = InetAddresses.forString(term); - return inetAddressQuery(address, context); + return inetAddressQuery(name, address, context); } - private Query inetAddressQuery(InetAddress address, SearchExecutionContext context) { - return new IpScriptFieldTermQuery(script, leafFactory(context), name(), new BytesRef(InetAddressPoint.encode(address))); + private Query inetAddressQuery(String name, InetAddress address, SearchExecutionContext context) { + return new IpScriptFieldTermQuery(script, leafFactory(name, context), name, new BytesRef(InetAddressPoint.encode(address))); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { applyScriptContext(context); BytesRefHash terms = new BytesRefHash(values.size(), BigArrays.NON_RECYCLING_INSTANCE); List cidrQueries = null; @@ -163,9 +164,9 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (cidrQueries == null) { cidrQueries = new ArrayList<>(); } - cidrQueries.add(cidrQuery(term, context)); + cidrQueries.add(cidrQuery(name, term, context)); } - Query termsQuery = new 
IpScriptFieldTermsQuery(script, leafFactory(context), name(), terms); + Query termsQuery = new IpScriptFieldTermsQuery(script, leafFactory(name, context), name, terms); if (cidrQueries == null) { return termsQuery; } @@ -177,7 +178,7 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { return bool.build(); } - private Query cidrQuery(String term, SearchExecutionContext context) { + private Query cidrQuery(String name, String term, SearchExecutionContext context) { Tuple cidr = InetAddresses.parseCidr(term); InetAddress addr = cidr.v1(); int prefixLength = cidr.v2(); @@ -192,6 +193,6 @@ private Query cidrQuery(String term, SearchExecutionContext context) { // Force the terms into IPv6 BytesRef lowerBytes = new BytesRef(InetAddressPoint.encode(InetAddressPoint.decode(lower))); BytesRef upperBytes = new BytesRef(InetAddressPoint.encode(InetAddressPoint.decode(upper))); - return new IpScriptFieldRangeQuery(script, leafFactory(context), name(), lowerBytes, upperBytes); + return new IpScriptFieldRangeQuery(script, leafFactory(name, context), name, lowerBytes, upperBytes); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index dff6d4b84baf3..c3ee2f5a4199b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -295,7 +295,7 @@ private KeywordFieldType buildFieldType(MapperBuilderContext context, FieldType } else if (splitQueriesOnWhitespace.getValue()) { searchAnalyzer = Lucene.WHITESPACE_ANALYZER; } - return new KeywordFieldType(context.buildFullName(name), fieldType, normalizer, searchAnalyzer, quoteAnalyzer, this); + return new KeywordFieldType(fieldType, normalizer, searchAnalyzer, quoteAnalyzer, this); } @Override @@ -311,7 +311,7 @@ public KeywordFieldMapper build(MapperBuilderContext context) { return 
new KeywordFieldMapper( name, fieldtype, - buildFieldType(context, fieldtype), + new MappedField(context.buildFullName(name), buildFieldType(context, fieldtype)), multiFieldsBuilder.build(this, context), copyTo.build(), this @@ -336,7 +336,6 @@ public static final class KeywordFieldType extends StringFieldType { private final boolean isDimension; public KeywordFieldType( - String name, FieldType fieldType, NamedAnalyzer normalizer, NamedAnalyzer searchAnalyzer, @@ -344,7 +343,6 @@ public KeywordFieldType( Builder builder ) { super( - name, fieldType.indexOptions() != IndexOptions.NONE && builder.indexCreatedVersion.isLegacyIndexVersion() == false, fieldType.stored(), builder.hasDocValues.getValue(), @@ -359,8 +357,8 @@ public KeywordFieldType( this.isDimension = builder.dimension.getValue(); } - public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Map meta) { - super(name, isIndexed, false, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); + public KeywordFieldType(boolean isIndexed, boolean hasDocValues, Map meta) { + super(isIndexed, false, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); this.normalizer = Lucene.KEYWORD_ANALYZER; this.ignoreAbove = Integer.MAX_VALUE; this.nullValue = null; @@ -369,13 +367,12 @@ public KeywordFieldType(String name, boolean isIndexed, boolean hasDocValues, Ma this.isDimension = false; } - public KeywordFieldType(String name) { - this(name, true, true, Collections.emptyMap()); + public KeywordFieldType() { + this(true, true, Collections.emptyMap()); } - public KeywordFieldType(String name, FieldType fieldType) { + public KeywordFieldType(FieldType fieldType) { super( - name, fieldType.indexOptions() != IndexOptions.NONE, false, false, @@ -390,8 +387,8 @@ public KeywordFieldType(String name, FieldType fieldType) { this.isDimension = false; } - public KeywordFieldType(String name, NamedAnalyzer analyzer) { - super(name, true, false, true, textSearchInfo(Defaults.FIELD_TYPE, null, analyzer, 
analyzer), Collections.emptyMap()); + public KeywordFieldType(NamedAnalyzer analyzer) { + super(true, false, true, textSearchInfo(Defaults.FIELD_TYPE, null, analyzer, analyzer), Collections.emptyMap()); this.normalizer = Lucene.KEYWORD_ANALYZER; this.ignoreAbove = Integer.MAX_VALUE; this.nullValue = null; @@ -406,42 +403,43 @@ public boolean isSearchable() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.termQuery(value, context); + return super.termQuery(name, value, context); } else { - return SortedSetDocValuesField.newSlowExactQuery(name(), indexedValueForSearch(value)); + return SortedSetDocValuesField.newSlowExactQuery(name, indexedValueForSearch(name, value)); } } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.termsQuery(values, context); + return super.termsQuery(name, values, context); } else { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); - return new DocValuesTermsQuery(name(), bytesRefs); + BytesRef[] bytesRefs = values.stream().map(v -> indexedValueForSearch(name, v)).toArray(BytesRef[]::new); + return new DocValuesTermsQuery(name, bytesRefs); } } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.rangeQuery(lowerTerm, 
upperTerm, includeLower, includeUpper, context); + return super.rangeQuery(name, lowerTerm, upperTerm, includeLower, includeUpper, context); } else { return SortedSetDocValuesField.newSlowRangeQuery( - name(), - lowerTerm == null ? null : indexedValueForSearch(lowerTerm), - upperTerm == null ? null : indexedValueForSearch(upperTerm), + name, + lowerTerm == null ? null : indexedValueForSearch(name, lowerTerm), + upperTerm == null ? null : indexedValueForSearch(name, upperTerm), includeLower, includeUpper ); @@ -450,6 +448,7 @@ public Query rangeQuery( @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -457,15 +456,15 @@ public Query fuzzyQuery( boolean transpositions, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions, context); + return super.fuzzyQuery(name, value, fuzziness, prefixLength, maxExpansions, transpositions, context); } else { return StringScriptFieldFuzzyQuery.build( new Script(""), - ctx -> new SortedSetDocValuesStringFieldScript(name(), context.lookup(), ctx), - name(), - indexedValueForSearch(value).utf8ToString(), + ctx -> new SortedSetDocValuesStringFieldScript(name, context.lookup(), ctx), + name, + indexedValueForSearch(name, value).utf8ToString(), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, transpositions @@ -475,51 +474,57 @@ public Query fuzzyQuery( @Override public Query prefixQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.prefixQuery(value, method, caseInsensitive, context); + return super.prefixQuery(name, value, method, caseInsensitive, context); } else 
{ return new StringScriptFieldPrefixQuery( new Script(""), - ctx -> new SortedSetDocValuesStringFieldScript(name(), context.lookup(), ctx), - name(), - indexedValueForSearch(value).utf8ToString(), + ctx -> new SortedSetDocValuesStringFieldScript(name, context.lookup(), ctx), + name, + indexedValueForSearch(name, value).utf8ToString(), caseInsensitive ); } } @Override - public Query termQueryCaseInsensitive(Object value, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termQueryCaseInsensitive(String name, Object value, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.termQueryCaseInsensitive(value, context); + return super.termQueryCaseInsensitive(name, value, context); } else { return new StringScriptFieldTermQuery( new Script(""), - ctx -> new SortedSetDocValuesStringFieldScript(name(), context.lookup(), ctx), - name(), - indexedValueForSearch(value).utf8ToString(), + ctx -> new SortedSetDocValuesStringFieldScript(name, context.lookup(), ctx), + name, + indexedValueForSearch(name, value).utf8ToString(), true ); } } @Override - public TermsEnum getTerms(boolean caseInsensitive, String string, SearchExecutionContext queryShardContext, String searchAfter) - throws IOException { + public TermsEnum getTerms( + String name, + boolean caseInsensitive, + String string, + SearchExecutionContext queryShardContext, + String searchAfter + ) throws IOException { IndexReader reader = queryShardContext.searcher().getTopReaderContext().reader(); Terms terms = null; if (isIndexed()) { - terms = MultiTerms.getTerms(reader, name()); + terms = MultiTerms.getTerms(reader, name); } else if (hasDocValues()) { - terms = SortedSetDocValuesTerms.getTerms(reader, name()); + terms = SortedSetDocValuesTerms.getTerms(reader, name); } if (terms == null) { // Field does not exist on this shard. 
@@ -681,24 +686,24 @@ NamedAnalyzer normalizer() { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); return new SortedSetOrdinalsIndexFieldData.Builder( - name(), + name, CoreValuesSourceType.KEYWORD, (dv, n) -> new KeywordDocValuesField(FieldData.toString(dv), n) ); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } if (this.scriptValues != null) { return FieldValues.valueFetcher(this.scriptValues, context); } - return new SourceValueFetcher(name(), context, nullValue) { + return new SourceValueFetcher(name, context, nullValue) { @Override protected String parseSourceValue(Object value) { String keywordValue = value.toString(); @@ -706,7 +711,7 @@ protected String parseSourceValue(Object value) { return null; } - return normalizeValue(normalizer(), name(), keywordValue); + return normalizeValue(normalizer(), name, keywordValue); } }; } @@ -722,14 +727,14 @@ public Object valueForDisplay(Object value) { } @Override - protected BytesRef indexedValueForSearch(Object value) { + protected BytesRef indexedValueForSearch(String name, Object value) { if (getTextSearchInfo().searchAnalyzer() == Lucene.KEYWORD_ANALYZER) { // keyword analyzer with the default attribute source which encodes terms using UTF8 // in that case we skip normalization, which may be slow if there many terms need to // parse (eg. 
large terms query) since Analyzer.normalize involves things like creating // attributes through reflection // This if statement will be used whenever a normalizer is NOT configured - return super.indexedValueForSearch(value); + return super.indexedValueForSearch(name, value); } if (value == null) { @@ -738,7 +743,7 @@ protected BytesRef indexedValueForSearch(Object value) { if (value instanceof BytesRef) { value = ((BytesRef) value).utf8ToString(); } - return getTextSearchInfo().searchAnalyzer().normalize(name(), value.toString()); + return getTextSearchInfo().searchAnalyzer().normalize(name, value.toString()); } /** @@ -746,24 +751,25 @@ protected BytesRef indexedValueForSearch(Object value) { */ @Override public Query wildcardQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.wildcardQuery(value, method, caseInsensitive, true, context); + return super.wildcardQuery(name, value, method, caseInsensitive, true, context); } else { if (getTextSearchInfo().searchAnalyzer() != null) { - value = normalizeWildcardPattern(name(), value, getTextSearchInfo().searchAnalyzer()); + value = normalizeWildcardPattern(name, value, getTextSearchInfo().searchAnalyzer()); } else { - value = indexedValueForSearch(value).utf8ToString(); + value = indexedValueForSearch(name, value).utf8ToString(); } return new StringScriptFieldWildcardQuery( new Script(""), - ctx -> new SortedSetDocValuesStringFieldScript(name(), context.lookup(), ctx), - name(), + ctx -> new SortedSetDocValuesStringFieldScript(name, context.lookup(), ctx), + name, value, caseInsensitive ); @@ -771,20 +777,25 @@ public Query wildcardQuery( } @Override - public Query normalizedWildcardQuery(String value, MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { - 
failIfNotIndexedNorDocValuesFallback(context); + public Query normalizedWildcardQuery( + String name, + String value, + MultiTermQuery.RewriteMethod method, + SearchExecutionContext context + ) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.normalizedWildcardQuery(value, method, context); + return super.normalizedWildcardQuery(name, value, method, context); } else { if (getTextSearchInfo().searchAnalyzer() != null) { - value = normalizeWildcardPattern(name(), value, getTextSearchInfo().searchAnalyzer()); + value = normalizeWildcardPattern(name, value, getTextSearchInfo().searchAnalyzer()); } else { - value = indexedValueForSearch(value).utf8ToString(); + value = indexedValueForSearch(name, value).utf8ToString(); } return new StringScriptFieldWildcardQuery( new Script(""), - ctx -> new SortedSetDocValuesStringFieldScript(name(), context.lookup(), ctx), - name(), + ctx -> new SortedSetDocValuesStringFieldScript(name, context.lookup(), ctx), + name, value, false ); @@ -793,6 +804,7 @@ public Query normalizedWildcardQuery(String value, MultiTermQuery.RewriteMethod @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -800,18 +812,18 @@ public Query regexpQuery( MultiTermQuery.RewriteMethod method, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return super.regexpQuery(value, syntaxFlags, matchFlags, maxDeterminizedStates, method, context); + return super.regexpQuery(name, value, syntaxFlags, matchFlags, maxDeterminizedStates, method, context); } else { if (matchFlags != 0) { throw new IllegalArgumentException("Match flags not yet implemented [" + matchFlags + "]"); } return new StringScriptFieldRegexpQuery( new Script(""), - ctx -> new SortedSetDocValuesStringFieldScript(name(), context.lookup(), ctx), - name(), - indexedValueForSearch(value).utf8ToString(), + ctx -> 
new SortedSetDocValuesStringFieldScript(name, context.lookup(), ctx), + name, + indexedValueForSearch(name, value).utf8ToString(), syntaxFlags, matchFlags, maxDeterminizedStates @@ -838,12 +850,12 @@ public boolean isDimension() { } @Override - public void validateMatchedRoutingPath() { + public void validateMatchedRoutingPath(String name) { if (false == isDimension) { throw new IllegalArgumentException( "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "and without the [script] parameter. [" - + name() + + name + "] was not [time_series_dimension: true]." ); } @@ -851,7 +863,7 @@ public void validateMatchedRoutingPath() { throw new IllegalArgumentException( "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "and without the [script] parameter. [" - + name() + + name + "] has a [script] parameter." ); } @@ -874,12 +886,12 @@ public void validateMatchedRoutingPath() { private KeywordFieldMapper( String simpleName, FieldType fieldType, - KeywordFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder ) { - super(simpleName, mappedFieldType, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.getValue()); + super(simpleName, mappedField, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.getValue()); assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0; this.indexed = builder.indexed.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); @@ -934,7 +946,7 @@ private void indexValue(DocumentParserContext context, String value) { value = normalizeValue(fieldType().normalizer(), name(), value); if (fieldType().isDimension()) { - context.getDimensions().addString(fieldType().name(), value); + context.getDimensions().addString(name(), value); } // convert to utf8 only once before feeding postings/dv/stored fields @@ -948,7 +960,7 @@ private void 
indexValue(DocumentParserContext context, String value) { byte[] prefix = new byte[30]; System.arraycopy(binaryValue.bytes, binaryValue.offset, prefix, 0, 30); String msg = "Document contains at least one immense term in field=\"" - + fieldType().name() + + name() + "\" (whose " + "UTF8 encoding is longer than the max length " + MAX_TERM_LENGTH @@ -961,16 +973,16 @@ private void indexValue(DocumentParserContext context, String value) { } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - Field field = new KeywordField(fieldType().name(), binaryValue, fieldType); + Field field = new KeywordField(name(), binaryValue, fieldType); context.doc().add(field); if (fieldType().hasDocValues() == false && fieldType.omitNorms()) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } if (fieldType().hasDocValues()) { - context.doc().add(new SortedSetDocValuesField(fieldType().name(), binaryValue)); + context.doc().add(new SortedSetDocValuesField(name(), binaryValue)); } } @@ -1008,7 +1020,7 @@ protected String contentType() { @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), fieldType().normalizer); + return Map.of(mappedField.name(), fieldType().normalizer); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java index 8586dc4771c07..d5e0495e70999 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java @@ -50,13 +50,8 @@ private static class Builder extends AbstractScriptFieldType.Builder createFieldType( - String name, - StringFieldScript.Factory factory, - Script script, - Map meta - ) { - return new KeywordScriptFieldType(name, factory, script, meta); + AbstractScriptFieldType createFieldType(StringFieldScript.Factory factory, Script 
script, Map meta) { + return new KeywordScriptFieldType(factory, script, meta); } @Override @@ -74,10 +69,9 @@ public static RuntimeField sourceOnly(String name) { return new Builder(name).createRuntimeField(StringFieldScript.PARSE_FROM_SOURCE); } - public KeywordScriptFieldType(String name, StringFieldScript.Factory scriptFactory, Script script, Map meta) { + public KeywordScriptFieldType(StringFieldScript.Factory scriptFactory, Script script, Map meta) { super( - name, - searchLookup -> scriptFactory.newFactory(name, script.getParams(), searchLookup), + (name, searchLookup) -> scriptFactory.newFactory(name, script.getParams(), searchLookup), script, scriptFactory.isResultDeterministic(), meta @@ -100,18 +94,23 @@ public Object valueForDisplay(Object value) { } @Override - public StringScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - return new StringScriptFieldData.Builder(name(), leafFactory(searchLookup.get()), KeywordDocValuesField::new); + public StringScriptFieldData.Builder fielddataBuilder( + String name, + String fullyQualifiedIndexName, + Supplier searchLookup + ) { + return new StringScriptFieldData.Builder(name, leafFactory(name, searchLookup.get()), KeywordDocValuesField::new); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { applyScriptContext(context); - return new StringScriptFieldExistsQuery(script, leafFactory(context), name()); + return new StringScriptFieldExistsQuery(script, leafFactory(name, context), name); } @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -122,8 +121,8 @@ public Query fuzzyQuery( applyScriptContext(context); return StringScriptFieldFuzzyQuery.build( script, - leafFactory(context), - name(), + leafFactory(name, context), + name, BytesRefs.toString(Objects.requireNonNull(value)), 
fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, @@ -132,13 +131,14 @@ public Query fuzzyQuery( } @Override - public Query prefixQuery(String value, RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { + public Query prefixQuery(String name, String value, RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { applyScriptContext(context); - return new StringScriptFieldPrefixQuery(script, leafFactory(context), name(), value, caseInsensitive); + return new StringScriptFieldPrefixQuery(script, leafFactory(name, context), name, value, caseInsensitive); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -150,8 +150,8 @@ public Query rangeQuery( applyScriptContext(context); return new StringScriptFieldRangeQuery( script, - leafFactory(context), - name(), + leafFactory(name, context), + name, lowerTerm == null ? null : BytesRefs.toString(lowerTerm), upperTerm == null ? 
null : BytesRefs.toString(upperTerm), includeLower, @@ -161,6 +161,7 @@ public Query rangeQuery( @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -174,8 +175,8 @@ public Query regexpQuery( } return new StringScriptFieldRegexpQuery( script, - leafFactory(context), - name(), + leafFactory(name, context), + name, value, syntaxFlags, matchFlags, @@ -184,45 +185,45 @@ public Query regexpQuery( } @Override - public Query termQueryCaseInsensitive(Object value, SearchExecutionContext context) { + public Query termQueryCaseInsensitive(String name, Object value, SearchExecutionContext context) { applyScriptContext(context); return new StringScriptFieldTermQuery( script, - leafFactory(context), - name(), + leafFactory(name, context), + name, BytesRefs.toString(Objects.requireNonNull(value)), true ); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { applyScriptContext(context); return new StringScriptFieldTermQuery( script, - leafFactory(context), - name(), + leafFactory(name, context), + name, BytesRefs.toString(Objects.requireNonNull(value)), false ); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { applyScriptContext(context); Set terms = values.stream().map(v -> BytesRefs.toString(Objects.requireNonNull(v))).collect(toSet()); - return new StringScriptFieldTermsQuery(script, leafFactory(context), name(), terms); + return new StringScriptFieldTermsQuery(script, leafFactory(name, context), name, terms); } @Override - public Query wildcardQuery(String value, RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { + public Query wildcardQuery(String name, String value, RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { 
applyScriptContext(context); - return new StringScriptFieldWildcardQuery(script, leafFactory(context), name(), value, caseInsensitive); + return new StringScriptFieldWildcardQuery(script, leafFactory(name, context), name, value, caseInsensitive); } @Override - public Query normalizedWildcardQuery(String value, RewriteMethod method, SearchExecutionContext context) { + public Query normalizedWildcardQuery(String name, String value, RewriteMethod method, SearchExecutionContext context) { applyScriptContext(context); - return new StringScriptFieldWildcardQuery(script, leafFactory(context), name(), value, false); + return new StringScriptFieldWildcardQuery(script, leafFactory(name, context), name, value, false); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LeafRuntimeField.java b/server/src/main/java/org/elasticsearch/index/mapper/LeafRuntimeField.java index 9462e67f9ade1..e4618ccd9a56f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LeafRuntimeField.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LeafRuntimeField.java @@ -16,34 +16,34 @@ /** * RuntimeField base class for leaf fields that will only ever return a single {@link MappedFieldType} - * from {@link RuntimeField#asMappedFieldTypes()}. Can be a standalone runtime field, or part of a composite. + * from {@link RuntimeField#asMappedFields()}. Can be a standalone runtime field, or part of a composite. 
*/ public final class LeafRuntimeField implements RuntimeField { private final String name; - private final MappedFieldType mappedFieldType; + private final MappedField mappedField; private final List> parameters; - public LeafRuntimeField(String name, MappedFieldType mappedFieldType, List> parameters) { + public LeafRuntimeField(String name, MappedField mappedField, List> parameters) { this.name = name; - this.mappedFieldType = mappedFieldType; + this.mappedField = mappedField; this.parameters = parameters; - assert mappedFieldType.name().endsWith(name) : "full name: " + mappedFieldType.name() + " - leaf name: " + name; + assert mappedField.name().endsWith(name) : "full name: " + mappedField.name() + " - leaf name: " + name; } @Override public String name() { - return mappedFieldType.name(); + return mappedField.name(); } @Override - public Stream asMappedFieldTypes() { - return Stream.of(mappedFieldType); + public Stream asMappedFields() { + return Stream.of(mappedField); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(name); - builder.field("type", mappedFieldType.typeName()); + builder.field("type", mappedField.typeName()); boolean includeDefaults = params.paramAsBoolean("include_defaults", false); for (FieldMapper.Parameter parameter : parameters) { parameter.toXContent(builder, includeDefaults); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java index ac800e147d44e..a0748e935b7ee 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java @@ -36,7 +36,7 @@ public class LegacyTypeFieldMapper extends MetadataFieldMapper { private static final Map ANALYZERS = Map.of(NAME, Lucene.KEYWORD_ANALYZER); protected LegacyTypeFieldMapper() { - super(new 
LegacyTypeFieldType()); + super(new MappedField(NAME, new LegacyTypeFieldType())); } @Override @@ -47,7 +47,7 @@ public Map indexAnalyzers() { static final class LegacyTypeFieldType extends TermBasedFieldType { LegacyTypeFieldType() { - super(NAME, false, true, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + super(false, true, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); } @Override @@ -62,18 +62,19 @@ public boolean isSearchable() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - return SortedSetDocValuesField.newSlowExactQuery(name(), indexedValueForSearch(value)); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + return SortedSetDocValuesField.newSlowExactQuery(name, indexedValueForSearch(name, value)); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); - return new DocValuesTermsQuery(name(), bytesRefs); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + BytesRef[] bytesRefs = values.stream().map(v -> indexedValueForSearch(name, v)).toArray(BytesRef[]::new); + return new DocValuesTermsQuery(name, bytesRefs); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -81,21 +82,21 @@ public Query rangeQuery( SearchExecutionContext context ) { return SortedSetDocValuesField.newSlowRangeQuery( - name(), - lowerTerm == null ? null : indexedValueForSearch(lowerTerm), - upperTerm == null ? null : indexedValueForSearch(upperTerm), + name, + lowerTerm == null ? null : indexedValueForSearch(name, lowerTerm), + upperTerm == null ? 
null : indexedValueForSearch(name, upperTerm), includeLower, includeUpper ); } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { + public boolean mayExistInIndex(String name, SearchExecutionContext context) { return true; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { return new StoredValueFetcher(context.lookup(), NAME); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java index 3b9ea4a89a850..7b5438c07985c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java @@ -43,8 +43,8 @@ private static class Builder extends AbstractScriptFieldType.Builder createFieldType(String name, LongFieldScript.Factory factory, Script script, Map meta) { - return new LongScriptFieldType(name, factory, script, meta); + AbstractScriptFieldType createFieldType(LongFieldScript.Factory factory, Script script, Map meta) { + return new LongScriptFieldType(factory, script, meta); } @Override @@ -62,10 +62,9 @@ public static RuntimeField sourceOnly(String name) { return new Builder(name).createRuntimeField(LongFieldScript.PARSE_FROM_SOURCE); } - public LongScriptFieldType(String name, LongFieldScript.Factory scriptFactory, Script script, Map meta) { + public LongScriptFieldType(LongFieldScript.Factory scriptFactory, Script script, Map meta) { super( - name, - searchLookup -> scriptFactory.newFactory(name, script.getParams(), searchLookup), + (name, searchLookup) -> scriptFactory.newFactory(name, script.getParams(), searchLookup), script, scriptFactory.isResultDeterministic(), meta @@ -83,8 +82,8 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat 
docValueFormat(String format, ZoneId timeZone) { - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { + checkNoTimeZone(name, timeZone); if (format == null) { return DocValueFormat.RAW; } @@ -92,18 +91,19 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) { } @Override - public LongScriptFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - return new LongScriptFieldData.Builder(name(), leafFactory(searchLookup.get()), LongDocValuesField::new); + public LongScriptFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + return new LongScriptFieldData.Builder(name, leafFactory(name, searchLookup.get()), LongDocValuesField::new); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { applyScriptContext(context); - return new LongScriptFieldExistsQuery(script, leafFactory(context)::newInstance, name()); + return new LongScriptFieldExistsQuery(script, leafFactory(name, context)::newInstance, name); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -118,21 +118,21 @@ public Query rangeQuery( upperTerm, includeLower, includeUpper, - (l, u) -> new LongScriptFieldRangeQuery(script, leafFactory(context)::newInstance, name(), l, u) + (l, u) -> new LongScriptFieldRangeQuery(script, leafFactory(name, context)::newInstance, name, l, u) ); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { if (NumberType.hasDecimalPart(value)) { return Queries.newMatchNoDocsQuery("Value [" + value + "] has a decimal part"); } applyScriptContext(context); - return new LongScriptFieldTermQuery(script, leafFactory(context)::newInstance, name(), 
NumberType.objectToLong(value, true)); + return new LongScriptFieldTermQuery(script, leafFactory(name, context)::newInstance, name, NumberType.objectToLong(value, true)); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { if (values.isEmpty()) { return Queries.newMatchAllQuery(); } @@ -147,6 +147,6 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { return Queries.newMatchNoDocsQuery("All values have a decimal part"); } applyScriptContext(context); - return new LongScriptFieldTermsQuery(script, leafFactory(context)::newInstance, name(), terms); + return new LongScriptFieldTermsQuery(script, leafFactory(name, context)::newInstance, name, terms); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LookupRuntimeFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/LookupRuntimeFieldType.java index 37e2e0bef1ce1..8cc71d90f7d78 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LookupRuntimeFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LookupRuntimeFieldType.java @@ -145,14 +145,13 @@ protected List> getParameters() { @Override protected RuntimeField createRuntimeField(MappingParserContext parserContext) { final LookupRuntimeFieldType ft = new LookupRuntimeFieldType( - name, meta(), targetIndex.get(), inputField.get(), targetField.get(), fetchFields.get() ); - return new LeafRuntimeField(name, ft, getParameters()); + return new LeafRuntimeField(name, new MappedField(name, ft), getParameters()); } @Override @@ -171,14 +170,13 @@ protected RuntimeField createChildRuntimeField( private final List fetchFields; private LookupRuntimeFieldType( - String name, Map meta, String lookupIndex, String inputField, String targetField, List fetchFields ) { - super(name, false, false, false, TextSearchInfo.NONE, meta); + super(false, false, false, 
TextSearchInfo.NONE, meta); this.lookupIndex = lookupIndex; this.inputField = inputField; this.targetField = targetField; @@ -186,14 +184,10 @@ private LookupRuntimeFieldType( } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (context.allowExpensiveQueries() == false) { throw new ElasticsearchException( - "cannot be executed against lookup field [" - + name() - + "] while [" - + ALLOW_EXPENSIVE_QUERIES.getKey() - + "] is set to [false]." + "cannot be executed against lookup field [" + name + "] while [" + ALLOW_EXPENSIVE_QUERIES.getKey() + "] is set to [false]." ); } return new LookupFieldValueFetcher(context); @@ -205,20 +199,24 @@ public String typeName() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new IllegalArgumentException("Cannot search on field [" + name() + "] since it is a lookup field."); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + throw new IllegalArgumentException("Cannot search on field [" + name + "] since it is a lookup field."); } private class LookupFieldValueFetcher implements ValueFetcher { private final ValueFetcher inputFieldValueFetcher; LookupFieldValueFetcher(SearchExecutionContext context) { - final MappedFieldType inputFieldType = context.getFieldType(inputField); + final MappedField inputField = context.getMappedField(LookupRuntimeFieldType.this.inputField); // do not allow unmapped field - if (inputFieldType == null) { - throw new QueryShardException(context, "No field mapping can be found for the field with name [{}]", inputField); + if (inputField == null) { + throw new QueryShardException( + context, + "No field mapping can be found for the field with name [{}]", + LookupRuntimeFieldType.this.inputField + ); } - this.inputFieldValueFetcher = inputFieldType.valueFetcher(context, null); + 
this.inputFieldValueFetcher = inputField.valueFetcher(context, null); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedField.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedField.java new file mode 100644 index 0000000000000..99d7416abcb9b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedField.java @@ -0,0 +1,421 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.queries.intervals.IntervalsSource; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.DocValueFormat; +import 
org.elasticsearch.search.fetch.subphase.FetchFieldsPhase; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.io.IOException; +import java.time.ZoneId; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; +import java.util.function.Supplier; + +public class MappedField { + + private final String name; + private final MappedFieldType type; + + public MappedField(String name, MappedFieldType type) { + this.name = Mapper.internFieldName(name); + this.type = type; + } + + public String name() { + return name; + } + + public MappedFieldType type() { + return type; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MappedField that = (MappedField) o; + return name.equals(that.name) && type.equals(that.type); + } + + @Override + public int hashCode() { + return Objects.hash(name, type); + } + + /** + * Return a fielddata builder for this field + * + * @param fullyQualifiedIndexName the name of the index this field-data is build for + * @param searchLookup a {@link SearchLookup} supplier to allow for accessing other fields values in the context of runtime fields + * @throws IllegalArgumentException if the fielddata is not supported on this type. + * An IllegalArgumentException is needed in order to return an http error 400 + * when this error occurs in a request. see: {@link org.elasticsearch.ExceptionsHelper#status} + */ + public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + return type.fielddataBuilder(name, fullyQualifiedIndexName, searchLookup); + } + + /** + * Create a helper class to fetch field values during the {@link FetchFieldsPhase}. + * + * New field types must implement this method in order to support the search 'fields' option. 
Except + * for metadata fields, field types should not throw {@link UnsupportedOperationException} since this + * could cause a search retrieving multiple fields (like "fields": ["*"]) to fail. + */ + public ValueFetcher valueFetcher(SearchExecutionContext context, @Nullable String format) { + return type.valueFetcher(name, context, format); + } + + /** Returns the name of this type, as would be specified in mapping properties */ + public String typeName() { + return type.typeName(); + } + + /** Returns the field family type, as used in field capabilities */ + public String familyTypeName() { + return type.familyTypeName(); + } + + public boolean hasDocValues() { + return type.hasDocValues(); + } + + /** + * Returns the collapse type of the field + * CollapseType.NONE means the field can'be used for collapsing. + * @return collapse type of the field + */ + public MappedFieldType.CollapseType collapseType() { + return type.collapseType(); + } + + /** Given a value that comes from the stored fields API, convert it to the + * expected type. For instance a date field would store dates as longs and + * format it back to a string in this method. */ + public Object valueForDisplay(Object value) { + return type.valueForDisplay(value); + } + + /** + * Returns true if the field is searchable. + */ + public boolean isSearchable() { + return type.isSearchable(); + } + + /** + * Returns true if the field is indexed. + */ + public final boolean isIndexed() { + return type.isIndexed(); + } + + /** + * Returns true if the field is stored separately. + */ + public final boolean isStored() { + return type.isStored(); + } + + /** + * If the field supports using the indexed data to speed up operations related to ordering of data, such as sorting or aggs, return + * a function for doing that. If it is unsupported for this field type, there is no need to override this method. 
+ * + * @return null if the optimization cannot be applied, otherwise a function to use for the optimization + */ + @Nullable + public Function pointReaderIfPossible() { + return type.pointReaderIfPossible(); + } + + /** Returns true if the field is aggregatable. + * + */ + public boolean isAggregatable() { + return type.isAggregatable(name); + } + + /** + * @return true if field has been marked as a dimension field + */ + public boolean isDimension() { + return type.isDimension(); + } + + /** + * @return metric type or null if the field is not a metric field + */ + public TimeSeriesParams.MetricType getMetricType() { + return type.getMetricType(); + } + + /** Generates a query that will only match documents that contain the given value. + * The default implementation returns a {@link TermQuery} over the value bytes + * @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type or if the field is not searchable + * due to the way it is configured (eg. not indexed) + * @throws ElasticsearchParseException if {@code value} cannot be converted to the expected data type + * @throws UnsupportedOperationException if the field is not searchable regardless of options + * @throws QueryShardException if the field is not searchable regardless of options + */ + // TODO: Standardize exception types + public Query termQuery(Object value, @Nullable SearchExecutionContext context) { + return type.termQuery(name, value, context); + } + + // Case insensitive form of term query (not supported by all fields so must be overridden to enable) + public Query termQueryCaseInsensitive(Object value, @Nullable SearchExecutionContext context) { + return type.termQueryCaseInsensitive(name, value, context); + } + + /** Build a constant-scoring query that matches all values. The default implementation uses a + * {@link ConstantScoreQuery} around a {@link BooleanQuery} whose {@link BooleanClause.Occur#SHOULD} clauses + * are generated with {@link #termQuery}. 
*/ + public Query termsQuery(Collection values, @Nullable SearchExecutionContext context) { + return type.termsQuery(name, values, context); + } + + /** + * Factory method for range queries. + * @param relation the relation, nulls should be interpreted like INTERSECTS + */ + public Query rangeQuery( + Object lowerTerm, + Object upperTerm, + boolean includeLower, + boolean includeUpper, + ShapeRelation relation, + ZoneId timeZone, + DateMathParser parser, + SearchExecutionContext context + ) { + return type.rangeQuery(name, lowerTerm, upperTerm, includeLower, includeUpper, relation, timeZone, parser, context); + } + + public Query fuzzyQuery( + Object value, + Fuzziness fuzziness, + int prefixLength, + int maxExpansions, + boolean transpositions, + SearchExecutionContext context + ) { + return type.fuzzyQuery(name, value, fuzziness, prefixLength, maxExpansions, transpositions, context); + } + + // Case sensitive form of prefix query + public final Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { + return type.prefixQuery(name, value, method, context); + } + + public Query prefixQuery( + String value, + @Nullable MultiTermQuery.RewriteMethod method, + boolean caseInsensitve, + SearchExecutionContext context + ) { + return type.prefixQuery(name, value, method, caseInsensitve, context); + } + + // Case sensitive form of wildcard query + public final Query wildcardQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { + return type.wildcardQuery(name, value, method, context); + } + + public Query wildcardQuery( + String value, + @Nullable MultiTermQuery.RewriteMethod method, + boolean caseInsensitve, + SearchExecutionContext context + ) { + return type.wildcardQuery(name, value, method, caseInsensitve, context); + } + + public Query normalizedWildcardQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { + return 
type.normalizedWildcardQuery(name, value, method, context); + } + + public Query regexpQuery( + String value, + int syntaxFlags, + int matchFlags, + int maxDeterminizedStates, + @Nullable MultiTermQuery.RewriteMethod method, + SearchExecutionContext context + ) { + return type.regexpQuery(name, value, syntaxFlags, matchFlags, maxDeterminizedStates, method, context); + } + + public Query existsQuery(SearchExecutionContext context) { + return type.existsQuery(name, context); + } + + public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) + throws IOException { + return type.phraseQuery(name, stream, slop, enablePositionIncrements, context); + } + + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) + throws IOException { + return type.multiPhraseQuery(name, stream, slop, enablePositionIncrements, context); + } + + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) throws IOException { + return type.phrasePrefixQuery(name, stream, slop, maxExpansions, context); + } + + public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, SearchExecutionContext context) { + return type.spanPrefixQuery(name, value, method, context); + } + + public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) { + return type.distanceFeatureQuery(name, origin, pivot, context); + } + + /** + * Create an {@link IntervalsSource} for the given term. + */ + public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext context) { + return type.termIntervals(name, term, context); + } + + /** + * Create an {@link IntervalsSource} for the given prefix. 
+ */ + public IntervalsSource prefixIntervals(BytesRef prefix, SearchExecutionContext context) { + return type.prefixIntervals(name, prefix, context); + } + + /** + * Create a fuzzy {@link IntervalsSource} for the given term. + */ + public IntervalsSource fuzzyIntervals( + String term, + int maxDistance, + int prefixLength, + boolean transpositions, + SearchExecutionContext context + ) { + return type.fuzzyIntervals(name, term, maxDistance, prefixLength, transpositions, context); + } + + /** + * Create a wildcard {@link IntervalsSource} for the given pattern. + */ + public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) { + return type.wildcardIntervals(name, pattern, context); + } + + /** Return whether all values of the given {@link IndexReader} are within the range, + * outside the range or cross the range. The default implementation returns + * {@link MappedFieldType.Relation#INTERSECTS}, which is always fine to return when there is + * no way to check whether values are actually within bounds. */ + public MappedFieldType.Relation isFieldWithinQuery( + IndexReader reader, + Object from, + Object to, + boolean includeLower, + boolean includeUpper, + ZoneId timeZone, + DateMathParser dateMathParser, + QueryRewriteContext context + ) throws IOException { + return type.isFieldWithinQuery(name, reader, from, to, includeLower, includeUpper, timeZone, dateMathParser, context); + } + + /** + * @return if this field type should load global ordinals eagerly + */ + public boolean eagerGlobalOrdinals() { + return type.eagerGlobalOrdinals(); + } + + /** + * @return if the field may have values in the underlying index + * + * Note that this should only return {@code false} if it is not possible for it to + * match on a term query. 
+ * + * @see org.elasticsearch.index.search.QueryParserHelper + */ + public boolean mayExistInIndex(SearchExecutionContext context) { + return type.mayExistInIndex(name, context); + } + + /** + * Pick a {@link DocValueFormat} that can be used to display and parse + * values of fields of this type. + */ + public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { + return type.docValueFormat(name, format, timeZone); + } + + /** + * Get the metadata associated with this field. + */ + public Map meta() { + return type.meta(); + } + + /** + * Returns information on how any text in this field is indexed + * + * Fields that do not support any text-based queries should return + * {@link TextSearchInfo#NONE}. Some fields (eg keyword) may support + * only simple match queries, and can return + * {@link TextSearchInfo#SIMPLE_MATCH_ONLY}; other fields may support + * simple match queries without using the terms index, and can return + * {@link TextSearchInfo#SIMPLE_MATCH_WITHOUT_TERMS} + */ + public TextSearchInfo getTextSearchInfo() { + return type.getTextSearchInfo(); + } + + /** + * This method is used to support auto-complete services and implementations + * are expected to find terms beginning with the provided string very quickly. + * If fields cannot look up matching terms quickly they should return null. + * The returned TermEnum should implement next(), term() and doc_freq() methods + * but postings etc are not required. + * @param caseInsensitive if matches should be case insensitive + * @param string the partially complete word the user has typed (can be empty) + * @param queryShardContext the shard context + * @param searchAfter - usually null. 
If supplied the TermsEnum result must be positioned after the provided term (used for pagination) + * @return null or an enumeration of matching terms and their doc frequencies + * @throws IOException Errors accessing data + */ + public TermsEnum getTerms(boolean caseInsensitive, String string, SearchExecutionContext queryShardContext, String searchAfter) + throws IOException { + return type.getTerms(name, caseInsensitive, string, queryShardContext, searchAfter); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index e7a3739bc39b5..949da1f53d73d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -58,7 +58,6 @@ */ public abstract class MappedFieldType { - private final String name; private final boolean docValues; private final boolean isIndexed; private final boolean isStored; @@ -66,14 +65,12 @@ public abstract class MappedFieldType { private final Map meta; public MappedFieldType( - String name, boolean isIndexed, boolean isStored, boolean hasDocValues, TextSearchInfo textSearchInfo, Map meta ) { - this.name = Mapper.internFieldName(name); this.isIndexed = isIndexed; this.isStored = isStored; this.docValues = hasDocValues; @@ -90,8 +87,8 @@ public MappedFieldType( * An IllegalArgumentException is needed in order to return an http error 400 * when this error occurs in a request. 
see: {@link org.elasticsearch.ExceptionsHelper#status} */ - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - throw new IllegalArgumentException("Fielddata is not supported on field [" + name() + "] of type [" + typeName() + "]"); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + throw new IllegalArgumentException("Fielddata is not supported on field [" + name + "] of type [" + typeName() + "]"); } /** @@ -101,7 +98,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S * for metadata fields, field types should not throw {@link UnsupportedOperationException} since this * could cause a search retrieving multiple fields (like "fields": ["*"]) to fail. */ - public abstract ValueFetcher valueFetcher(SearchExecutionContext context, @Nullable String format); + public abstract ValueFetcher valueFetcher(String name, SearchExecutionContext context, @Nullable String format); /** Returns the name of this type, as would be specified in mapping properties */ public abstract String typeName(); @@ -111,10 +108,6 @@ public String familyTypeName() { return typeName(); } - public String name() { - return name; - } - public boolean hasDocValues() { return docValues; } @@ -170,9 +163,9 @@ public Function pointReaderIfPossible() { /** Returns true if the field is aggregatable. 
* */ - public boolean isAggregatable() { + public boolean isAggregatable(String name) { try { - fielddataBuilder("", () -> { throw new UnsupportedOperationException("SearchLookup not available"); }); + fielddataBuilder(name, "", () -> { throw new UnsupportedOperationException("SearchLookup not available"); }); return true; } catch (IllegalArgumentException e) { return false; @@ -202,10 +195,10 @@ public TimeSeriesParams.MetricType getMetricType() { * @throws QueryShardException if the field is not searchable regardless of options */ // TODO: Standardize exception types - public abstract Query termQuery(Object value, @Nullable SearchExecutionContext context); + public abstract Query termQuery(String name, Object value, @Nullable SearchExecutionContext context); // Case insensitive form of term query (not supported by all fields so must be overridden to enable) - public Query termQueryCaseInsensitive(Object value, @Nullable SearchExecutionContext context) { + public Query termQueryCaseInsensitive(String name, Object value, @Nullable SearchExecutionContext context) { throw new QueryShardException( context, "[" + name + "] field which is of type [" + typeName() + "], does not support case insensitive term queries" @@ -215,10 +208,10 @@ public Query termQueryCaseInsensitive(Object value, @Nullable SearchExecutionCon /** Build a constant-scoring query that matches all values. The default implementation uses a * {@link ConstantScoreQuery} around a {@link BooleanQuery} whose {@link Occur#SHOULD} clauses * are generated with {@link #termQuery}. 
*/ - public Query termsQuery(Collection values, @Nullable SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, @Nullable SearchExecutionContext context) { BooleanQuery.Builder builder = new BooleanQuery.Builder(); for (Object value : values) { - builder.add(termQuery(value, context), Occur.SHOULD); + builder.add(termQuery(name, value, context), Occur.SHOULD); } return new ConstantScoreQuery(builder.build()); } @@ -228,6 +221,7 @@ public Query termsQuery(Collection values, @Nullable SearchExecutionContext c * @param relation the relation, nulls should be interpreted like INTERSECTS */ public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -241,6 +235,7 @@ public Query rangeQuery( } public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -254,11 +249,17 @@ public Query fuzzyQuery( } // Case sensitive form of prefix query - public final Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { - return prefixQuery(value, method, false, context); + public final Query prefixQuery( + String name, + String value, + @Nullable MultiTermQuery.RewriteMethod method, + SearchExecutionContext context + ) { + return prefixQuery(name, value, method, false, context); } public Query prefixQuery( + String name, String value, @Nullable MultiTermQuery.RewriteMethod method, boolean caseInsensitve, @@ -271,11 +272,17 @@ public Query prefixQuery( } // Case sensitive form of wildcard query - public final Query wildcardQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { - return wildcardQuery(value, method, false, context); + public final Query wildcardQuery( + String name, + String value, + @Nullable MultiTermQuery.RewriteMethod method, + SearchExecutionContext context + ) { + return wildcardQuery(name, value, method, false, context); } public Query wildcardQuery( 
+ String name, String value, @Nullable MultiTermQuery.RewriteMethod method, boolean caseInsensitve, @@ -291,7 +298,12 @@ public Query wildcardQuery( ); } - public Query normalizedWildcardQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { + public Query normalizedWildcardQuery( + String name, + String value, + @Nullable MultiTermQuery.RewriteMethod method, + SearchExecutionContext context + ) { throw new QueryShardException( context, "Can only use wildcard queries on keyword, text and wildcard fields - not on [" @@ -303,6 +315,7 @@ public Query normalizedWildcardQuery(String value, @Nullable MultiTermQuery.Rewr } public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -316,41 +329,52 @@ public Query regexpQuery( ); } - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { if (hasDocValues() || getTextSearchInfo().hasNorms()) { - return new FieldExistsQuery(name()); + return new FieldExistsQuery(name); } else { - return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); + return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name)); } } - public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) + public Query phraseQuery(String name, TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) throws IOException { throw new IllegalArgumentException( "Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); } - public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) - throws IOException { + public Query multiPhraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) throws IOException { throw new 
IllegalArgumentException( "Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); } - public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) throws IOException { + public Query phrasePrefixQuery(String name, TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) + throws IOException { throw new IllegalArgumentException( "Can only use phrase prefix queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); } - public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, SearchExecutionContext context) { + public SpanQuery spanPrefixQuery( + String name, + String value, + SpanMultiTermQueryWrapper.SpanRewriteMethod method, + SearchExecutionContext context + ) { throw new IllegalArgumentException( "Can only use span prefix queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); } - public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) { + public Query distanceFeatureQuery(String name, Object origin, String pivot, SearchExecutionContext context) { throw new IllegalArgumentException( "Illegal data type of [" + typeName() @@ -364,7 +388,7 @@ public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionCo /** * Create an {@link IntervalsSource} for the given term. */ - public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext context) { + public IntervalsSource termIntervals(String name, BytesRef term, SearchExecutionContext context) { throw new IllegalArgumentException( "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); @@ -373,7 +397,7 @@ public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext conte /** * Create an {@link IntervalsSource} for the given prefix. 
*/ - public IntervalsSource prefixIntervals(BytesRef prefix, SearchExecutionContext context) { + public IntervalsSource prefixIntervals(String name, BytesRef prefix, SearchExecutionContext context) { throw new IllegalArgumentException( "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); @@ -383,6 +407,7 @@ public IntervalsSource prefixIntervals(BytesRef prefix, SearchExecutionContext c * Create a fuzzy {@link IntervalsSource} for the given term. */ public IntervalsSource fuzzyIntervals( + String name, String term, int maxDistance, int prefixLength, @@ -397,7 +422,7 @@ public IntervalsSource fuzzyIntervals( /** * Create a wildcard {@link IntervalsSource} for the given pattern. */ - public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) { + public IntervalsSource wildcardIntervals(String name, BytesRef pattern, SearchExecutionContext context) { throw new IllegalArgumentException( "Can only use interval queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); @@ -418,6 +443,7 @@ public enum Relation { * {@link Relation#INTERSECTS}, which is always fine to return when there is * no way to check whether values are actually within bounds. */ public Relation isFieldWithinQuery( + String name, IndexReader reader, Object from, Object to, @@ -434,11 +460,11 @@ public Relation isFieldWithinQuery( * An IllegalArgumentException is needed in order to return an http error 400 * when this error occurs in a request. see: {@link org.elasticsearch.ExceptionsHelper#status} **/ - protected final void failIfNoDocValues() { + protected final void failIfNoDocValues(String name) { if (hasDocValues() == false) { throw new IllegalArgumentException( "Can't load fielddata on [" - + name() + + name + "] because fielddata is unsupported on fields of type [" + typeName() + "]. Use doc values instead." 
@@ -446,26 +472,24 @@ protected final void failIfNoDocValues() { } } - protected final void failIfNotIndexed() { + protected final void failIfNotIndexed(String name) { if (isIndexed == false) { // we throw an IAE rather than an ISE so that it translates to a 4xx code rather than 5xx code on the http layer - throw new IllegalArgumentException("Cannot search on field [" + name() + "] since it is not indexed."); + throw new IllegalArgumentException("Cannot search on field [" + name + "] since it is not indexed."); } } - protected final void failIfNotIndexedNorDocValuesFallback(SearchExecutionContext context) { + protected final void failIfNotIndexedNorDocValuesFallback(String name, SearchExecutionContext context) { if (docValues == false && context.indexVersionCreated().isLegacyIndexVersion()) { - throw new IllegalArgumentException( - "Cannot search on field [" + name() + "] of legacy index since it does not have doc values." - ); + throw new IllegalArgumentException("Cannot search on field [" + name + "] of legacy index since it does not have doc values."); } else if (isIndexed == false && docValues == false) { // we throw an IAE rather than an ISE so that it translates to a 4xx code rather than 5xx code on the http layer - throw new IllegalArgumentException("Cannot search on field [" + name() + "] since it is not indexed nor has doc values."); + throw new IllegalArgumentException("Cannot search on field [" + name + "] since it is not indexed nor has doc values."); } else if (isIndexed == false && docValues && context.allowExpensiveQueries() == false) { // if query can only run using doc values, ensure running expensive queries are allowed throw new ElasticsearchException( "Cannot search on field [" - + name() + + name + "] since it is not indexed and '" + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false." 
@@ -488,7 +512,7 @@ public boolean eagerGlobalOrdinals() { * * @see org.elasticsearch.index.search.QueryParserHelper */ - public boolean mayExistInIndex(SearchExecutionContext context) { + public boolean mayExistInIndex(String name, SearchExecutionContext context) { return true; } @@ -496,27 +520,27 @@ public boolean mayExistInIndex(SearchExecutionContext context) { * Pick a {@link DocValueFormat} that can be used to display and parse * values of fields of this type. */ - public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { - checkNoFormat(format); - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, @Nullable String format, ZoneId timeZone) { + checkNoFormat(name, format); + checkNoTimeZone(name, timeZone); return DocValueFormat.RAW; } /** * Validate the provided {@code format} is null. */ - protected void checkNoFormat(@Nullable String format) { + protected void checkNoFormat(String name, @Nullable String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support custom formats"); } } /** * Validate the provided {@code timeZone} is null. 
*/ - protected void checkNoTimeZone(@Nullable ZoneId timeZone) { + protected void checkNoTimeZone(String name, @Nullable ZoneId timeZone) { if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support custom time zones"); } } @@ -583,19 +607,24 @@ public enum CollapseType { * @return null or an enumeration of matching terms and their doc frequencies * @throws IOException Errors accessing data */ - public TermsEnum getTerms(boolean caseInsensitive, String string, SearchExecutionContext queryShardContext, String searchAfter) - throws IOException { + public TermsEnum getTerms( + String name, + boolean caseInsensitive, + String string, + SearchExecutionContext queryShardContext, + String searchAfter + ) throws IOException { return null; } /** * Validate that this field can be the target of {@link IndexMetadata#INDEX_ROUTING_PATH}. */ - public void validateMatchedRoutingPath() { + public void validateMatchedRoutingPath(String name) { throw new IllegalArgumentException( "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "and without the [script] parameter. [" - + name() + + name + "] was [" + typeName() + "]." 
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldsLookup.java similarity index 74% rename from server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java rename to server/src/main/java/org/elasticsearch/index/mapper/MappedFieldsLookup.java index 964c68088d42f..5dc110b528ed6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldsLookup.java @@ -20,11 +20,11 @@ import java.util.stream.Collectors; /** - * An immutable container for looking up {@link MappedFieldType}s by their name. + * An immutable container for looking up {@link MappedField}s by their name. */ -final class FieldTypeLookup { - private final Map fullNameToFieldType; - private final Map dynamicFieldTypes; +final class MappedFieldsLookup { + private final Map fullNameToMappedField; + private final Map dynamicFields; /** * A map from field name to all fields whose content has been copied into it @@ -37,21 +37,21 @@ final class FieldTypeLookup { private final int maxParentPathDots; - FieldTypeLookup( + MappedFieldsLookup( Collection fieldMappers, Collection fieldAliasMappers, Collection runtimeFields ) { - final Map fullNameToFieldType = new HashMap<>(); - final Map dynamicFieldTypes = new HashMap<>(); + final Map fullNameToMappedField = new HashMap<>(); + final Map dynamicFields = new HashMap<>(); final Map> fieldToCopiedFields = new HashMap<>(); for (FieldMapper fieldMapper : fieldMappers) { String fieldName = fieldMapper.name(); - MappedFieldType fieldType = fieldMapper.fieldType(); - fullNameToFieldType.put(fieldType.name(), fieldType); - if (fieldType instanceof DynamicFieldType) { - dynamicFieldTypes.put(fieldType.name(), (DynamicFieldType) fieldType); + MappedField mappedField = fieldMapper.field(); + fullNameToMappedField.put(mappedField.name(), mappedField); + if (mappedField 
instanceof DynamicMappedField) { + dynamicFields.put(mappedField.name(), (DynamicMappedField) mappedField); } for (String targetField : fieldMapper.copyTo().copyToFields()) { Set sourcePath = fieldToCopiedFields.get(targetField); @@ -65,7 +65,7 @@ final class FieldTypeLookup { } int maxParentPathDots = 0; - for (String dynamicRoot : dynamicFieldTypes.keySet()) { + for (String dynamicRoot : dynamicFields.keySet()) { maxParentPathDots = Math.max(maxParentPathDots, dotCount(dynamicRoot)); } this.maxParentPathDots = maxParentPathDots; @@ -73,23 +73,23 @@ final class FieldTypeLookup { for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) { String aliasName = fieldAliasMapper.name(); String path = fieldAliasMapper.path(); - MappedFieldType fieldType = fullNameToFieldType.get(path); - if (fieldType == null) { + MappedField mappedField = fullNameToMappedField.get(path); + if (mappedField == null) { continue; } - fullNameToFieldType.put(aliasName, fieldType); - if (fieldType instanceof DynamicFieldType) { - dynamicFieldTypes.put(aliasName, (DynamicFieldType) fieldType); + fullNameToMappedField.put(aliasName, mappedField); + if (mappedField instanceof DynamicMappedField) { + dynamicFields.put(aliasName, (DynamicMappedField) mappedField); } } - for (MappedFieldType fieldType : RuntimeField.collectFieldTypes(runtimeFields).values()) { + for (MappedField mappedField : RuntimeField.collectMappedFields(runtimeFields).values()) { // this will override concrete fields with runtime fields that have the same name - fullNameToFieldType.put(fieldType.name(), fieldType); + fullNameToMappedField.put(mappedField.name(), mappedField); } // make all fields into compact+fast immutable maps - this.fullNameToFieldType = Map.copyOf(fullNameToFieldType); - this.dynamicFieldTypes = Map.copyOf(dynamicFieldTypes); + this.fullNameToMappedField = Map.copyOf(fullNameToMappedField); + this.dynamicFields = Map.copyOf(dynamicFields); // make values into more compact immutable sets to save memory 
fieldToCopiedFields.entrySet().forEach(e -> e.setValue(Set.copyOf(e.getValue()))); this.fieldToCopiedFields = Map.copyOf(fieldToCopiedFields); @@ -108,10 +108,10 @@ private static int dotCount(String path) { /** * Returns the mapped field type for the given field name. */ - MappedFieldType get(String field) { - MappedFieldType fieldType = fullNameToFieldType.get(field); - if (fieldType != null) { - return fieldType; + MappedField get(String field) { + MappedField mappedField = fullNameToMappedField.get(field); + if (mappedField != null) { + return mappedField; } return getDynamicField(field); } @@ -124,8 +124,8 @@ int getMaxParentPathDots() { // Check if the given field corresponds to a dynamic key mapper of the // form 'path_to_field.path_to_key'. If so, returns a field type that // can be used to perform searches on this field. Otherwise returns null. - private MappedFieldType getDynamicField(String field) { - if (dynamicFieldTypes.isEmpty()) { + private MappedField getDynamicField(String field) { + if (dynamicFields.isEmpty()) { // no parent fields defined return null; } @@ -143,10 +143,10 @@ private MappedFieldType getDynamicField(String field) { } String parentField = field.substring(0, dotIndex); - DynamicFieldType dft = dynamicFieldTypes.get(parentField); + DynamicMappedField dft = dynamicFields.get(parentField); if (dft != null && Objects.equals(field, parentField) == false) { String key = field.substring(dotIndex + 1); - return dft.getChildFieldType(key); + return dft.getChildField(key); } } } @@ -158,13 +158,13 @@ private MappedFieldType getDynamicField(String field) { */ Set getMatchingFieldNames(String pattern) { if (Regex.isMatchAllPattern(pattern)) { - return Collections.unmodifiableSet(fullNameToFieldType.keySet()); + return Collections.unmodifiableSet(fullNameToMappedField.keySet()); } if (Regex.isSimpleMatchPattern(pattern) == false) { // no wildcards return get(pattern) == null ? 
Collections.emptySet() : Collections.singleton(pattern); } - return fullNameToFieldType.keySet() + return fullNameToMappedField.keySet() .stream() .filter(field -> Regex.simpleMatch(pattern, field)) .collect(Collectors.toUnmodifiableSet()); @@ -183,13 +183,13 @@ Set getMatchingFieldNames(String pattern) { * @return A set of paths in the _source that contain the field's values. */ Set sourcePaths(String field) { - if (fullNameToFieldType.isEmpty()) { + if (fullNameToMappedField.isEmpty()) { return Set.of(); } // If the field is dynamically generated then return its full path - MappedFieldType fieldType = getDynamicField(field); - if (fieldType != null) { + MappedField dynamicField = getDynamicField(field); + if (dynamicField != null) { return Set.of(field); } @@ -197,7 +197,7 @@ Set sourcePaths(String field) { int lastDotIndex = field.lastIndexOf('.'); if (lastDotIndex > 0) { String parentField = field.substring(0, lastDotIndex); - if (fullNameToFieldType.containsKey(parentField)) { + if (fullNameToMappedField.containsKey(parentField)) { resolvedField = parentField; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 1c6e36c8e2b5a..5135531a4de4d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -435,8 +435,8 @@ private String resolveDocumentType(String type) { /** * Given the full name of a field, returns its {@link MappedFieldType}. */ - public MappedFieldType fieldType(String fullName) { - return mappingLookup().fieldTypesLookup().get(fullName); + public MappedField mappedField(String fullName) { + return mappingLookup().mappedFieldsLookup().get(fullName); } /** @@ -453,7 +453,7 @@ public MappingLookup mappingLookup() { /** * Returns field types that have eager global ordinals. 
*/ - public Iterable getEagerGlobalOrdinalsFields() { + public Iterable getEagerGlobalOrdinalsFields() { DocumentMapper mapper = this.mapper; if (mapper == null) { return Collections.emptySet(); @@ -461,8 +461,8 @@ public Iterable getEagerGlobalOrdinalsFields() { MappingLookup mappingLookup = mapper.mappers(); return mappingLookup.getMatchingFieldNames("*") .stream() - .map(mappingLookup::getFieldType) - .filter(MappedFieldType::eagerGlobalOrdinals) + .map(mappingLookup::getMappedField) + .filter(MappedField::eagerGlobalOrdinals) .toList(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index 19f02f9ad0b84..0b94fd1a09de2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -48,8 +48,8 @@ private CacheKey() {} private final Map fieldMappers; private final Map objectMappers; private final NestedLookup nestedLookup; - private final FieldTypeLookup fieldTypeLookup; - private final FieldTypeLookup indexTimeLookup; // for index-time scripts, a lookup that does not include runtime fields + private final MappedFieldsLookup mappedFieldsLookup; + private final MappedFieldsLookup indexTimeLookup; // for index-time scripts, a lookup that does not include runtime fields private final Map indexAnalyzersMap; private final List indexTimeScriptMappers; private final Mapping mapping; @@ -170,12 +170,12 @@ private MappingLookup( } final Collection runtimeFields = mapping.getRoot().runtimeFields(); - this.fieldTypeLookup = new FieldTypeLookup(mappers, aliasMappers, runtimeFields); + this.mappedFieldsLookup = new MappedFieldsLookup(mappers, aliasMappers, runtimeFields); if (runtimeFields.isEmpty()) { // without runtime fields this is the same as the field type lookup - this.indexTimeLookup = fieldTypeLookup; + this.indexTimeLookup = mappedFieldsLookup; } else { - 
this.indexTimeLookup = new FieldTypeLookup(mappers, aliasMappers, Collections.emptyList()); + this.indexTimeLookup = new MappedFieldsLookup(mappers, aliasMappers, Collections.emptyList()); } // make all fields into compact+fast immutable maps this.fieldMappers = Map.copyOf(fieldMappers); @@ -184,7 +184,7 @@ private MappingLookup( this.completionFields = Set.copyOf(completionFields); this.indexTimeScriptMappers = List.copyOf(indexTimeScriptMappers); - runtimeFields.stream().flatMap(RuntimeField::asMappedFieldTypes).map(MappedFieldType::name).forEach(this::validateDoesNotShadow); + runtimeFields.stream().flatMap(RuntimeField::asMappedFields).map(MappedField::name).forEach(this::validateDoesNotShadow); assert assertMapperNamesInterned(this.fieldMappers, this.objectMappers); } @@ -207,17 +207,17 @@ private static void assertNamesInterned(String name, Mapper mapper) { * Returns the leaf mapper associated with this field name. Note that the returned mapper * could be either a concrete {@link FieldMapper}, or a {@link FieldAliasMapper}. * - * To access a field's type information, {@link MapperService#fieldType} should be used instead. + * To access a field's type information, {@link MapperService#mappedField} should be used instead. */ public Mapper getMapper(String field) { return fieldMappers.get(field); } - FieldTypeLookup fieldTypesLookup() { - return fieldTypeLookup; + MappedFieldsLookup mappedFieldsLookup() { + return mappedFieldsLookup; } - FieldTypeLookup indexTimeLookup() { + MappedFieldsLookup indexTimeLookup() { return indexTimeLookup; } @@ -338,7 +338,7 @@ public boolean isMultiField(String field) { return false; } // Is it a runtime field? 
- if (indexTimeLookup.get(field) != fieldTypeLookup.get(field)) { + if (indexTimeLookup.get(field) != mappedFieldsLookup.get(field)) { return false; } String sourceParent = parentObject(field); @@ -365,14 +365,14 @@ private static String parentObject(String field) { * @param pattern the pattern to match field names against */ public Set getMatchingFieldNames(String pattern) { - return fieldTypeLookup.getMatchingFieldNames(pattern); + return mappedFieldsLookup.getMatchingFieldNames(pattern); } /** * Returns the mapped field type for the given field name. */ - public MappedFieldType getFieldType(String field) { - return fieldTypesLookup().get(field); + public MappedField getMappedField(String field) { + return mappedFieldsLookup().get(field); } /** @@ -388,7 +388,7 @@ public MappedFieldType getFieldType(String field) { * @return A set of paths in the _source that contain the field's values. */ public Set sourcePaths(String field) { - return fieldTypesLookup().sourcePaths(field); + return mappedFieldsLookup().sourcePaths(field); } /** @@ -428,9 +428,9 @@ public boolean isDataStreamTimestampFieldEnabled() { * @return {@code true} if contains a timestamp field of type date that is indexed and has doc values, {@code false} otherwise. */ public boolean hasTimestampField() { - final MappedFieldType mappedFieldType = fieldTypesLookup().get(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD); - if (mappedFieldType instanceof DateFieldMapper.DateFieldType) { - return mappedFieldType.isIndexed() && mappedFieldType.hasDocValues(); + final MappedField mappedField = mappedFieldsLookup().get(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD); + if (mappedField != null && mappedField.type() instanceof DateFieldMapper.DateFieldType) { + return mappedField.isIndexed() && mappedField.hasDocValues(); } else { return false; } @@ -456,7 +456,7 @@ public Mapping getMapping() { * or metric field. 
*/ public void validateDoesNotShadow(String name) { - MappedFieldType shadowed = indexTimeLookup.get(name); + MappedField shadowed = indexTimeLookup.get(name); if (shadowed == null) { return; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index 6303cf0063b1b..b33d7acc7c2b7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -132,8 +132,8 @@ public final MetadataFieldMapper build(MapperBuilderContext context) { public abstract MetadataFieldMapper build(); } - protected MetadataFieldMapper(MappedFieldType mappedFieldType) { - super(mappedFieldType.name(), mappedFieldType, MultiFields.empty(), CopyTo.empty(), false, null); + protected MetadataFieldMapper(MappedField mappedField) { + super(mappedField.name(), mappedField, MultiFields.empty(), CopyTo.empty(), false, null); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java index 29ac3b86b6c72..ce2d0798c50f6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java @@ -63,8 +63,8 @@ public static class Defaults { public static final class NestedPathFieldType extends StringFieldType { - private NestedPathFieldType(String name) { - super(name, true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + private NestedPathFieldType() { + super(true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); } @Override @@ -73,23 +73,23 @@ public String typeName() { } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext 
context) { throw new UnsupportedOperationException("Cannot run exists() query against the nested field path"); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "]."); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name + "]."); } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { + public boolean mayExistInIndex(String name, SearchExecutionContext context) { return false; } } private NestedPathFieldMapper(String name) { - super(new NestedPathFieldType(name)); + super(new MappedField(name, new NestedPathFieldType())); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 5e538ec258b28..3161cd2700139 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -230,8 +230,13 @@ protected Parameter[] getParameters() { @Override public NumberFieldMapper build(MapperBuilderContext context) { - MappedFieldType ft = new NumberFieldType(context.buildFullName(name), this); - return new NumberFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this); + return new NumberFieldMapper( + name, + new MappedField(context.buildFullName(name), new NumberFieldType(this)), + multiFieldsBuilder.build(this, context), + copyTo.build(), + this + ); } } @@ -1287,7 +1292,6 @@ public static class NumberFieldType extends SimpleMappedFieldType { private final MetricType metricType; public NumberFieldType( - String name, NumberType type, boolean isIndexed, boolean isStored, @@ -1299,7 +1303,7 @@ public NumberFieldType( boolean 
isDimension, MetricType metricType ) { - super(name, isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.type = Objects.requireNonNull(type); this.coerce = coerce; this.nullValue = nullValue; @@ -1308,9 +1312,8 @@ public NumberFieldType( this.metricType = metricType; } - NumberFieldType(String name, Builder builder) { + NumberFieldType(Builder builder) { this( - name, builder.type, builder.indexed.getValue() && builder.indexCreatedVersion.isLegacyIndexVersion() == false, builder.stored.getValue(), @@ -1324,12 +1327,12 @@ public NumberFieldType( ); } - public NumberFieldType(String name, NumberType type) { - this(name, type, true); + public NumberFieldType(NumberType type) { + this(type, true); } - public NumberFieldType(String name, NumberType type, boolean isIndexed) { - this(name, type, isIndexed, false, true, true, null, Collections.emptyMap(), null, false, null); + public NumberFieldType(NumberType type, boolean isIndexed) { + this(type, isIndexed, false, true, true, null, Collections.emptyMap(), null, false, null); } @Override @@ -1357,8 +1360,8 @@ public NumericType numericType() { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(this.name()); + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return context.fieldExistsInIndex(name); } public boolean isSearchable() { @@ -1366,31 +1369,32 @@ public boolean isSearchable() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); - return type.termQuery(name(), value, isIndexed()); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); + return type.termQuery(name, value, isIndexed()); } @Override - public Query 
termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexedNorDocValuesFallback(context); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + failIfNotIndexedNorDocValuesFallback(name, context); if (isIndexed()) { - return type.termsQuery(name(), values); + return type.termsQuery(name, values); } else { - return super.termsQuery(values, context); + return super.termsQuery(name, values, context); } } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - failIfNotIndexedNorDocValuesFallback(context); - return type.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper, hasDocValues(), context, isIndexed()); + failIfNotIndexedNorDocValuesFallback(name, context); + return type.rangeQuery(name, lowerTerm, upperTerm, includeLower, includeUpper, hasDocValues(), context, isIndexed()); } @Override @@ -1402,9 +1406,9 @@ public Function pointReaderIfPossible() { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return type.getFieldDataBuilder(name()); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return type.getFieldDataBuilder(name); } @Override @@ -1416,14 +1420,14 @@ public Object valueForDisplay(Object value) { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } if (this.scriptValues != null) { return 
FieldValues.valueFetcher(this.scriptValues, context); } - return new SourceValueFetcher(name(), context, nullValue) { + return new SourceValueFetcher(name, context, nullValue) { @Override protected Object parseSourceValue(Object value) { if (value.equals("")) { @@ -1435,8 +1439,8 @@ protected Object parseSourceValue(Object value) { } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { + checkNoTimeZone(name, timeZone); if (format == null) { return DocValueFormat.RAW; } @@ -1485,8 +1489,8 @@ public MetricType getMetricType() { private final MetricType metricType; private final Version indexCreatedVersion; - private NumberFieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiFields multiFields, CopyTo copyTo, Builder builder) { - super(simpleName, mappedFieldType, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.getValue()); + private NumberFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo, builder.script.get() != null, builder.onScriptError.getValue()); this.type = builder.type; this.indexed = builder.indexed.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); @@ -1529,7 +1533,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio value = value(context.parser(), type, nullValue, coerce()); } catch (IllegalArgumentException e) { if (ignoreMalformed.value() && context.parser().currentToken().isValue()) { - context.addIgnoredField(mappedFieldType.name()); + context.addIgnoredField(mappedField.name()); return; } else { throw e; @@ -1563,12 +1567,12 @@ private static Number value(XContentParser parser, NumberType numberType, Number private void indexValue(DocumentParserContext context, Number numericValue) { if (dimension && 
numericValue != null) { - context.getDimensions().addLong(fieldType().name(), numericValue.longValue()); + context.getDimensions().addLong(name(), numericValue.longValue()); } - fieldType().type.addFields(context.doc(), fieldType().name(), numericValue, indexed, hasDocValues, stored); + fieldType().type.addFields(context.doc(), name(), numericValue, indexed, hasDocValues, stored); if (hasDocValues == false && (stored || indexed)) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java index fab325c7947c0..4983232564f03 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapper.java @@ -91,10 +91,10 @@ protected Parameter[] getParameters() { @Override public PlaceHolderFieldMapper build(MapperBuilderContext context) { - PlaceHolderFieldType mappedFieldType = new PlaceHolderFieldType(context.buildFullName(name), type, Map.of()); + PlaceHolderFieldType mappedFieldType = new PlaceHolderFieldType(type, Map.of()); return new PlaceHolderFieldMapper( name, - mappedFieldType, + new MappedField(context.buildFullName(name), mappedFieldType), multiFieldsBuilder.build(this, context), copyTo.build(), unknownParams @@ -106,15 +106,15 @@ public static final class PlaceHolderFieldType extends MappedFieldType { private String type; - public PlaceHolderFieldType(String name, String type, Map meta) { - super(name, false, false, false, TextSearchInfo.NONE, meta); + public PlaceHolderFieldType(String type, Map meta) { + super(false, false, false, TextSearchInfo.NONE, meta); this.type = type; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String 
format) { // ignore format parameter - return new SourceValueFetcher(name(), context) { + return new SourceValueFetcher(name, context) { @Override protected Object parseSourceValue(Object value) { @@ -130,17 +130,18 @@ public String typeName() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new QueryShardException(context, fail("term query")); } @Override - public Query termQueryCaseInsensitive(Object value, @Nullable SearchExecutionContext context) { + public Query termQueryCaseInsensitive(String name, Object value, @Nullable SearchExecutionContext context) { throw new QueryShardException(context, fail("case insensitive term query")); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -155,6 +156,7 @@ public Query rangeQuery( @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -167,6 +169,7 @@ public Query fuzzyQuery( @Override public Query prefixQuery( + String name, String value, @Nullable MultiTermQuery.RewriteMethod method, boolean caseInsensitve, @@ -177,6 +180,7 @@ public Query prefixQuery( @Override public Query wildcardQuery( + String name, String value, @Nullable MultiTermQuery.RewriteMethod method, boolean caseInsensitve, @@ -186,12 +190,18 @@ public Query wildcardQuery( } @Override - public Query normalizedWildcardQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { + public Query normalizedWildcardQuery( + String name, + String value, + @Nullable MultiTermQuery.RewriteMethod method, + SearchExecutionContext context + ) { throw new QueryShardException(context, fail("normalized wildcard query")); } @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -203,42 +213,61 @@ public Query regexpQuery( } @Override - public 
Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) { + public Query phraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) { throw new QueryShardException(context, fail("phrase query")); } @Override - public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) { + public Query multiPhraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) { throw new QueryShardException(context, fail("multi-phrase query")); } @Override - public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) throws IOException { + public Query phrasePrefixQuery(String name, TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) + throws IOException { throw new QueryShardException(context, fail("phrase prefix query")); } @Override - public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, SearchExecutionContext context) { + public SpanQuery spanPrefixQuery( + String name, + String value, + SpanMultiTermQueryWrapper.SpanRewriteMethod method, + SearchExecutionContext context + ) { throw new QueryShardException(context, fail("span prefix query")); } @Override - public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) { + public Query distanceFeatureQuery(String name, Object origin, String pivot, SearchExecutionContext context) { throw new QueryShardException(context, fail("distance feature query")); } @Override - public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext context) { + public IntervalsSource termIntervals(String name, BytesRef term, SearchExecutionContext context) { throw new QueryShardException(context, fail("term intervals 
query")); } @Override - public IntervalsSource prefixIntervals(BytesRef prefix, SearchExecutionContext context) { + public IntervalsSource prefixIntervals(String name, BytesRef prefix, SearchExecutionContext context) { throw new QueryShardException(context, fail("term intervals query")); } @Override public IntervalsSource fuzzyIntervals( + String name, String term, int maxDistance, int prefixLength, @@ -249,12 +278,12 @@ public IntervalsSource fuzzyIntervals( } @Override - public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) { + public IntervalsSource wildcardIntervals(String name, BytesRef pattern, SearchExecutionContext context) { throw new QueryShardException(context, fail("wildcard intervals query")); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { throw new IllegalArgumentException(fail("aggregation or sorts")); } @@ -267,12 +296,12 @@ private String fail(String query) { public PlaceHolderFieldMapper( String simpleName, - PlaceHolderFieldType fieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Map unknownParams ) { - super(simpleName, fieldType, multiFields, copyTo); + super(simpleName, mappedField, multiFields, copyTo); this.unknownParams.putAll(unknownParams); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapper.java index cd6f19f24502e..2ebe59dd5f2c7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapper.java @@ -88,7 +88,7 @@ static final class IdFieldType extends TermBasedFieldType { private final BooleanSupplier fieldDataEnabled; IdFieldType(BooleanSupplier fieldDataEnabled) { 
- super(NAME, true, true, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + super(true, true, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); this.fieldDataEnabled = fieldDataEnabled; } @@ -104,28 +104,28 @@ public boolean isSearchable() { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { + public boolean mayExistInIndex(String name, SearchExecutionContext context) { return true; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { return new StoredValueFetcher(context.lookup(), NAME); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - return termsQuery(Arrays.asList(value), context); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + return termsQuery(name, Arrays.asList(value), context); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { return new MatchAllDocsQuery(); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexed(); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + failIfNotIndexed(name); BytesRef[] bytesRefs = values.stream().map(v -> { Object idObject = v; if (idObject instanceof BytesRef) { @@ -133,11 +133,11 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { } return Uid.encodeId(idObject.toString()); }).toArray(BytesRef[]::new); - return new TermInSetQuery(name(), bytesRefs); + return new TermInSetQuery(name, bytesRefs); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { if 
(fieldDataEnabled.getAsBoolean() == false) { throw new IllegalArgumentException( "Fielddata access on the _id field is disallowed, " @@ -146,7 +146,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S ); } final IndexFieldData.Builder fieldDataBuilder = new PagedBytesIndexFieldData.Builder( - name(), + name, TextFieldMapper.Defaults.FIELDDATA_MIN_FREQUENCY, TextFieldMapper.Defaults.FIELDDATA_MAX_FREQUENCY, TextFieldMapper.Defaults.FIELDDATA_MIN_SEGMENT_SIZE, @@ -257,7 +257,7 @@ public boolean advanceExact(int doc) throws IOException { } public ProvidedIdFieldMapper(BooleanSupplier fieldDataEnabled) { - super(new IdFieldType(fieldDataEnabled)); + super(new MappedField(NAME, new IdFieldType(fieldDataEnabled))); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index 56ce812d1ad2d..f26b7872a1275 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -130,7 +130,6 @@ protected RangeFieldType setupFieldType(MapperBuilderContext context) { ); } return new RangeFieldType( - fullName, index.getValue(), store.getValue(), hasDocValues.getValue(), @@ -141,7 +140,6 @@ protected RangeFieldType setupFieldType(MapperBuilderContext context) { } if (type == RangeType.DATE) { return new RangeFieldType( - fullName, index.getValue(), store.getValue(), hasDocValues.getValue(), @@ -151,7 +149,6 @@ protected RangeFieldType setupFieldType(MapperBuilderContext context) { ); } return new RangeFieldType( - fullName, type, index.getValue(), store.getValue(), @@ -164,7 +161,14 @@ protected RangeFieldType setupFieldType(MapperBuilderContext context) { @Override public RangeFieldMapper build(MapperBuilderContext context) { RangeFieldType ft = setupFieldType(context); - return new RangeFieldMapper(name, ft, 
multiFieldsBuilder.build(this, context), copyTo.build(), type, this); + return new RangeFieldMapper( + name, + new MappedField(context.buildFullName(name), ft), + multiFieldsBuilder.build(this, context), + copyTo.build(), + type, + this + ); } } @@ -175,7 +179,6 @@ public static final class RangeFieldType extends MappedFieldType { protected final boolean coerce; public RangeFieldType( - String name, RangeType type, boolean indexed, boolean stored, @@ -183,7 +186,7 @@ public RangeFieldType( boolean coerce, Map meta ) { - super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); assert type != RangeType.DATE; this.rangeType = Objects.requireNonNull(type); dateTimeFormatter = null; @@ -191,12 +194,11 @@ public RangeFieldType( this.coerce = coerce; } - public RangeFieldType(String name, RangeType type) { - this(name, type, true, false, true, false, Collections.emptyMap()); + public RangeFieldType(RangeType type) { + this(type, true, false, true, false, Collections.emptyMap()); } public RangeFieldType( - String name, boolean indexed, boolean stored, boolean hasDocValues, @@ -204,15 +206,15 @@ public RangeFieldType( boolean coerce, Map meta ) { - super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.rangeType = RangeType.DATE; this.dateTimeFormatter = Objects.requireNonNull(formatter); this.dateMathParser = dateTimeFormatter.toDateMathParser(); this.coerce = coerce; } - public RangeFieldType(String name, DateFormatter formatter) { - this(name, true, false, true, formatter, false, Collections.emptyMap()); + public RangeFieldType(DateFormatter formatter) { + this(true, false, true, formatter, false, Collections.emptyMap()); } public RangeType rangeType() { @@ -220,24 +222,24 @@ public RangeType rangeType() { } @Override 
- public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new BinaryIndexFieldData.Builder(name(), CoreValuesSourceType.RANGE); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new BinaryIndexFieldData.Builder(name, CoreValuesSourceType.RANGE); } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(this.name()); + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return context.fieldExistsInIndex(name); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { DateFormatter defaultFormatter = dateTimeFormatter(); DateFormatter formatter = format != null ? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale()) : defaultFormatter; - return new SourceValueFetcher(name(), context) { + return new SourceValueFetcher(name, context) { @Override @SuppressWarnings("unchecked") @@ -275,7 +277,7 @@ protected DateMathParser dateMathParser() { } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { if (rangeType == RangeType.DATE) { DateFormatter dateTimeFormatter = this.dateTimeFormatter; if (format != null) { @@ -288,16 +290,17 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) { // milliseconds. 
The only special case here is docvalue fields, which are handled somewhere else return new DocValueFormat.DateTime(dateTimeFormatter, timeZone, DateFieldMapper.Resolution.MILLISECONDS); } - return super.docValueFormat(format, timeZone); + return super.docValueFormat(name, format, timeZone); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - return rangeQuery(value, value, true, true, ShapeRelation.INTERSECTS, null, null, context); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + return rangeQuery(name, value, value, true, true, ShapeRelation.INTERSECTS, null, null, context); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -307,12 +310,12 @@ public Query rangeQuery( DateMathParser parser, SearchExecutionContext context ) { - failIfNotIndexed(); + failIfNotIndexed(name); if (parser == null) { parser = dateMathParser(); } return rangeType.rangeQuery( - name(), + name, hasDocValues(), lowerTerm, upperTerm, @@ -338,13 +341,13 @@ public Query rangeQuery( private RangeFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, RangeType type, Builder builder ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + super(simpleName, mappedField, multiFields, copyTo); this.type = type; this.index = builder.index.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); @@ -430,7 +433,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio context.doc().addAll(fieldType().rangeType.createFields(context, name(), range, index, hasDocValues, store)); if (hasDocValues == false && (index || store)) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java 
b/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java index 4a38289ec3202..71d851c029147 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java @@ -72,7 +72,7 @@ static final class RoutingFieldType extends StringFieldType { static RoutingFieldType INSTANCE = new RoutingFieldType(); private RoutingFieldType() { - super(NAME, true, true, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + super(true, true, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); } @Override @@ -81,8 +81,8 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return new StoredValueFetcher(context.lookup(), NAME); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return new StoredValueFetcher(context.lookup(), name); } } @@ -101,7 +101,7 @@ public static RoutingFieldMapper get(boolean required) { } private RoutingFieldMapper(boolean required) { - super(RoutingFieldType.INSTANCE); + super(new MappedField(NAME, RoutingFieldType.INSTANCE)); this.required = required; } @@ -121,8 +121,8 @@ public boolean required() { public void preParse(DocumentParserContext context) { String routing = context.sourceToParse().routing(); if (routing != null) { - context.doc().add(new Field(fieldType().name(), routing, Defaults.FIELD_TYPE)); - context.addToFieldNames(fieldType().name()); + context.doc().add(new Field(name(), routing, Defaults.FIELD_TYPE)); + context.addToFieldNames(name()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RuntimeField.java b/server/src/main/java/org/elasticsearch/index/mapper/RuntimeField.java index 1bf8c5d937e1c..6d4b08355aab3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RuntimeField.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/RuntimeField.java @@ -37,10 +37,10 @@ public interface RuntimeField extends ToXContentFragment { String name(); /** - * Exposes the {@link MappedFieldType}s backing this runtime field, used to execute queries, run aggs etc. - * @return the {@link MappedFieldType}s backing this runtime field + * Exposes the {@link MappedField}s backing this runtime field, used to execute queries, run aggs etc. + * @return the {@link MappedField}s backing this runtime field */ - Stream asMappedFieldTypes(); + Stream asMappedFields(); abstract class Builder { final String name; @@ -193,16 +193,16 @@ static Map parseRuntimeFields( } /** - * Collect and return all {@link MappedFieldType} exposed by the provided {@link RuntimeField}s. + * Collect and return all {@link MappedField} exposed by the provided {@link RuntimeField}s. * Note that validation is performed to make sure that there are no name clashes among the collected runtime fields. * This is because runtime fields with the same name are not accepted as part of the same section. 
* @param runtimeFields the runtime to extract the mapped field types from * @return the collected mapped field types */ - static Map collectFieldTypes(Collection runtimeFields) { + static Map collectMappedFields(Collection runtimeFields) { return runtimeFields.stream().flatMap(runtimeField -> { - List names = runtimeField.asMappedFieldTypes() - .map(MappedFieldType::name) + List names = runtimeField.asMappedFields() + .map(MappedField::name) .filter( name -> name.equals(runtimeField.name()) == false && (name.startsWith(runtimeField.name() + ".") == false @@ -212,11 +212,11 @@ static Map collectFieldTypes(Collection r if (names.isEmpty() == false) { throw new IllegalStateException("Found sub-fields with name not belonging to the parent field they are part of " + names); } - return runtimeField.asMappedFieldTypes(); + return runtimeField.asMappedFields(); }) .collect( Collectors.toUnmodifiableMap( - MappedFieldType::name, + MappedField::name, mappedFieldType -> mappedFieldType, (t, t2) -> { throw new IllegalArgumentException("Found two runtime fields with same name [" + t.name() + "]"); } ) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java index 45ffa6b58b99c..e7c3a8a29729e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java @@ -122,7 +122,7 @@ static final class SeqNoFieldType extends SimpleMappedFieldType { private static final SeqNoFieldType INSTANCE = new SeqNoFieldType(); private SeqNoFieldType() { - super(NAME, true, false, true, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, Collections.emptyMap()); + super(true, false, true, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, Collections.emptyMap()); } @Override @@ -148,29 +148,30 @@ private static long parse(Object value) { } @Override - public boolean mayExistInIndex(SearchExecutionContext 
context) { + public boolean mayExistInIndex(String name, SearchExecutionContext context) { return false; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "]."); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name + "]."); } @Override - public Query termQuery(Object value, @Nullable SearchExecutionContext context) { + public Query termQuery(String name, Object value, @Nullable SearchExecutionContext context) { long v = parse(value); - return LongPoint.newExactQuery(name(), v); + return LongPoint.newExactQuery(name, v); } @Override - public Query termsQuery(Collection values, @Nullable SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, @Nullable SearchExecutionContext context) { long[] v = values.stream().mapToLong(SeqNoFieldType::parse).toArray(); - return LongPoint.newSetQuery(name(), v); + return LongPoint.newSetQuery(name, v); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -197,18 +198,18 @@ public Query rangeQuery( --u; } } - return LongPoint.newRangeQuery(name(), l, u); + return LongPoint.newRangeQuery(name, l, u); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, SeqNoDocValuesField::new); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new SortedNumericIndexFieldData.Builder(name, NumericType.LONG, SeqNoDocValuesField::new); } } private SeqNoFieldMapper() { - super(SeqNoFieldType.INSTANCE); + super(new 
MappedField(NAME, SeqNoFieldType.INSTANCE)); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java index d17dcd61b9e09..631eb71b06bda 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java @@ -22,18 +22,18 @@ public abstract class SimpleMappedFieldType extends MappedFieldType { protected SimpleMappedFieldType( - String name, boolean isIndexed, boolean isStored, boolean hasDocValues, TextSearchInfo textSearchInfo, Map meta ) { - super(name, isIndexed, isStored, hasDocValues, textSearchInfo, meta); + super(isIndexed, isStored, hasDocValues, textSearchInfo, meta); } @Override public final Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -44,26 +44,27 @@ public final Query rangeQuery( SearchExecutionContext context ) { if (relation == ShapeRelation.DISJOINT) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support DISJOINT ranges"); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support DISJOINT ranges"); } // We do not fail on non-null time zones and date parsers // The reasoning is that on query parsers, you might want to set a time zone or format for date fields // but then the API has no way to know which fields are dates and which fields are not dates - return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, context); + return rangeQuery(name, lowerTerm, upperTerm, includeLower, includeUpper, context); } /** - * Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, ZoneId, DateMathParser, SearchExecutionContext)} + * Same as {@link #rangeQuery(String, Object, Object, boolean, boolean, ShapeRelation, ZoneId, DateMathParser, 
SearchExecutionContext)} * but without the trouble of relations or date-specific options. */ protected Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support range queries"); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries"); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index e79ba64528c6c..e5b274831c18c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -116,7 +116,7 @@ public SourceFieldMapper build() { static final class SourceFieldType extends MappedFieldType { private SourceFieldType(boolean enabled) { - super(NAME, false, enabled, false, TextSearchInfo.NONE, Collections.emptyMap()); + super(false, enabled, false, TextSearchInfo.NONE, Collections.emptyMap()); } @Override @@ -125,18 +125,18 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "]."); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name + "]."); } @Override - public Query existsQuery(SearchExecutionContext context) { - throw new QueryShardException(context, "The _source field is not searchable"); + public Query existsQuery(String name, SearchExecutionContext context) { + throw new QueryShardException(context, "The " + name + " field is not searchable"); } @Override - public Query 
termQuery(Object value, SearchExecutionContext context) { - throw new QueryShardException(context, "The _source field is not searchable"); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + throw new QueryShardException(context, "The " + name + " field is not searchable"); } } @@ -149,7 +149,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { private final String[] excludes; private SourceFieldMapper(boolean enabled, boolean synthetic, String[] includes, String[] excludes) { - super(new SourceFieldType(enabled)); + super(new MappedField(NAME, new SourceFieldType(enabled))); this.enabled = enabled; this.synthetic = synthetic; this.includes = includes; @@ -180,7 +180,7 @@ public void preParse(DocumentParserContext context) throws IOException { if (adaptedSource != null) { final BytesRef ref = adaptedSource.toBytesRef(); - context.doc().add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length)); + context.doc().add(new StoredField(name(), ref.bytes, ref.offset, ref.length)); } if (originalSource != null && adaptedSource != originalSource) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 2842e9476bb5a..9be6d86e9df43 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -42,18 +42,18 @@ public abstract class StringFieldType extends TermBasedFieldType { private static final Pattern WILDCARD_PATTERN = Pattern.compile("(\\\\.)|([?*]+)"); public StringFieldType( - String name, boolean isIndexed, boolean isStored, boolean hasDocValues, TextSearchInfo textSearchInfo, Map meta ) { - super(name, isIndexed, isStored, hasDocValues, textSearchInfo, meta); + super(isIndexed, isStored, hasDocValues, textSearchInfo, meta); } @Override public Query fuzzyQuery( + String name, 
Object value, Fuzziness fuzziness, int prefixLength, @@ -66,9 +66,9 @@ public Query fuzzyQuery( "[fuzzy] queries cannot be executed when '" + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false." ); } - failIfNotIndexed(); + failIfNotIndexed(name); return new FuzzyQuery( - new Term(name(), indexedValueForSearch(value)), + new Term(name, indexedValueForSearch(name, value)), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, @@ -77,7 +77,13 @@ public Query fuzzyQuery( } @Override - public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { + public Query prefixQuery( + String name, + String value, + MultiTermQuery.RewriteMethod method, + boolean caseInsensitive, + SearchExecutionContext context + ) { if (context.allowExpensiveQueries() == false) { throw new ElasticsearchException( "[prefix] queries cannot be executed when '" @@ -86,16 +92,16 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool + "fields please enable [index_prefixes]." 
); } - failIfNotIndexed(); + failIfNotIndexed(name); if (caseInsensitive) { - AutomatonQuery query = AutomatonQueries.caseInsensitivePrefixQuery((new Term(name(), indexedValueForSearch(value)))); + AutomatonQuery query = AutomatonQueries.caseInsensitivePrefixQuery((new Term(name, indexedValueForSearch(name, value)))); if (method != null) { query.setRewriteMethod(method); } return query; } - PrefixQuery query = new PrefixQuery(new Term(name(), indexedValueForSearch(value))); + PrefixQuery query = new PrefixQuery(new Term(name, indexedValueForSearch(name, value))); if (method != null) { query.setRewriteMethod(method); } @@ -133,23 +139,30 @@ public static final String normalizeWildcardPattern(String fieldname, String val } @Override - public Query wildcardQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { - return wildcardQuery(value, method, caseInsensitive, false, context); + public Query wildcardQuery( + String name, + String value, + MultiTermQuery.RewriteMethod method, + boolean caseInsensitive, + SearchExecutionContext context + ) { + return wildcardQuery(name, value, method, caseInsensitive, false, context); } @Override - public Query normalizedWildcardQuery(String value, MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { - return wildcardQuery(value, method, false, true, context); + public Query normalizedWildcardQuery(String name, String value, MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { + return wildcardQuery(name, value, method, false, true, context); } protected Query wildcardQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, boolean shouldNormalize, SearchExecutionContext context ) { - failIfNotIndexed(); + failIfNotIndexed(name); if (context.allowExpensiveQueries() == false) { throw new ElasticsearchException( "[wildcard] queries cannot be executed when '" + ALLOW_EXPENSIVE_QUERIES.getKey() + "' 
is set to false." @@ -158,10 +171,10 @@ protected Query wildcardQuery( Term term; if (getTextSearchInfo().searchAnalyzer() != null && shouldNormalize) { - value = normalizeWildcardPattern(name(), value, getTextSearchInfo().searchAnalyzer()); - term = new Term(name(), value); + value = normalizeWildcardPattern(name, value, getTextSearchInfo().searchAnalyzer()); + term = new Term(name, value); } else { - term = new Term(name(), indexedValueForSearch(value)); + term = new Term(name, indexedValueForSearch(name, value)); } if (caseInsensitive) { AutomatonQuery query = AutomatonQueries.caseInsensitiveWildcardQuery(term); @@ -175,6 +188,7 @@ protected Query wildcardQuery( @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -187,8 +201,13 @@ public Query regexpQuery( "[regexp] queries cannot be executed when '" + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false." ); } - failIfNotIndexed(); - RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), syntaxFlags, matchFlags, maxDeterminizedStates); + failIfNotIndexed(name); + RegexpQuery query = new RegexpQuery( + new Term(name, indexedValueForSearch(name, value)), + syntaxFlags, + matchFlags, + maxDeterminizedStates + ); if (method != null) { query.setRewriteMethod(method); } @@ -197,6 +216,7 @@ public Query regexpQuery( @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -210,11 +230,11 @@ public Query rangeQuery( + "' is set to false." ); } - failIfNotIndexed(); + failIfNotIndexed(name); return new TermRangeQuery( - name(), - lowerTerm == null ? null : indexedValueForSearch(lowerTerm), - upperTerm == null ? null : indexedValueForSearch(upperTerm), + name, + lowerTerm == null ? null : indexedValueForSearch(name, lowerTerm), + upperTerm == null ? 
null : indexedValueForSearch(name, upperTerm), includeLower, includeUpper ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java index 80e6d04d967d5..57c2983bed12b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java @@ -25,45 +25,44 @@ public abstract class TermBasedFieldType extends SimpleMappedFieldType { public TermBasedFieldType( - String name, boolean isIndexed, boolean isStored, boolean hasDocValues, TextSearchInfo textSearchInfo, Map meta ) { - super(name, isIndexed, isStored, hasDocValues, textSearchInfo, meta); + super(isIndexed, isStored, hasDocValues, textSearchInfo, meta); } /** Returns the indexed value used to construct search "values". * This method is used for the default implementations of most * query factory methods such as {@link #termQuery}. 
*/ - protected BytesRef indexedValueForSearch(Object value) { + protected BytesRef indexedValueForSearch(String name, Object value) { return BytesRefs.toBytesRef(value); } @Override - public Query termQueryCaseInsensitive(Object value, SearchExecutionContext context) { - failIfNotIndexed(); - return AutomatonQueries.caseInsensitiveTermQuery(new Term(name(), indexedValueForSearch(value))); + public Query termQueryCaseInsensitive(String name, Object value, SearchExecutionContext context) { + failIfNotIndexed(name); + return AutomatonQueries.caseInsensitiveTermQuery(new Term(name, indexedValueForSearch(name, value))); } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(name()); + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return context.fieldExistsInIndex(name); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - failIfNotIndexed(); - return new TermQuery(new Term(name(), indexedValueForSearch(value))); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + failIfNotIndexed(name); + return new TermQuery(new Term(name, indexedValueForSearch(name, value))); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); - return new TermInSetQuery(name(), bytesRefs); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + failIfNotIndexed(name); + BytesRef[] bytesRefs = values.stream().map(v -> indexedValueForSearch(name, v)).toArray(BytesRef[]::new); + return new TermInSetQuery(name, bytesRefs); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 8a987988861c7..439140d239075 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -347,10 +347,10 @@ private TextFieldType buildFieldType(FieldType fieldType, MapperBuilderContext c TextSearchInfo tsi = new TextSearchInfo(fieldType, similarity.getValue(), searchAnalyzer, searchQuoteAnalyzer); TextFieldType ft; if (indexCreatedVersion.isLegacyIndexVersion()) { - ft = new LegacyTextFieldType(context.buildFullName(name), index.getValue(), store.getValue(), tsi, meta.getValue()); + ft = new LegacyTextFieldType(index.getValue(), store.getValue(), tsi, meta.getValue()); // ignore fieldData and eagerGlobalOrdinals } else { - ft = new TextFieldType(context.buildFullName(name), index.getValue(), store.getValue(), tsi, meta.getValue()); + ft = new TextFieldType(index.getValue(), store.getValue(), tsi, meta.getValue()); ft.eagerGlobalOrdinals = eagerGlobalOrdinals.getValue(); if (fieldData.getValue()) { ft.setFielddata(true, freqFilter.getValue()); @@ -400,7 +400,7 @@ private SubFieldInfo buildPrefixInfo(MapperBuilderContext context, FieldType fie ); } - private SubFieldInfo buildPhraseInfo(FieldType fieldType, TextFieldType parent) { + private SubFieldInfo buildPhraseInfo(FieldType fieldType, MappedField parent) { if (indexPhrases.get() == false) { return null; } @@ -411,7 +411,7 @@ private SubFieldInfo buildPhraseInfo(FieldType fieldType, TextFieldType parent) throw new IllegalArgumentException("Cannot set index_phrases on field [" + name() + "] if positions are not enabled"); } FieldType phraseFieldType = new FieldType(fieldType); - parent.setIndexPhrases(); + ((TextFieldType) parent.type()).setIndexPhrases(); PhraseWrappedAnalyzer a = new PhraseWrappedAnalyzer( analyzers.getIndexAnalyzer().analyzer(), analyzers.positionIncrementGap.get() @@ -448,9 +448,9 @@ public TextFieldMapper build(MapperBuilderContext context) { indexCreatedVersion.isLegacyIndexVersion() ? 
() -> false : norms, termVectors ); - TextFieldType tft = buildFieldType(fieldType, context, indexCreatedVersion); + MappedField tft = new MappedField(context.buildFullName(name), buildFieldType(fieldType, context, indexCreatedVersion)); SubFieldInfo phraseFieldInfo = buildPhraseInfo(fieldType, tft); - SubFieldInfo prefixFieldInfo = buildPrefixInfo(context, fieldType, tft); + SubFieldInfo prefixFieldInfo = buildPrefixInfo(context, fieldType, (TextFieldType) tft.type()); MultiFields multiFields = multiFieldsBuilder.build(this, context); for (Mapper mapper : multiFields) { if (mapper.name().endsWith(FAST_PHRASE_SUFFIX) || mapper.name().endsWith(FAST_PREFIX_SUFFIX)) { @@ -541,22 +541,20 @@ private static final class PrefixFieldType extends StringFieldType { final int minChars; final int maxChars; - final TextFieldType parentField; - PrefixFieldType(TextFieldType parentField, int minChars, int maxChars) { - super(parentField.name() + FAST_PREFIX_SUFFIX, true, false, false, parentField.getTextSearchInfo(), Collections.emptyMap()); + PrefixFieldType(TextFieldType parentFieldType, int minChars, int maxChars) { + super(true, false, false, parentFieldType.getTextSearchInfo(), Collections.emptyMap()); this.minChars = minChars; this.maxChars = maxChars; - this.parentField = parentField; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(); } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { + public boolean mayExistInIndex(String name, SearchExecutionContext context) { return false; } @@ -564,8 +562,9 @@ boolean accept(int length) { return length >= minChars - 1 && length <= maxChars; } - @Override public Query prefixQuery( + String parentName, + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, @@ -573,9 +572,9 @@ public Query 
prefixQuery( ) { if (value.length() >= minChars) { if (caseInsensitive) { - return super.termQueryCaseInsensitive(value, context); + return super.termQueryCaseInsensitive(name, value, context); } - return super.termQuery(value, context); + return super.termQuery(name, value, context); } List automata = new ArrayList<>(); if (caseInsensitive) { @@ -588,25 +587,25 @@ public Query prefixQuery( automata.add(Automata.makeAnyChar()); } Automaton automaton = Operations.concatenate(automata); - AutomatonQuery query = new AutomatonQuery(new Term(name(), value + "*"), automaton); + AutomatonQuery query = new AutomatonQuery(new Term(name, value + "*"), automaton); query.setRewriteMethod(method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) - .add(new TermQuery(new Term(parentField.name(), value)), BooleanClause.Occur.SHOULD) + .add(new TermQuery(new Term(parentName, value)), BooleanClause.Occur.SHOULD) .build(); } - public IntervalsSource intervals(BytesRef term) { + public IntervalsSource intervals(String name, BytesRef term) { if (getTextSearchInfo().hasPositions() == false) { - throw new IllegalArgumentException("Cannot create intervals over a field [" + name() + "] without indexed positions"); + throw new IllegalArgumentException("Cannot create intervals over a field [" + name + "] without indexed positions"); } if (term.length > maxChars) { return Intervals.prefix(term); } if (term.length >= minChars) { - return Intervals.fixField(name(), Intervals.term(term)); + return Intervals.fixField(name, Intervals.term(term)); } String wildcardTerm = term.utf8ToString() + "?".repeat(Math.max(0, minChars - term.length)); - return Intervals.or(Intervals.fixField(name(), Intervals.wildcard(new BytesRef(wildcardTerm))), Intervals.term(term)); + return Intervals.or(Intervals.fixField(name, Intervals.wildcard(new BytesRef(wildcardTerm))), Intervals.term(term)); } @Override @@ -620,7 +619,7 @@ public String toString() { } @Override - public Query 
existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { throw new UnsupportedOperationException(); } } @@ -647,14 +646,13 @@ public static class TextFieldType extends StringFieldType { private boolean indexPhrases = false; private boolean eagerGlobalOrdinals = false; - public TextFieldType(String name, boolean indexed, boolean stored, TextSearchInfo tsi, Map meta) { - super(name, indexed, stored, false, tsi, meta); + public TextFieldType(boolean indexed, boolean stored, TextSearchInfo tsi, Map meta) { + super(indexed, stored, false, tsi, meta); fielddata = false; } - public TextFieldType(String name, boolean indexed, boolean stored, Map meta) { + public TextFieldType(boolean indexed, boolean stored, Map meta) { super( - name, indexed, stored, false, @@ -664,9 +662,8 @@ public TextFieldType(String name, boolean indexed, boolean stored, Map= prefixFieldType.minChars && value.length() <= prefixFieldType.maxChars && prefixFieldType.getTextSearchInfo().hasPositions()) { - return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixFieldType.name(), indexedValueForSearch(value))), name()); + return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixName(name), indexedValueForSearch(name, value))), name); } else { SpanMultiTermQueryWrapper spanMulti = new SpanMultiTermQueryWrapper<>( - new PrefixQuery(new Term(name(), indexedValueForSearch(value))) + new PrefixQuery(new Term(name, indexedValueForSearch(name, value))) ); spanMulti.setRewriteMethod(method); return spanMulti; @@ -764,26 +771,27 @@ public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRew } @Override - public IntervalsSource termIntervals(BytesRef term, SearchExecutionContext context) { + public IntervalsSource termIntervals(String name, BytesRef term, SearchExecutionContext context) { if (getTextSearchInfo().hasPositions() == false) { - throw new IllegalArgumentException("Cannot create intervals over field [" 
+ name() + "] with no positions indexed"); + throw new IllegalArgumentException("Cannot create intervals over field [" + name + "] with no positions indexed"); } return Intervals.term(term); } @Override - public IntervalsSource prefixIntervals(BytesRef term, SearchExecutionContext context) { + public IntervalsSource prefixIntervals(String name, BytesRef term, SearchExecutionContext context) { if (getTextSearchInfo().hasPositions() == false) { - throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + throw new IllegalArgumentException("Cannot create intervals over field [" + name + "] with no positions indexed"); } if (prefixFieldType != null) { - return prefixFieldType.intervals(term); + return prefixFieldType.intervals(prefixName(name), term); } return Intervals.prefix(term); } @Override public IntervalsSource fuzzyIntervals( + String name, String term, int maxDistance, int prefixLength, @@ -791,31 +799,31 @@ public IntervalsSource fuzzyIntervals( SearchExecutionContext context ) { if (getTextSearchInfo().hasPositions() == false) { - throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + throw new IllegalArgumentException("Cannot create intervals over field [" + name + "] with no positions indexed"); } - FuzzyQuery fq = new FuzzyQuery(new Term(name(), term), maxDistance, prefixLength, 128, transpositions); + FuzzyQuery fq = new FuzzyQuery(new Term(name, term), maxDistance, prefixLength, 128, transpositions); return Intervals.multiterm(fq.getAutomata(), term); } @Override - public IntervalsSource wildcardIntervals(BytesRef pattern, SearchExecutionContext context) { + public IntervalsSource wildcardIntervals(String name, BytesRef pattern, SearchExecutionContext context) { if (getTextSearchInfo().hasPositions() == false) { - throw new IllegalArgumentException("Cannot create intervals over field [" + name() + "] with no positions indexed"); + 
throw new IllegalArgumentException("Cannot create intervals over field [" + name + "] with no positions indexed"); } return Intervals.wildcard(pattern); } - private void checkForPositions() { + private void checkForPositions(String name) { if (getTextSearchInfo().hasPositions() == false) { - throw new IllegalStateException("field:[" + name() + "] was indexed without position data; cannot run PhraseQuery"); + throw new IllegalStateException("field:[" + name + "] was indexed without position data; cannot run PhraseQuery"); } } @Override - public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements, SearchExecutionContext context) + public Query phraseQuery(String name, TokenStream stream, int slop, boolean enablePosIncrements, SearchExecutionContext context) throws IOException { - String field = name(); - checkForPositions(); + String field = name; + checkForPositions(name); // we can't use the index_phrases shortcut with slop, if there are gaps in the stream, // or if the incoming token stream is the output of a token graph due to // https://issues.apache.org/jira/browse/LUCENE-8916 @@ -847,9 +855,14 @@ public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncremen } @Override - public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext context) - throws IOException { - String field = name(); + public Query multiPhraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePositionIncrements, + SearchExecutionContext context + ) throws IOException { + String field = name; if (indexPhrases && slop == 0 && hasGaps(stream) == false) { stream = new FixedShingleFilter(stream, 2); field = field + FAST_PHRASE_SUFFIX; @@ -868,17 +881,18 @@ private static int countTokens(TokenStream ts) throws IOException { } @Override - public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) throws IOException { + public 
Query phrasePrefixQuery(String name, TokenStream stream, int slop, int maxExpansions, SearchExecutionContext context) + throws IOException { if (countTokens(stream) > 1) { - checkForPositions(); + checkForPositions(name); } - return analyzePhrasePrefix(stream, slop, maxExpansions); + return analyzePhrasePrefix(name, stream, slop, maxExpansions); } - private Query analyzePhrasePrefix(TokenStream stream, int slop, int maxExpansions) throws IOException { - String prefixField = prefixFieldType == null || slop > 0 ? null : prefixFieldType.name(); + private Query analyzePhrasePrefix(String name, TokenStream stream, int slop, int maxExpansions) throws IOException { + String prefixField = prefixFieldType == null || slop > 0 ? null : prefixName(name); IntPredicate usePrefix = (len) -> len >= prefixFieldType.minChars && len <= prefixFieldType.maxChars; - return createPhrasePrefixQuery(stream, name(), slop, maxExpansions, prefixField, usePrefix); + return createPhrasePrefixQuery(stream, name, slop, maxExpansions, prefixField, usePrefix); } public static boolean hasGaps(TokenStream stream) throws IOException { @@ -894,19 +908,19 @@ public static boolean hasGaps(TokenStream stream) throws IOException { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { if (fielddata == false) { throw new IllegalArgumentException( "Text fields are not optimised for operations that require per-document " + "field data like aggregations and sorting, so these operations are disabled by default. Please use a " + "keyword field instead. Alternatively, set fielddata=true on [" - + name() + + name + "] in order to load " + "field data by uninverting the inverted index. Note that this can use significant memory." 
); } return new PagedBytesIndexFieldData.Builder( - name(), + name, filter.minFreq, filter.maxFreq, filter.minSegmentSize, @@ -922,13 +936,12 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S public static class ConstantScoreTextFieldType extends TextFieldType { - public ConstantScoreTextFieldType(String name, boolean indexed, boolean stored, TextSearchInfo tsi, Map meta) { - super(name, indexed, stored, tsi, meta); + public ConstantScoreTextFieldType(boolean indexed, boolean stored, TextSearchInfo tsi, Map meta) { + super(indexed, stored, tsi, meta); } - public ConstantScoreTextFieldType(String name) { + public ConstantScoreTextFieldType() { this( - name, true, false, new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER), @@ -936,24 +949,19 @@ public ConstantScoreTextFieldType(String name) { ); } - public ConstantScoreTextFieldType(String name, boolean indexed, boolean stored, Map meta) { - this( - name, - indexed, - stored, - new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER), - meta - ); + public ConstantScoreTextFieldType(boolean indexed, boolean stored, Map meta) { + this(indexed, stored, new TextSearchInfo(Defaults.FIELD_TYPE, null, Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER), meta); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { // Disable scoring - return new ConstantScoreQuery(super.termQuery(value, context)); + return new ConstantScoreQuery(super.termQuery(name, value, context)); } @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -962,59 +970,73 @@ public Query fuzzyQuery( SearchExecutionContext context ) { // Disable scoring - return new ConstantScoreQuery(super.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions, context)); 
+ return new ConstantScoreQuery(super.fuzzyQuery(name, value, fuzziness, prefixLength, maxExpansions, transpositions, context)); } @Override - public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements, SearchExecutionContext queryShardContext) - throws IOException { + public Query phraseQuery( + String name, + TokenStream stream, + int slop, + boolean enablePosIncrements, + SearchExecutionContext queryShardContext + ) throws IOException { // Disable scoring - return new ConstantScoreQuery(super.phraseQuery(stream, slop, enablePosIncrements, queryShardContext)); + return new ConstantScoreQuery(super.phraseQuery(name, stream, slop, enablePosIncrements, queryShardContext)); } @Override public Query multiPhraseQuery( + String name, TokenStream stream, int slop, boolean enablePositionIncrements, SearchExecutionContext queryShardContext ) throws IOException { // Disable scoring - return new ConstantScoreQuery(super.multiPhraseQuery(stream, slop, enablePositionIncrements, queryShardContext)); + return new ConstantScoreQuery(super.multiPhraseQuery(name, stream, slop, enablePositionIncrements, queryShardContext)); } @Override - public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, SearchExecutionContext queryShardContext) - throws IOException { + public Query phrasePrefixQuery( + String name, + TokenStream stream, + int slop, + int maxExpansions, + SearchExecutionContext queryShardContext + ) throws IOException { // Disable scoring - return new ConstantScoreQuery(super.phrasePrefixQuery(stream, slop, maxExpansions, queryShardContext)); + return new ConstantScoreQuery(super.phrasePrefixQuery(name, stream, slop, maxExpansions, queryShardContext)); } } static class LegacyTextFieldType extends ConstantScoreTextFieldType { - private final MappedFieldType existQueryFieldType; - - LegacyTextFieldType(String name, boolean indexed, boolean stored, TextSearchInfo tsi, Map meta) { - super(name, indexed, stored, tsi, meta); - // norms 
are not available, neither are doc-values, so fall back to _source to run exists query - existQueryFieldType = KeywordScriptFieldType.sourceOnly(name()).asMappedFieldTypes().findFirst().get(); + LegacyTextFieldType(boolean indexed, boolean stored, TextSearchInfo tsi, Map meta) { + super(indexed, stored, tsi, meta); } @Override - public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, SearchExecutionContext context) { - throw new IllegalArgumentException("Cannot use span prefix queries on text field " + name() + " of a legacy index"); + public SpanQuery spanPrefixQuery( + String name, + String value, + SpanMultiTermQueryWrapper.SpanRewriteMethod method, + SearchExecutionContext context + ) { + throw new IllegalArgumentException("Cannot use span prefix queries on text field " + name + " of a legacy index"); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { if (context.allowExpensiveQueries() == false) { throw new ElasticsearchException( "runtime-computed exists query cannot be executed while [" + ALLOW_EXPENSIVE_QUERIES.getKey() + "] is set to [false]." 
); } - return existQueryFieldType.existsQuery(context); + // norms are not available, neither are doc-values, so fall back to _source to run exists query + final MappedField existQueryField = KeywordScriptFieldType.sourceOnly(name).asMappedFields().findFirst().get(); + return existQueryField.existsQuery(context); } } @@ -1043,7 +1065,7 @@ public Query existsQuery(SearchExecutionContext context) { protected TextFieldMapper( String simpleName, FieldType fieldType, - TextFieldType mappedFieldType, + MappedField mappedField, Map indexAnalyzers, SubFieldInfo prefixFieldInfo, SubFieldInfo phraseFieldInfo, @@ -1051,9 +1073,9 @@ protected TextFieldMapper( CopyTo copyTo, Builder builder ) { - super(simpleName, mappedFieldType, multiFields, copyTo, false, null); - assert mappedFieldType.getTextSearchInfo().isTokenized(); - assert mappedFieldType.hasDocValues() == false; + super(simpleName, mappedField, multiFields, copyTo, false, null); + assert mappedField.getTextSearchInfo().isTokenized(); + assert mappedField.hasDocValues() == false; if (fieldType.indexOptions() == IndexOptions.NONE && fieldType().fielddata()) { throw new IllegalArgumentException("Cannot enable fielddata on a [text] field that is not indexed: [" + name() + "]"); } @@ -1097,10 +1119,10 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - Field field = new Field(fieldType().name(), value, fieldType); + Field field = new Field(name(), value, fieldType); context.doc().add(field); if (fieldType.omitNorms()) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } if (prefixFieldInfo != null) { context.doc().add(new Field(prefixFieldInfo.field, value, prefixFieldInfo.fieldType)); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java index 
ec4c04ea81e4a..99bf9c39d2e16 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java @@ -94,7 +94,7 @@ public TimeSeriesIdFieldMapper build() { public static final class TimeSeriesIdFieldType extends MappedFieldType { private TimeSeriesIdFieldType() { - super(NAME, false, false, true, TextSearchInfo.NONE, Collections.emptyMap()); + super(false, false, true, TextSearchInfo.NONE, Collections.emptyMap()); } @Override @@ -103,24 +103,24 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return new DocValueFetcher(docValueFormat(format, null), context.getForField(this)); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return new DocValueFetcher(docValueFormat(name, format, null), context.getForField(new MappedField(name, this))); } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } return DocValueFormat.TIME_SERIES_ID; } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); // TODO don't leak the TSID's binary format into the script return new SortedOrdinalsIndexFieldData.Builder( - name(), + name, CoreValuesSourceType.KEYWORD, (dv, n) -> new DelegateDocValuesField( new ScriptDocValues.Strings(new 
ScriptDocValues.StringsSupplier(FieldData.toString(dv))), @@ -130,13 +130,13 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new IllegalArgumentException("[" + NAME + "] is not searchable"); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + throw new IllegalArgumentException("[" + name + "] is not searchable"); } } private TimeSeriesIdFieldMapper() { - super(FIELD_TYPE); + super(new MappedField(NAME, FIELD_TYPE)); } @Override @@ -145,7 +145,7 @@ public void postParse(DocumentParserContext context) throws IOException { TimeSeriesIdBuilder timeSeriesIdBuilder = (TimeSeriesIdBuilder) context.getDimensions(); BytesRef timeSeriesId = timeSeriesIdBuilder.build().toBytesRef(); - context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId)); + context.doc().add(new SortedDocValuesField(name(), timeSeriesId)); TsidExtractingIdFieldMapper.createField(context, timeSeriesId); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java index 89201239e46cc..6b9751bd53cfc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java @@ -55,7 +55,7 @@ public class TsidExtractingIdFieldMapper extends IdFieldMapper { static final class IdFieldType extends TermBasedFieldType { IdFieldType() { - super(NAME, true, true, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + super(true, true, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); } @Override @@ -70,23 +70,23 @@ public boolean isSearchable() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher 
valueFetcher(String name, SearchExecutionContext context, String format) { return new StoredValueFetcher(context.lookup(), NAME); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - return termsQuery(Arrays.asList(value), context); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + return termsQuery(name, Arrays.asList(value), context); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { return new MatchAllDocsQuery(); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexed(); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + failIfNotIndexed(name); BytesRef[] bytesRefs = values.stream().map(v -> { Object idObject = v; if (idObject instanceof BytesRef) { @@ -94,17 +94,17 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { } return Uid.encodeId(idObject.toString()); }).toArray(BytesRef[]::new); - return new TermInSetQuery(name(), bytesRefs); + return new TermInSetQuery(name, bytesRefs); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { throw new IllegalArgumentException("Fielddata is not supported on [_id] field in [time_series] indices"); } } private TsidExtractingIdFieldMapper() { - super(new IdFieldType()); + super(new MappedField(NAME, new IdFieldType())); } private static final long SEED = 0; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java index 706a862b989b5..b3e4347a208f8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java @@ -37,7 +37,7 @@ static final class VersionFieldType extends MappedFieldType { public static final VersionFieldType INSTANCE = new VersionFieldType(); private VersionFieldType() { - super(NAME, false, false, true, TextSearchInfo.NONE, Collections.emptyMap()); + super(false, false, true, TextSearchInfo.NONE, Collections.emptyMap()); } @Override @@ -46,24 +46,24 @@ public String typeName() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new QueryShardException(context, "The _version field is not searchable"); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + throw new QueryShardException(context, "The " + name + " field is not searchable"); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return new DocValueFetcher(docValueFormat(format, null), context.getForField(this)); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return new DocValueFetcher(docValueFormat(name, format, null), context.getForField(new MappedField(name, this))); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new SortedNumericIndexFieldData.Builder(name(), NumericType.LONG, VersionDocValuesField::new); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new SortedNumericIndexFieldData.Builder(name, NumericType.LONG, VersionDocValuesField::new); } } private VersionFieldMapper() { - super(VersionFieldType.INSTANCE); + super(new MappedField(NAME, VersionFieldType.INSTANCE)); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java 
b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index 4dd6a4e9e4c00..c4868ae224942 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -45,9 +45,9 @@ import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; -import org.elasticsearch.index.mapper.DynamicFieldType; +import org.elasticsearch.index.mapper.DynamicMappedField; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; @@ -184,15 +184,21 @@ public FlattenedFieldMapper build(MapperBuilderContext context) { if (copyTo.copyToFields().isEmpty() == false) { throw new IllegalArgumentException(CONTENT_TYPE + " field [" + name + "] does not support [copy_to]"); } - MappedFieldType ft = new RootFlattenedFieldType( - context.buildFullName(name), + RootFlattenedMappedFieldType ft = new RootFlattenedMappedFieldType( indexed.get(), hasDocValues.get(), meta.get(), splitQueriesOnWhitespace.get(), eagerGlobalOrdinals.get() ); - return new FlattenedFieldMapper(name, ft, this); + String fullName = context.buildFullName(name); + String childName = fullName + KEYED_FIELD_SUFFIX; + return new FlattenedFieldMapper(name, new DynamicMappedField(fullName, ft) { + @Override + public MappedField getChildField(String childPath) { + return new MappedField(childName, new KeyedFlattenedFieldType(fullName, childPath, ft)); + } + }, this); } } @@ -215,7 +221,6 @@ public static final class KeyedFlattenedFieldType extends 
StringFieldType { Map meta ) { super( - rootName + KEYED_FIELD_SUFFIX, indexed, false, hasDocValues, @@ -226,7 +231,7 @@ public static final class KeyedFlattenedFieldType extends StringFieldType { this.rootName = rootName; } - private KeyedFlattenedFieldType(String rootName, String key, RootFlattenedFieldType ref) { + private KeyedFlattenedFieldType(String rootName, String key, RootFlattenedMappedFieldType ref) { this(rootName, ref.isIndexed(), ref.hasDocValues(), key, ref.splitQueriesOnWhitespace, ref.meta()); } @@ -240,13 +245,14 @@ public String key() { } @Override - public Query existsQuery(SearchExecutionContext context) { - Term term = new Term(name(), FlattenedFieldParser.createKeyedValue(key, "")); + public Query existsQuery(String name, SearchExecutionContext context) { + Term term = new Term(name, FlattenedFieldParser.createKeyedValue(key, "")); return new PrefixQuery(term); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -263,11 +269,12 @@ public Query rangeQuery( ); } - return super.rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, context); + return super.rangeQuery(name, lowerTerm, upperTerm, includeLower, includeUpper, context); } @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -282,6 +289,7 @@ public Query fuzzyQuery( @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -296,6 +304,7 @@ public Query regexpQuery( @Override public Query wildcardQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, @@ -307,15 +316,20 @@ public Query wildcardQuery( } @Override - public Query termQueryCaseInsensitive(Object value, SearchExecutionContext context) { - return AutomatonQueries.caseInsensitiveTermQuery(new Term(name(), indexedValueForSearch(value))); + public Query termQueryCaseInsensitive(String name, Object value, 
SearchExecutionContext context) { + return AutomatonQueries.caseInsensitiveTermQuery(new Term(name, indexedValueForSearch(name, value))); } @Override - public TermsEnum getTerms(boolean caseInsensitive, String string, SearchExecutionContext queryShardContext, String searchAfter) - throws IOException { + public TermsEnum getTerms( + String name, + boolean caseInsensitive, + String string, + SearchExecutionContext queryShardContext, + String searchAfter + ) throws IOException { IndexReader reader = queryShardContext.searcher().getTopReaderContext().reader(); - Terms terms = MultiTerms.getTerms(reader, name()); + Terms terms = MultiTerms.getTerms(reader, name); if (terms == null) { // Field does not exist on this shard. return null; @@ -341,7 +355,7 @@ public TermsEnum getTerms(boolean caseInsensitive, String string, SearchExecutio } @Override - public BytesRef indexedValueForSearch(Object value) { + public BytesRef indexedValueForSearch(String name, Object value) { if (value == null) { return null; } @@ -352,13 +366,13 @@ public BytesRef indexedValueForSearch(Object value) { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new KeyedFlattenedFieldData.Builder(name(), key, (dv, n) -> new FlattenedDocValuesField(FieldData.toString(dv), n)); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new KeyedFlattenedFieldData.Builder(name, key, (dv, n) -> new FlattenedDocValuesField(FieldData.toString(dv), n)); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { throw new IllegalArgumentException( "Field [" + rootName + "." + key + "] of type [" + typeName() + "] doesn't support formats." 
@@ -584,12 +598,11 @@ public IndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService * A field type that represents all 'root' values. This field type is used in * searches on the flattened field itself, e.g. 'my_flattened: some_value'. */ - public static final class RootFlattenedFieldType extends StringFieldType implements DynamicFieldType { + public static final class RootFlattenedMappedFieldType extends StringFieldType { private final boolean splitQueriesOnWhitespace; private final boolean eagerGlobalOrdinals; - public RootFlattenedFieldType( - String name, + public RootFlattenedMappedFieldType( boolean indexed, boolean hasDocValues, Map meta, @@ -597,7 +610,6 @@ public RootFlattenedFieldType( boolean eagerGlobalOrdinals ) { super( - name, indexed, false, hasDocValues, @@ -619,8 +631,8 @@ public boolean eagerGlobalOrdinals() { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(name()); + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return context.fieldExistsInIndex(name); } @Override @@ -633,36 +645,31 @@ public Object valueForDisplay(Object value) { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); return new SortedSetOrdinalsIndexFieldData.Builder( - name(), + name, CoreValuesSourceType.KEYWORD, (dv, n) -> new FlattenedDocValuesField(FieldData.toString(dv), n) ); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.identity(name(), context, format); - } - - @Override - public MappedFieldType getChildFieldType(String childPath) { - return new KeyedFlattenedFieldType(name(), childPath, this); + public ValueFetcher valueFetcher(String name, 
SearchExecutionContext context, String format) { + return SourceValueFetcher.identity(name, context, format); } } private final FlattenedFieldParser fieldParser; private final Builder builder; - private FlattenedFieldMapper(String simpleName, MappedFieldType mappedFieldType, Builder builder) { - super(simpleName, mappedFieldType, MultiFields.empty(), CopyTo.empty()); + private FlattenedFieldMapper(String simpleName, MappedField mappedField, Builder builder) { + super(simpleName, mappedField, MultiFields.empty(), CopyTo.empty()); this.builder = builder; this.fieldParser = new FlattenedFieldParser( - mappedFieldType.name(), - mappedFieldType.name() + KEYED_FIELD_SUFFIX, - mappedFieldType, + mappedField.name(), + mappedField.name() + KEYED_FIELD_SUFFIX, + mappedField.type(), builder.depthLimit.get(), builder.ignoreAbove.get(), builder.nullValue.get() @@ -671,7 +678,7 @@ private FlattenedFieldMapper(String simpleName, MappedFieldType mappedFieldType, @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override @@ -688,8 +695,8 @@ int ignoreAbove() { } @Override - public RootFlattenedFieldType fieldType() { - return (RootFlattenedFieldType) super.fieldType(); + public RootFlattenedMappedFieldType fieldType() { + return (RootFlattenedMappedFieldType) super.fieldType(); } @Override @@ -698,7 +705,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio return; } - if (mappedFieldType.isIndexed() == false && mappedFieldType.hasDocValues() == false) { + if (mappedField.isIndexed() == false && mappedField.hasDocValues() == false) { context.parser().skipChildren(); return; } @@ -706,8 +713,8 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio XContentParser xContentParser = context.parser(); context.doc().addAll(fieldParser.parse(xContentParser)); - if (mappedFieldType.hasDocValues() == false) { - 
context.addToFieldNames(fieldType().name()); + if (mappedField.hasDocValues() == false) { + context.addToFieldNames(name()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index b66ae08386c68..e0168dd071392 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -24,7 +24,7 @@ import org.elasticsearch.index.mapper.ArraySourceValueFetcher; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MappingLookup; @@ -129,13 +129,15 @@ protected Parameter[] getParameters() { public DenseVectorFieldMapper build(MapperBuilderContext context) { return new DenseVectorFieldMapper( name, - new DenseVectorFieldType( + new MappedField( context.buildFullName(name), - indexVersionCreated, - dims.getValue(), - indexed.getValue(), - similarity.getValue(), - meta.getValue() + new DenseVectorFieldType( + indexVersionCreated, + dims.getValue(), + indexed.getValue(), + similarity.getValue(), + meta.getValue() + ) ), dims.getValue(), indexed.getValue(), @@ -234,14 +236,13 @@ public static final class DenseVectorFieldType extends SimpleMappedFieldType { private final Version indexVersionCreated; public DenseVectorFieldType( - String name, Version indexVersionCreated, int dims, boolean indexed, VectorSimilarity similarity, Map meta ) { - super(name, indexed, false, indexed == false, TextSearchInfo.NONE, meta); + super(indexed, false, indexed == false, TextSearchInfo.NONE, meta); 
this.dims = dims; this.indexed = indexed; this.similarity = similarity; @@ -254,11 +255,11 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new ArraySourceValueFetcher(name(), context) { + return new ArraySourceValueFetcher(name, context) { @Override protected Object parseSourceValue(Object value) { return value; @@ -267,36 +268,36 @@ protected Object parseSourceValue(Object value) { } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { throw new IllegalArgumentException( - "Field [" + name() + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations" + "Field [" + name + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations" ); } @Override - public boolean isAggregatable() { + public boolean isAggregatable(String name) { return false; } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - return new VectorIndexFieldData.Builder(name(), CoreValuesSourceType.KEYWORD, indexVersionCreated, dims, indexed); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + return new VectorIndexFieldData.Builder(name, CoreValuesSourceType.KEYWORD, indexVersionCreated, dims, indexed); } @Override - public Query existsQuery(SearchExecutionContext context) { - return new FieldExistsQuery(name()); + public Query existsQuery(String name, SearchExecutionContext 
context) { + return new FieldExistsQuery(name); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support term queries"); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support term queries"); } - public KnnVectorQuery createKnnQuery(float[] queryVector, int numCands, Query filter) { + public KnnVectorQuery createKnnQuery(String name, float[] queryVector, int numCands, Query filter) { if (isIndexed() == false) { throw new IllegalArgumentException( - "to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]" + "to perform knn search on field [" + name + "], its mapping must have [index] set to [true]" ); } @@ -313,7 +314,7 @@ public KnnVectorQuery createKnnQuery(float[] queryVector, int numCands, Query fi } checkVectorMagnitude(queryVector, squaredMagnitude); } - return new KnnVectorQuery(name(), queryVector, numCands, filter); + return new KnnVectorQuery(name, queryVector, numCands, filter); } private void checkVectorMagnitude(float[] vector, float squaredMagnitude) { @@ -354,7 +355,7 @@ private void checkVectorMagnitude(float[] vector, float squaredMagnitude) { private DenseVectorFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, int dims, boolean indexed, VectorSimilarity similarity, @@ -363,7 +364,7 @@ private DenseVectorFieldMapper( MultiFields multiFields, CopyTo copyTo ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + super(simpleName, mappedField, multiFields, copyTo); this.dims = dims; this.indexed = indexed; this.similarity = similarity; @@ -383,7 +384,7 @@ public boolean parsesArrayValue() { @Override public void parse(DocumentParserContext context) throws IOException { - if 
(context.doc().getByKey(fieldType().name()) != null) { + if (context.doc().getByKey(name()) != null) { throw new IllegalArgumentException( "Field [" + name() @@ -394,7 +395,7 @@ public void parse(DocumentParserContext context) throws IOException { } Field field = fieldType().indexed ? parseKnnVector(context) : parseBinaryDocValuesVector(context); - context.doc().addWithKey(fieldType().name(), field); + context.doc().addWithKey(name(), field); } private Field parseKnnVector(DocumentParserContext context) throws IOException { @@ -411,7 +412,7 @@ private Field parseKnnVector(DocumentParserContext context) throws IOException { } checkDimensionMatches(index, context); fieldType().checkVectorMagnitude(vector, squaredMagnitude); - return new KnnVectorField(fieldType().name(), vector, similarity.function); + return new KnnVectorField(name(), vector, similarity.function); } private Field parseBinaryDocValuesVector(DocumentParserContext context) throws IOException { @@ -438,7 +439,7 @@ private Field parseBinaryDocValuesVector(DocumentParserContext context) throws I float vectorMagnitude = (float) Math.sqrt(dotProduct); byteBuffer.putFloat(vectorMagnitude); } - return new BinaryDocValuesField(fieldType().name(), new BytesRef(bytes)); + return new BinaryDocValuesField(name(), new BytesRef(bytes)); } private void checkDimensionExceeded(int index, DocumentParserContext context) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java index 0264d83d45fe1..d2bd1b04c8588 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.mapper.DocumentParserContext; import 
org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.TextSearchInfo; @@ -57,7 +58,7 @@ protected Parameter[] getParameters() { public SparseVectorFieldMapper build(MapperBuilderContext context) { return new SparseVectorFieldMapper( name, - new SparseVectorFieldType(context.buildFullName(name), meta.getValue()), + new MappedField(context.buildFullName(name), new SparseVectorFieldType(meta.getValue())), multiFieldsBuilder.build(this, context), copyTo.build() ); @@ -75,8 +76,8 @@ public SparseVectorFieldMapper build(MapperBuilderContext context) { public static final class SparseVectorFieldType extends MappedFieldType { - public SparseVectorFieldType(String name, Map meta) { - super(name, false, false, false, TextSearchInfo.NONE, meta); + public SparseVectorFieldType(Map meta) { + super(false, false, false, TextSearchInfo.NONE, meta); } @Override @@ -85,28 +86,28 @@ public String typeName() { } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { throw new UnsupportedOperationException(ERROR_MESSAGE_7X); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(ERROR_MESSAGE_7X); } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { throw new UnsupportedOperationException(ERROR_MESSAGE_7X); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new 
UnsupportedOperationException(ERROR_MESSAGE_7X); } } - private SparseVectorFieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, mappedFieldType, multiFields, copyTo); + private SparseVectorFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo) { + super(simpleName, mappedField, multiFields, copyTo); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java index 525cc72d74c97..1e48a978a5bf1 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractGeometryQueryBuilder.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -314,7 +314,7 @@ public boolean ignoreUnmapped() { } /** builds the appropriate lucene shape query */ - protected abstract Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType); + protected abstract Query buildShapeQuery(SearchExecutionContext context, MappedField mappedField); /** writes the xcontent specific to this shape query */ protected abstract void doShapeQueryXContent(XContentBuilder builder, Params params) throws IOException; @@ -334,15 +334,15 @@ protected Query doToQuery(SearchExecutionContext context) { if (shape == null || supplier != null) { throw new UnsupportedOperationException("query must be rewritten first"); } - final MappedFieldType fieldType = 
context.getFieldType(fieldName); - if (fieldType == null) { + final MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { if (ignoreUnmapped) { return new MatchNoDocsQuery(); } else { throw new QueryShardException(context, "failed to find type for field [" + fieldName + "]"); } } - return buildShapeQuery(context, fieldType); + return buildShapeQuery(context, mappedField); } /** diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index 52741a88e056d..66215fc8d1ba9 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.search.QueryParserHelper; @@ -294,7 +294,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { Map fields = QueryParserHelper.resolveMappingFields(context, fieldsAndBoosts); // If all fields are unmapped, then return an 'unmapped field query'. 
- boolean hasMappedField = fields.keySet().stream().anyMatch(k -> context.getFieldType(k) != null); + boolean hasMappedField = fields.keySet().stream().anyMatch(k -> context.getMappedField(k) != null); if (hasMappedField == false) { return Queries.newUnmappedFieldsQuery(fields.keySet()); } @@ -305,21 +305,21 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { List fieldsAndBoosts = new ArrayList<>(); for (Map.Entry entry : fields.entrySet()) { String name = entry.getKey(); - MappedFieldType fieldType = context.getFieldType(name); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(name); + if (mappedField == null) { continue; } - if (fieldType.familyTypeName().equals(TextFieldMapper.CONTENT_TYPE) == false) { + if (mappedField.familyTypeName().equals(TextFieldMapper.CONTENT_TYPE) == false) { throw new IllegalArgumentException( - "Field [" + fieldType.name() + "] of type [" + fieldType.typeName() + "] does not support [" + NAME + "] queries" + "Field [" + mappedField.name() + "] of type [" + mappedField.typeName() + "] does not support [" + NAME + "] queries" ); } float boost = entry.getValue() == null ? 
1.0f : entry.getValue(); - fieldsAndBoosts.add(new FieldAndBoost(fieldType, boost)); + fieldsAndBoosts.add(new FieldAndBoost(mappedField, boost)); - Analyzer analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + Analyzer analyzer = mappedField.getTextSearchInfo().searchAnalyzer(); if (sharedAnalyzer != null && analyzer.equals(sharedAnalyzer) == false) { throw new IllegalArgumentException("All fields in [" + NAME + "] query must have the same search analyzer"); } @@ -327,10 +327,10 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } assert fieldsAndBoosts.isEmpty() == false; - String placeholderFieldName = fieldsAndBoosts.get(0).fieldType.name(); + String placeholderFieldName = fieldsAndBoosts.get(0).mappedField.name(); boolean canGenerateSynonymsPhraseQuery = autoGenerateSynonymsPhraseQuery; for (FieldAndBoost fieldAndBoost : fieldsAndBoosts) { - TextSearchInfo textSearchInfo = fieldAndBoost.fieldType.getTextSearchInfo(); + TextSearchInfo textSearchInfo = fieldAndBoost.mappedField.getTextSearchInfo(); canGenerateSynonymsPhraseQuery &= textSearchInfo.hasPositions(); } @@ -347,8 +347,8 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { private static void validateSimilarity(SearchExecutionContext context, Map fields) { for (Map.Entry entry : fields.entrySet()) { String name = entry.getKey(); - MappedFieldType fieldType = context.getFieldType(name); - if (fieldType != null && fieldType.getTextSearchInfo().similarity() != null) { + MappedField mappedField = context.getMappedField(name); + if (mappedField != null && mappedField.getTextSearchInfo().similarity() != null) { throw new IllegalArgumentException("[" + NAME + "] queries cannot be used with per-field similarities"); } } @@ -360,11 +360,11 @@ private static void validateSimilarity(SearchExecutionContext context, Map clusterStateSupplier; - private final Function mappingSupplier; + private final Function mappingSupplier; public 
CoordinatorRewriteContextProvider( XContentParserConfiguration parserConfig, @@ -35,7 +35,7 @@ public CoordinatorRewriteContextProvider( Client client, LongSupplier nowInMillis, Supplier clusterStateSupplier, - Function mappingSupplier + Function mappingSupplier ) { this.parserConfig = parserConfig; this.writeableRegistry = writeableRegistry; @@ -61,12 +61,12 @@ public CoordinatorRewriteContext getCoordinatorRewriteContext(Index index) { } } - DateFieldMapper.DateFieldType dateFieldType = mappingSupplier.apply(index); + MappedField dateField = mappingSupplier.apply(index); - if (dateFieldType == null) { + if (dateField == null) { return null; } - return new CoordinatorRewriteContext(parserConfig, writeableRegistry, client, nowInMillis, timestampRange, dateFieldType); + return new CoordinatorRewriteContext(parserConfig, writeableRegistry, client, nowInMillis, timestampRange, dateField); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java index b3e9ccc7f7877..93fdfcd509ffb 100644 --- a/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -103,11 +103,11 @@ public String getWriteableName() { @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { - MappedFieldType fieldType = context.getFieldType(field); - if (fieldType == null) { + MappedField 
mappedField = context.getMappedField(field); + if (mappedField == null) { return Queries.newMatchNoDocsQuery("Can't run [" + NAME + "] query on unmapped fields!"); } - return fieldType.distanceFeatureQuery(origin.origin(), pivot, context); + return mappedField.distanceFeatureQuery(origin.origin(), pivot, context); } String fieldName() { diff --git a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java index 11c056974ec5b..f4ad82545537b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -131,7 +131,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } public static Query newFilter(SearchExecutionContext context, String fieldPattern, boolean checkRewrite) { - Collection fields = getMappedFields(context, fieldPattern).stream().map(context::getFieldType).toList(); + Collection fields = getMappedFields(context, fieldPattern).stream().map(context::getMappedField).toList(); if (fields.isEmpty()) { if (checkRewrite) { @@ -142,12 +142,12 @@ public static Query newFilter(SearchExecutionContext context, String fieldPatter } if (fields.size() == 1) { - MappedFieldType field = fields.iterator().next(); + MappedField field = fields.iterator().next(); return new ConstantScoreQuery(field.existsQuery(context)); } BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder(); - for (MappedFieldType field : 
fields) { + for (MappedField field : fields) { boolFilterBuilder.add(field.existsQuery(context), BooleanClause.Occur.SHOULD); } return new ConstantScoreQuery(boolFilterBuilder.build()); diff --git a/server/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java index dcb2e2d3e09ea..5886e54f143b4 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -151,9 +151,9 @@ public static FieldMaskingSpanQueryBuilder fromXContent(XContentParser parser) t @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { String fieldInQuery = fieldName; - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType != null) { - fieldInQuery = fieldType.name(); + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField != null) { + fieldInQuery = mappedField.name(); } Query innerQuery = queryBuilder.toQuery(context); assert innerQuery instanceof SpanQuery; diff --git a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 850c4b0809a74..bc7e115530552 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -18,7 +18,7 @@ import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -317,8 +317,8 @@ public String getWriteableName() { protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { SearchExecutionContext context = queryRewriteContext.convertToSearchExecutionContext(); if (context != null) { - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { return new MatchNoneQueryBuilder(); } } @@ -327,12 +327,12 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { throw new IllegalStateException("Rewrite first"); } String rewrite = this.rewrite; - Query query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions, context); + Query query = mappedField.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions, context); if (query instanceof MultiTermQuery) { MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(rewrite, null, LoggingDeprecationHandler.INSTANCE); QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod); diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java 
b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index 730bae7685b6b..2183c27e56d87 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -24,7 +24,7 @@ import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.geometry.utils.Geohash; import org.elasticsearch.index.mapper.GeoShapeQueryable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -255,18 +255,18 @@ QueryValidationException checkLatLon() { @Override public Query doToQuery(SearchExecutionContext context) { - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { if (ignoreUnmapped) { return new MatchNoDocsQuery(); } else { throw new QueryShardException(context, "failed to find geo field [" + fieldName + "]"); } } - if ((fieldType instanceof GeoShapeQueryable) == false) { + if ((mappedField.type() instanceof GeoShapeQueryable) == false) { throw new QueryShardException( context, - "Field [" + fieldName + "] is of unsupported type [" + fieldType.typeName() + "] for [" + NAME + "] query" + "Field [" + fieldName + "] is of unsupported type [" + mappedField.typeName() + "] for [" + NAME + "] query" ); } @@ -292,14 +292,21 @@ public Query doToQuery(SearchExecutionContext context) { } } - final GeoShapeQueryable geoShapeQueryable = (GeoShapeQueryable) fieldType; + final GeoShapeQueryable geoShapeQueryable = (GeoShapeQueryable) mappedField.type(); final Rectangle rectangle = new Rectangle( luceneTopLeft.getLon(), luceneBottomRight.getLon(), luceneTopLeft.getLat(), luceneBottomRight.getLat() ); - return 
geoShapeQueryable.geoShapeQuery(context, fieldType.name(), SpatialStrategy.RECURSIVE, ShapeRelation.INTERSECTS, rectangle); + return geoShapeQueryable.geoShapeQuery( + mappedField.name(), + context, + mappedField.name(), + SpatialStrategy.RECURSIVE, + ShapeRelation.INTERSECTS, + rectangle + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java index 87f8c79da6ada..56c6c81d46580 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.geometry.Circle; import org.elasticsearch.index.mapper.GeoShapeQueryable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -217,8 +217,8 @@ public boolean ignoreUnmapped() { @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { if (ignoreUnmapped) { return new MatchNoDocsQuery(); } else { @@ -226,10 +226,10 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } } - if ((fieldType instanceof GeoShapeQueryable) == false) { + if ((mappedField.type() instanceof GeoShapeQueryable) == false) { throw new QueryShardException( context, - "Field [" + fieldName + "] is of unsupported type [" + fieldType.typeName() + "] for [" + NAME + "] query" + "Field [" + fieldName + "] is of unsupported type [" + mappedField.typeName() + "] for [" + NAME + "] query" 
); } @@ -242,9 +242,16 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { GeoUtils.normalizePoint(center, true, true); } - final GeoShapeQueryable geoShapeQueryable = (GeoShapeQueryable) fieldType; + final GeoShapeQueryable geoShapeQueryable = (GeoShapeQueryable) mappedField.type(); final Circle circle = new Circle(center.lon(), center.lat(), this.distance); - return geoShapeQueryable.geoShapeQuery(context, fieldType.name(), SpatialStrategy.RECURSIVE, ShapeRelation.INTERSECTS, circle); + return geoShapeQueryable.geoShapeQuery( + mappedField.name(), + context, + mappedField.name(), + SpatialStrategy.RECURSIVE, + ShapeRelation.INTERSECTS, + circle + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index 03358ea4835bf..10bfaf3521f0d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -150,15 +150,15 @@ public boolean ignoreUnmapped() { @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { if (ignoreUnmapped) { return new MatchNoDocsQuery(); } else { throw new QueryShardException(context, "failed to find 
geo_point field [" + fieldName + "]"); } } - if ((fieldType instanceof GeoPointFieldType) == false) { + if ((mappedField.type() instanceof GeoPointFieldType) == false) { throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field"); } @@ -207,9 +207,9 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } Polygon polygon = new Polygon(lats, lons); - Query query = LatLonPoint.newPolygonQuery(fieldType.name(), polygon); - if (fieldType.hasDocValues()) { - Query dvQuery = LatLonDocValuesField.newSlowPolygonQuery(fieldType.name(), polygon); + Query query = LatLonPoint.newPolygonQuery(mappedField.name(), polygon); + if (mappedField.hasDocValues()) { + Query dvQuery = LatLonDocValuesField.newSlowPolygonQuery(mappedField.name(), polygon); query = new IndexOrDocValuesQuery(query, dvQuery); } return query; diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 0f80cc7cc538b..45a256fa07627 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.GeoShapeQueryable; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -170,15 +171,15 @@ protected GeoShapeQueryBuilder newShapeQueryBuilder(String fieldName, Supplier maskedFields = new HashSet<>(); sourceProvider.extractFields(maskedFields); for (String maskedField : maskedFields) { - MappedFieldType ft = context.getFieldType(maskedField); - if (ft == null) { + MappedField masked = context.getMappedField(maskedField); + if (masked == 
null) { // Be lenient with unmapped fields so that cross-index search will work nicely return new MatchNoDocsQuery(); } } - return new IntervalQuery(field, sourceProvider.getSource(context, fieldType)); + return new IntervalQuery(field, sourceProvider.getSource(context, mappedField)); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java index 01c5a749caf02..7a1aff67f72f8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java +++ b/server/src/main/java/org/elasticsearch/index/query/IntervalsSourceProvider.java @@ -21,7 +21,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.script.Script; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -51,7 +51,7 @@ */ public abstract class IntervalsSourceProvider implements NamedWriteable, ToXContentFragment { - public abstract IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) throws IOException; + public abstract IntervalsSource getSource(SearchExecutionContext context, MappedField mappedField) throws IOException; public abstract void extractFields(Set fields); @@ -128,41 +128,41 @@ public Match(StreamInput in) throws IOException { } private static IntervalsSource intervals( - MappedFieldType fieldType, + MappedField mappedField, String text, int maxGaps, boolean ordered, NamedAnalyzer analyzer, SearchExecutionContext context ) throws IOException { - IntervalBuilder builder = new IntervalBuilder(fieldType.name(), analyzer) { + IntervalBuilder builder = new IntervalBuilder(mappedField.name(), analyzer) { @Override protected 
IntervalsSource termIntervals(BytesRef term) { - return fieldType.termIntervals(term, context); + return mappedField.termIntervals(term, context); } }; return builder.analyzeText(text, maxGaps, ordered); } @Override - public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) throws IOException { + public IntervalsSource getSource(SearchExecutionContext context, MappedField mappedField) throws IOException { NamedAnalyzer analyzer = null; if (this.analyzer != null) { analyzer = context.getIndexAnalyzers().get(this.analyzer); } if (useField != null) { - fieldType = context.getFieldType(useField); - assert fieldType != null; + mappedField = context.getMappedField(useField); + assert mappedField != null; } if (analyzer == null) { - analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + analyzer = mappedField.getTextSearchInfo().searchAnalyzer(); } - IntervalsSource source = intervals(fieldType, query, maxGaps, ordered, analyzer, context); + IntervalsSource source = intervals(mappedField, query, maxGaps, ordered, analyzer, context); if (useField != null) { source = Intervals.fixField(useField, source); } if (filter != null) { - source = filter.filter(source, context, fieldType); + source = filter.filter(source, context, mappedField); } return source; } @@ -293,16 +293,16 @@ public Disjunction(StreamInput in) throws IOException { } @Override - public IntervalsSource getSource(SearchExecutionContext ctx, MappedFieldType fieldType) throws IOException { + public IntervalsSource getSource(SearchExecutionContext ctx, MappedField mappedField) throws IOException { List sources = new ArrayList<>(); for (IntervalsSourceProvider provider : subSources) { - sources.add(provider.getSource(ctx, fieldType)); + sources.add(provider.getSource(ctx, mappedField)); } IntervalsSource source = Intervals.or(sources.toArray(new IntervalsSource[0])); if (filter == null) { return source; } - return filter.filter(source, ctx, fieldType); + return 
filter.filter(source, ctx, mappedField); } @Override @@ -404,14 +404,14 @@ public Combine(StreamInput in) throws IOException { } @Override - public IntervalsSource getSource(SearchExecutionContext ctx, MappedFieldType fieldType) throws IOException { + public IntervalsSource getSource(SearchExecutionContext ctx, MappedField mappedField) throws IOException { List ss = new ArrayList<>(); for (IntervalsSourceProvider provider : subSources) { - ss.add(provider.getSource(ctx, fieldType)); + ss.add(provider.getSource(ctx, mappedField)); } IntervalsSource source = IntervalBuilder.combineSources(ss, maxGaps, ordered); if (filter != null) { - return filter.filter(source, ctx, fieldType); + return filter.filter(source, ctx, mappedField); } return source; } @@ -531,20 +531,20 @@ public Prefix(StreamInput in) throws IOException { } @Override - public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) throws IOException { + public IntervalsSource getSource(SearchExecutionContext context, MappedField mappedField) throws IOException { NamedAnalyzer analyzer = null; if (this.analyzer != null) { analyzer = context.getIndexAnalyzers().get(this.analyzer); } if (useField != null) { - fieldType = context.getFieldType(useField); - assert fieldType != null; + mappedField = context.getMappedField(useField); + assert mappedField != null; } if (analyzer == null) { - analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + analyzer = mappedField.getTextSearchInfo().searchAnalyzer(); } - final BytesRef prefixTerm = analyzer.normalize(fieldType.name(), prefix); - IntervalsSource source = fieldType.prefixIntervals(prefixTerm, context); + final BytesRef prefixTerm = analyzer.normalize(mappedField.name(), prefix); + IntervalsSource source = mappedField.prefixIntervals(prefixTerm, context); if (useField != null) { source = Intervals.fixField(useField, source); } @@ -649,20 +649,20 @@ public Wildcard(StreamInput in) throws IOException { } @Override - public 
IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) { + public IntervalsSource getSource(SearchExecutionContext context, MappedField mappedField) { NamedAnalyzer analyzer = null; if (this.analyzer != null) { analyzer = context.getIndexAnalyzers().get(this.analyzer); } if (useField != null) { - fieldType = context.getFieldType(useField); - assert fieldType != null; + mappedField = context.getMappedField(useField); + assert mappedField != null; } if (analyzer == null) { - analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + analyzer = mappedField.getTextSearchInfo().searchAnalyzer(); } - BytesRef normalizedPattern = analyzer.normalize(fieldType.name(), pattern); - IntervalsSource source = fieldType.wildcardIntervals(normalizedPattern, context); + BytesRef normalizedPattern = analyzer.normalize(mappedField.name(), pattern); + IntervalsSource source = mappedField.wildcardIntervals(normalizedPattern, context); if (useField != null) { source = Intervals.fixField(useField, source); } @@ -776,21 +776,21 @@ public Fuzzy(StreamInput in) throws IOException { } @Override - public IntervalsSource getSource(SearchExecutionContext context, MappedFieldType fieldType) { + public IntervalsSource getSource(SearchExecutionContext context, MappedField mappedField) { NamedAnalyzer analyzer = null; if (this.analyzer != null) { analyzer = context.getIndexAnalyzers().get(this.analyzer); } if (useField != null) { - fieldType = context.getFieldType(useField); - assert fieldType != null; + mappedField = context.getMappedField(useField); + assert mappedField != null; } if (analyzer == null) { - analyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + analyzer = mappedField.getTextSearchInfo().searchAnalyzer(); } // Fuzzy queries only work with unicode content so it's legal to call utf8ToString here. 
- String normalizedTerm = analyzer.normalize(fieldType.name(), term).utf8ToString(); - IntervalsSource source = fieldType.fuzzyIntervals( + String normalizedTerm = analyzer.normalize(mappedField.name(), term).utf8ToString(); + IntervalsSource source = mappedField.fuzzyIntervals( normalizedTerm, fuzziness.asDistance(term), prefixLength, @@ -954,12 +954,12 @@ public IntervalFilter(StreamInput in) throws IOException { } } - public IntervalsSource filter(IntervalsSource input, SearchExecutionContext context, MappedFieldType fieldType) throws IOException { + public IntervalsSource filter(IntervalsSource input, SearchExecutionContext context, MappedField mappedField) throws IOException { if (script != null) { IntervalFilterScript ifs = context.compile(script, IntervalFilterScript.CONTEXT).newInstance(); return new ScriptFilterSource(input, script.getIdOrCode(), ifs); } - IntervalsSource filterSource = filter.getSource(context, fieldType); + IntervalsSource filterSource = filter.getSource(context, mappedField); return switch (type) { case "containing" -> Intervals.containing(input, filterSource); case "contained_by" -> Intervals.containedBy(input, filterSource); diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java index 3fbfc1ce0f694..96aefd94561aa 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.search.MatchQueryParser; import org.elasticsearch.xcontent.ParseField; import 
org.elasticsearch.xcontent.XContentBuilder; @@ -178,9 +178,9 @@ private NamedAnalyzer configuredAnalyzer(SearchExecutionContext context) { if (analyzer != null) { return context.getIndexAnalyzers().get(analyzer); } - MappedFieldType mft = context.getFieldType(fieldName); - if (mft != null) { - return mft.getTextSearchInfo().searchAnalyzer(); + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField != null) { + return mappedField.getTextSearchInfo().searchAnalyzer(); } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index 8353063a2cc26..6d9bd4822925b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.search.MatchQueryParser; import org.elasticsearch.xcontent.ParseField; @@ -391,9 +391,9 @@ private NamedAnalyzer configuredAnalyzer(SearchExecutionContext context) { if (analyzer != null) { return context.getIndexAnalyzers().get(analyzer); } - MappedFieldType mft = context.getFieldType(fieldName); - if (mft != null) { - return mft.getTextSearchInfo().searchAnalyzer(); + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField != null) { + return mappedField.getTextSearchInfo().searchAnalyzer(); } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 
5a6c871d7a8de..db2f2da64bb76 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; @@ -995,8 +996,8 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } } else { for (String field : fields) { - MappedFieldType fieldType = context.getFieldType(field); - if (fieldType != null && SUPPORTED_FIELD_TYPES.contains(fieldType.getClass()) == false) { + MappedField mappedField = context.getMappedField(field); + if (mappedField != null && SUPPORTED_FIELD_TYPES.contains(mappedField.type().getClass()) == false) { if (failOnUnsupportedField) { throw new IllegalArgumentException("more_like_this only supports text/keyword fields: [" + field + "]"); } else { @@ -1004,7 +1005,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { continue; } } - moreLikeFields.add(fieldType == null ? field : fieldType.name()); + moreLikeFields.add(mappedField == null ? 
field : mappedField.name()); } } @@ -1123,7 +1124,7 @@ private static void checkRoutingMissingException(MultiTermVectorsItemResponse re } private static void handleExclude(BooleanQuery.Builder boolQuery, Item[] likeItems, SearchExecutionContext context) { - MappedFieldType idField = context.getFieldType(IdFieldMapper.NAME); + MappedField idField = context.getMappedField(IdFieldMapper.NAME); if (idField == null) { // no mappings, nothing to exclude return; diff --git a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index 8867d91902da7..bcaeacb6f2c56 100644 --- a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.index.mapper.ConstantFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -189,14 +189,14 @@ public String getWriteableName() { protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { SearchExecutionContext context = queryRewriteContext.convertToSearchExecutionContext(); if (context != null) { - MappedFieldType fieldType = context.getFieldType(this.fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(this.fieldName); + if (mappedField == null) { return new MatchNoneQueryBuilder(); - } else if (fieldType instanceof ConstantFieldType) { + } else if (mappedField.type() instanceof ConstantFieldType) { // This logic is correct for all field types, but by only applying it to constant // fields we 
also have the guarantee that it doesn't perform I/O, which is important // since rewrites might happen on a network thread. - Query query = fieldType.prefixQuery(value, null, caseInsensitive, context); // the rewrite method doesn't matter + Query query = mappedField.prefixQuery(value, null, caseInsensitive, context); // the rewrite method doesn't matter if (query instanceof MatchAllDocsQuery) { return new MatchAllQueryBuilder(); } else if (query instanceof MatchNoDocsQuery) { @@ -214,11 +214,11 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws protected Query doToQuery(SearchExecutionContext context) throws IOException { MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewrite, null, LoggingDeprecationHandler.INSTANCE); - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { throw new IllegalStateException("Rewrite first"); } - return fieldType.prefixQuery(value, method, caseInsensitive, context); + return mappedField.prefixQuery(value, method, caseInsensitive, context); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index addd5f1ceca7a..a231ca0a512f0 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -435,8 +436,8 @@ public String getWriteableName() { protected 
MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException { CoordinatorRewriteContext coordinatorRewriteContext = queryRewriteContext.convertToCoordinatorRewriteContext(); if (coordinatorRewriteContext != null) { - final MappedFieldType fieldType = coordinatorRewriteContext.getFieldType(fieldName); - if (fieldType instanceof final DateFieldMapper.DateFieldType dateFieldType) { + final MappedField mappedField = coordinatorRewriteContext.getMappedField(fieldName); + if (mappedField != null && mappedField.type() instanceof DateFieldMapper.DateFieldType dateFieldType) { if (coordinatorRewriteContext.hasTimestampData() == false) { return MappedFieldType.Relation.DISJOINT; } @@ -459,8 +460,8 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC SearchExecutionContext searchExecutionContext = queryRewriteContext.convertToSearchExecutionContext(); if (searchExecutionContext != null) { - final MappedFieldType fieldType = searchExecutionContext.getFieldType(fieldName); - if (fieldType == null) { + final MappedField mappedField = searchExecutionContext.getMappedField(fieldName); + if (mappedField == null) { return MappedFieldType.Relation.DISJOINT; } if (searchExecutionContext.getIndexReader() == null) { @@ -469,7 +470,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC } DateMathParser dateMathParser = getForceDateParser(); - return fieldType.isFieldWithinQuery( + return mappedField.isFieldWithinQuery( searchExecutionContext.getIndexReader(), from, to, @@ -520,18 +521,19 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { return new MatchNoDocsQuery("No mappings yet"); } final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context - .getFieldType(FieldNamesFieldMapper.NAME); + .getMappedField(FieldNamesFieldMapper.NAME) + .type(); // Exists query would fail if the fieldNames field is
disabled. if (fieldNamesFieldType.isEnabled()) { return ExistsQueryBuilder.newFilter(context, fieldName, false); } } - MappedFieldType mapper = context.getFieldType(this.fieldName); - if (mapper == null) { + MappedField mappedField = context.getMappedField(this.fieldName); + if (mappedField == null) { throw new IllegalStateException("Rewrite first"); } DateMathParser forcedDateParser = getForceDateParser(); - return mapper.rangeQuery(from, to, includeLower, includeUpper, relation, timeZone, forcedDateParser, context); + return mappedField.rangeQuery(from, to, includeLower, includeUpper, relation, timeZone, forcedDateParser, context); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index f6ee1620a0ead..48d17d2fbcbaa 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -280,9 +280,9 @@ protected Query doToQuery(SearchExecutionContext context) throws QueryShardExcep // For BWC we mask irrelevant bits (RegExp changed ALL from 0xffff to 0xff) int sanitisedSyntaxFlag = syntaxFlagsValue & RegExp.ALL; - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType != null) { - query = fieldType.regexpQuery(value, sanitisedSyntaxFlag, matchFlagsValue, maxDeterminizedStates, method, context); + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField != 
null) { + query = mappedField.regexpQuery(value, sanitisedSyntaxFlag, matchFlagsValue, maxDeterminizedStates, method, context); } if (query == null) { RegexpQuery regexpQuery = new RegexpQuery( diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index c3c0a6b24ff75..d5a346b4c477f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -36,7 +36,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; @@ -91,7 +91,7 @@ public class SearchExecutionContext extends QueryRewriteContext { private final MappingLookup mappingLookup; private final SimilarityService similarityService; private final BitsetFilterCache bitsetFilterCache; - private final TriFunction, IndexFieldData> indexFieldDataService; + private final TriFunction, IndexFieldData> indexFieldDataService; private SearchLookup lookup = null; private final int shardId; @@ -110,7 +110,7 @@ public class SearchExecutionContext extends QueryRewriteContext { private boolean mapUnmappedFieldAsString; private NestedScope nestedScope; private final ValuesSourceRegistry valuesSourceRegistry; - private final Map runtimeMappings; + private final Map runtimeMappings; private Predicate allowedFields; /** @@ -121,7 +121,7 @@ public SearchExecutionContext( int shardRequestIndex, IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, - TriFunction, IndexFieldData> indexFieldDataLookup, + 
TriFunction, IndexFieldData> indexFieldDataLookup, MapperService mapperService, MappingLookup mappingLookup, SimilarityService similarityService, @@ -194,7 +194,7 @@ private SearchExecutionContext( int shardRequestIndex, IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, - TriFunction, IndexFieldData> indexFieldDataLookup, + TriFunction, IndexFieldData> indexFieldDataLookup, MapperService mapperService, MappingLookup mappingLookup, SimilarityService similarityService, @@ -208,7 +208,7 @@ private SearchExecutionContext( Index fullyQualifiedIndex, BooleanSupplier allowExpensiveQueries, ValuesSourceRegistry valuesSourceRegistry, - Map runtimeMappings, + Map runtimeMappings, Predicate allowedFields ) { super(parserConfig, namedWriteableRegistry, client, nowInMillis); @@ -243,7 +243,7 @@ private void reset() { * The similarity to use in searches, which takes into account per-field configuration. */ public Similarity getSearchSimilarity() { - return similarityService != null ? similarityService.similarity(this::fieldType) : null; + return similarityService != null ? 
similarityService.similarity(this::mappedField) : null; } /** @@ -278,11 +278,11 @@ public boolean allowExpensiveQueries() { } @SuppressWarnings("unchecked") - public > IFD getForField(MappedFieldType fieldType) { + public > IFD getForField(MappedField mappedField) { return (IFD) indexFieldDataService.apply( - fieldType, + mappedField, fullyQualifiedIndex.getName(), - () -> this.lookup().forkAndTrackFieldReferences(fieldType.name()) + () -> this.lookup().forkAndTrackFieldReferences(mappedField.name()) ); } @@ -316,7 +316,7 @@ public boolean hasMappings() { * Returns the names of all mapped fields that match a given pattern * * All names returned by this method are guaranteed to resolve to a - * MappedFieldType if passed to {@link #getFieldType(String)} + * MappedFieldType if passed to {@link #getMappedField(String)} * * @param pattern the field name pattern */ @@ -343,7 +343,7 @@ public Set getMatchingFieldNames(String pattern) { } /** - * Returns the {@link MappedFieldType} for the provided field name. + * Returns the {@link MappedField} for the provided field name. * If the field is not mapped, the behaviour depends on the index.query.parse.allow_unmapped_fields setting, which defaults to true. * In case unmapped fields are allowed, null is returned when the field is not mapped. * In case unmapped fields are not allowed, either an exception is thrown or the field is automatically mapped as a text field. 
@@ -351,24 +351,24 @@ public Set getMatchingFieldNames(String pattern) { * @see SearchExecutionContext#setAllowUnmappedFields(boolean) * @see SearchExecutionContext#setMapUnmappedFieldAsString(boolean) */ - public MappedFieldType getFieldType(String name) { - return failIfFieldMappingNotFound(name, fieldType(name)); + public MappedField getMappedField(String name) { + return failIfFieldMappingNotFound(name, mappedField(name)); } /** * Returns true if the field identified by the provided name is mapped, false otherwise */ public boolean isFieldMapped(String name) { - return fieldType(name) != null; + return mappedField(name) != null; } - private MappedFieldType fieldType(String name) { + private MappedField mappedField(String name) { // If the field is not allowed, behave as if it is not mapped if (allowedFields != null && false == allowedFields.test(name)) { return null; } - MappedFieldType fieldType = runtimeMappings.get(name); - return fieldType == null ? mappingLookup.getFieldType(name) : fieldType; + MappedField mappedField = runtimeMappings.get(name); + return mappedField == null ? mappingLookup.getMappedField(name) : mappedField; } public boolean isMetadataField(String field) { @@ -404,7 +404,7 @@ public SourceLoader newSourceLoader(boolean forceSyntheticSource) { * Given a type (eg. long, string, ...), returns an anonymous field type that can be used for search operations. * Generally used to handle unmapped fields in the context of sorting. 
*/ - public MappedFieldType buildAnonymousFieldType(String type) { + public MappedField buildAnonymousField(String type) { MappingParserContext parserContext = mapperService.parserContext(); Mapper.TypeParser typeParser = parserContext.typeParser(type); if (typeParser == null) { @@ -413,7 +413,7 @@ public MappedFieldType buildAnonymousFieldType(String type) { Mapper.Builder builder = typeParser.parse("__anonymous_", Collections.emptyMap(), parserContext); Mapper mapper = builder.build(MapperBuilderContext.ROOT); if (mapper instanceof FieldMapper) { - return ((FieldMapper) mapper).fieldType(); + return ((FieldMapper) mapper).field(); } throw new IllegalArgumentException("Mapper for type [" + type + "] must be a leaf field"); } @@ -451,12 +451,12 @@ public void setAllowedFields(Predicate allowedFields) { this.allowedFields = allowedFields; } - MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) { + MappedField failIfFieldMappingNotFound(String name, MappedField fieldMapping) { if (fieldMapping != null || allowUnmappedFields) { return fieldMapping; } else if (mapUnmappedFieldAsString) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, getIndexAnalyzers()); - return builder.build(MapperBuilderContext.ROOT).fieldType(); + return builder.build(MapperBuilderContext.ROOT).field(); } else { throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name); } @@ -477,7 +477,7 @@ public boolean containsBrokenAnalysis(String field) { public SearchLookup lookup() { if (this.lookup == null) { this.lookup = new SearchLookup( - this::getFieldType, + this::getMappedField, (fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup) ); } @@ -677,7 +677,7 @@ public Index getFullyQualifiedIndex() { return fullyQualifiedIndex; } - private static Map parseRuntimeMappings( + private static Map parseRuntimeMappings( Map runtimeMappings, 
MapperService mapperService, IndexSettings indexSettings, @@ -689,7 +689,7 @@ private static Map parseRuntimeMappings( // TODO add specific tests to SearchExecutionTests similar to the ones in FieldTypeLookupTests MappingParserContext parserContext = mapperService.parserContext(); Map runtimeFields = RuntimeField.parseRuntimeFields(new HashMap<>(runtimeMappings), parserContext, false); - Map runtimeFieldTypes = RuntimeField.collectFieldTypes(runtimeFields.values()); + Map runtimeFieldTypes = RuntimeField.collectMappedFields(runtimeFields.values()); if (false == indexSettings.getIndexMetadata().getRoutingPaths().isEmpty()) { for (String r : runtimeMappings.keySet()) { if (Regex.simpleMatch(indexSettings.getIndexMetadata().getRoutingPaths(), r)) { diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index e2bd8c88f9676..79ce6b1975690 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.lucene.queries.SpanMatchNoDocsQuery; import org.elasticsearch.xcontent.ParseField; @@ -124,8 +124,8 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { if (multiTermQueryBuilder instanceof MatchNoneQueryBuilder) { return new SpanMatchNoDocsQuery(this.multiTermQueryBuilder.fieldName(), "Inner query rewrote to match_none"); } else if (multiTermQueryBuilder instanceof PrefixQueryBuilder 
prefixBuilder) { - MappedFieldType fieldType = context.getFieldType(prefixBuilder.fieldName()); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(prefixBuilder.fieldName()); + if (mappedField == null) { throw new IllegalStateException("Rewrite first"); } final SpanMultiTermQueryWrapper.SpanRewriteMethod spanRewriteMethod; @@ -143,7 +143,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } else { spanRewriteMethod = new SpanBooleanQueryRewriteWithMaxClause(); } - return fieldType.spanPrefixQuery(prefixBuilder.value(), spanRewriteMethod, context); + return mappedField.spanPrefixQuery(prefixBuilder.value(), spanRewriteMethod, context); } else { Query subQuery = multiTermQueryBuilder.toQuery(context); while (true) { diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index 9b9a772af7a25..f3c7354b1a1fb 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentLocation; @@ -254,8 +254,8 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } private static String queryFieldName(SearchExecutionContext context, String fieldName) { - MappedFieldType fieldType = context.getFieldType(fieldName); - return fieldType != null ? fieldType.name() : fieldName; + MappedField mappedField = context.getMappedField(fieldName); + return mappedField != null ? 
mappedField.name() : fieldName; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java index 87940e2f09f16..f2cbad4452d2f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -70,7 +71,7 @@ public SpanTermQueryBuilder(StreamInput in) throws IOException { @Override protected SpanQuery doToQuery(SearchExecutionContext context) throws IOException { - MappedFieldType mapper = context.getFieldType(fieldName); + MappedField mapper = context.getMappedField(fieldName); Term term; if (mapper == null) { term = new Term(fieldName, BytesRefs.toBytesRef(value)); diff --git a/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java index 0e5084a576ee2..3b33ce13b57a8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.mapper.ConstantFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -171,18 
+171,18 @@ protected void addExtraXContent(XContentBuilder builder, Params params) throws I protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { SearchExecutionContext context = queryRewriteContext.convertToSearchExecutionContext(); if (context != null) { - MappedFieldType fieldType = context.getFieldType(this.fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(this.fieldName); + if (mappedField == null) { return new MatchNoneQueryBuilder(); - } else if (fieldType instanceof ConstantFieldType) { + } else if (mappedField.type() instanceof ConstantFieldType) { // This logic is correct for all field types, but by only applying it to constant // fields we also have the guarantee that it doesn't perform I/O, which is important // since rewrites might happen on a network thread. Query query = null; if (caseInsensitive) { - query = fieldType.termQueryCaseInsensitive(value, context); + query = mappedField.termQueryCaseInsensitive(value, context); } else { - query = fieldType.termQuery(value, context); + query = mappedField.termQuery(value, context); } if (query instanceof MatchAllDocsQuery) { @@ -199,7 +199,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { - MappedFieldType mapper = context.getFieldType(this.fieldName); + MappedField mapper = context.getMappedField(this.fieldName); if (mapper == null) { throw new IllegalStateException("Rewrite first"); } diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index c88a0ee73da8a..6cd4a30cb10b1 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -26,7 +26,7 @@ import 
org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.ConstantFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -338,11 +338,11 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { + "] index level setting." ); } - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { throw new IllegalStateException("Rewrite first"); } - return fieldType.termsQuery(values, context); + return mappedField.termsQuery(values, context); } private static void fetch(TermsLookup termsLookup, Client client, ActionListener> actionListener) { @@ -390,14 +390,14 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) { SearchExecutionContext context = queryRewriteContext.convertToSearchExecutionContext(); if (context != null) { - MappedFieldType fieldType = context.getFieldType(this.fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(this.fieldName); + if (mappedField == null) { return new MatchNoneQueryBuilder(); - } else if (fieldType instanceof ConstantFieldType) { + } else if (mappedField.type() instanceof ConstantFieldType) { // This logic is correct for all field types, but by only applying it to constant // fields we also have the guarantee that it doesn't perform I/O, which is important // since rewrites might happen on a network thread. 
- Query query = fieldType.termsQuery(values, context); + Query query = mappedField.termsQuery(values, context); if (query instanceof MatchAllDocsQuery) { return new MatchAllQueryBuilder(); } else if (query instanceof MatchNoDocsQuery) { diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index 37bceefb9a7fd..22abcd9234d41 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.fielddata.IndexNumericFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.script.Script; import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.xcontent.ParseField; @@ -246,11 +246,11 @@ protected Query doToQuery(SearchExecutionContext context) { * Visible only for testing purposes. 
*/ List createTermQueries(SearchExecutionContext context) { - final MappedFieldType fieldType = context.getFieldType(fieldName); + final MappedField mappedField = context.getMappedField(fieldName); final List queries = new ArrayList<>(values.size()); for (Object value : values) { - if (fieldType != null) { - queries.add(fieldType.termQuery(value, context)); + if (mappedField != null) { + queries.add(mappedField.termQuery(value, context)); } else { queries.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(value)))); } @@ -261,12 +261,12 @@ List createTermQueries(SearchExecutionContext context) { private LongValuesSource createValuesSource(SearchExecutionContext context) { LongValuesSource longValuesSource; if (minimumShouldMatchField != null) { - MappedFieldType msmFieldType = context.getFieldType(minimumShouldMatchField); - if (msmFieldType == null) { + MappedField msmField = context.getMappedField(minimumShouldMatchField); + if (msmField == null) { throw new QueryShardException(context, "failed to find minimum_should_match field [" + minimumShouldMatchField + "]"); } - IndexNumericFieldData fieldData = context.getForField(msmFieldType); + IndexNumericFieldData fieldData = context.getForField(msmField); longValuesSource = new FieldValuesSource(fieldData); } else if (minimumShouldMatchScript != null) { TermsSetQueryScript.Factory factory = context.compile(minimumShouldMatchScript, TermsSetQueryScript.CONTEXT); diff --git a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index f0a9d0495c0d2..30d81f7cdd5be 100644 --- a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import 
org.elasticsearch.index.mapper.ConstantFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -201,14 +201,14 @@ public static WildcardQueryBuilder fromXContent(XContentParser parser) throws IO protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { SearchExecutionContext context = queryRewriteContext.convertToSearchExecutionContext(); if (context != null) { - MappedFieldType fieldType = context.getFieldType(this.fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(this.fieldName); + if (mappedField == null) { return new MatchNoneQueryBuilder(); - } else if (fieldType instanceof ConstantFieldType) { + } else if (mappedField.type() instanceof ConstantFieldType) { // This logic is correct for all field types, but by only applying it to constant // fields we also have the guarantee that it doesn't perform I/O, which is important // since rewrites might happen on a network thread. 
- Query query = fieldType.wildcardQuery(value, null, caseInsensitive, context); // the rewrite method doesn't matter + Query query = mappedField.wildcardQuery(value, null, caseInsensitive, context); // the rewrite method doesn't matter if (query instanceof MatchAllDocsQuery) { return new MatchAllQueryBuilder(); } else if (query instanceof MatchNoDocsQuery) { @@ -224,14 +224,14 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { - MappedFieldType fieldType = context.getFieldType(fieldName); + MappedField mappedField = context.getMappedField(fieldName); - if (fieldType == null) { + if (mappedField == null) { throw new IllegalStateException("Rewrite first"); } MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewrite, null, LoggingDeprecationHandler.INSTANCE); - return fieldType.wildcardQuery(value, method, caseInsensitive, context); + return mappedField.wildcardQuery(value, method, caseInsensitive, context); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index 568886f3804f1..e3064c8ec19c4 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -34,7 +34,7 @@ import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.MultiValueMode; 
@@ -198,26 +198,26 @@ private AbstractDistanceScoreFunction parseVariable( MultiValueMode mode ) throws IOException { // the field must exist, else we cannot read the value for the doc later - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { throw new ParsingException(parser.getTokenLocation(), "unknown field [{}]", fieldName); } // dates and time and geo need special handling parser.nextToken(); // TODO these ain't gonna work with runtime fields - if (fieldType instanceof DateFieldMapper.DateFieldType) { - return parseDateVariable(parser, context, fieldType, mode); - } else if (fieldType instanceof GeoPointFieldType) { - return parseGeoVariable(parser, context, fieldType, mode); - } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) { - return parseNumberVariable(parser, context, fieldType, mode); + if (mappedField.type() instanceof DateFieldMapper.DateFieldType) { + return parseDateVariable(parser, context, mappedField, mode); + } else if (mappedField.type() instanceof GeoPointFieldType) { + return parseGeoVariable(parser, context, mappedField, mode); + } else if (mappedField.type() instanceof NumberFieldMapper.NumberFieldType) { + return parseNumberVariable(parser, context, mappedField, mode); } else { throw new ParsingException( parser.getTokenLocation(), "field [{}] is of type [{}], but only numeric types are supported.", fieldName, - fieldType + mappedField ); } } @@ -225,7 +225,7 @@ private AbstractDistanceScoreFunction parseVariable( private AbstractDistanceScoreFunction parseNumberVariable( XContentParser parser, SearchExecutionContext context, - MappedFieldType fieldType, + MappedField mappedField, MultiValueMode mode ) throws IOException { XContentParser.Token token; @@ -260,14 +260,14 @@ private AbstractDistanceScoreFunction parseNumberVariable( DecayFunctionBuilder.ORIGIN ); } - IndexNumericFieldData 
numericFieldData = context.getForField(fieldType); + IndexNumericFieldData numericFieldData = context.getForField(mappedField); return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); } private AbstractDistanceScoreFunction parseGeoVariable( XContentParser parser, SearchExecutionContext context, - MappedFieldType fieldType, + MappedField mappedField, MultiValueMode mode ) throws IOException { XContentParser.Token token; @@ -300,7 +300,7 @@ private AbstractDistanceScoreFunction parseGeoVariable( } double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT); double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT); - IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); + IndexGeoPointFieldData indexFieldData = context.getForField(mappedField); return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode); } @@ -308,7 +308,7 @@ private AbstractDistanceScoreFunction parseGeoVariable( private AbstractDistanceScoreFunction parseDateVariable( XContentParser parser, SearchExecutionContext context, - MappedFieldType dateFieldType, + MappedField mappedField, MultiValueMode mode ) throws IOException { XContentParser.Token token; @@ -336,7 +336,13 @@ private AbstractDistanceScoreFunction parseDateVariable( if (originString == null) { origin = context.nowInMillis(); } else { - origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToLong(originString, false, null, null, context::nowInMillis); + origin = ((DateFieldMapper.DateFieldType) mappedField.type()).parseToLong( + originString, + false, + null, + null, + context::nowInMillis + ); } if (scaleString == null) { @@ -350,7 +356,7 @@ private AbstractDistanceScoreFunction parseDateVariable( double scale = val.getMillis(); val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".offset"); double 
offset = val.getMillis(); - IndexNumericFieldData numericFieldData = context.getForField(dateFieldType); + IndexNumericFieldData numericFieldData = context.getForField(mappedField); return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); } diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/FieldValueFactorFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/FieldValueFactorFunctionBuilder.java index 3431349332873..5149103a9fbf9 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/FieldValueFactorFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/FieldValueFactorFunctionBuilder.java @@ -140,7 +140,7 @@ public Version getMinimalSupportedVersion() { protected ScoreFunction doToFunction(SearchExecutionContext context) { IndexNumericFieldData fieldData = null; if (context.isFieldMapped(field)) { - fieldData = context.getForField(context.getFieldType(field)); + fieldData = context.getForField(context.getMappedField(field)); } else { if (missing == null) { throw new ElasticsearchException("Unable to find a field mapper for field [" + field + "]. No 'missing' value defined."); diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java index 53e09336abfe9..0a116e6bc632f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java @@ -166,7 +166,7 @@ protected ScoreFunction doToFunction(SearchExecutionContext context) { ); } int seed = this.seed == null ? 
hash(context.nowInMillis()) : this.seed; - return new RandomScoreFunction(seed, salt, context.getForField(context.getFieldType(fieldName))); + return new RandomScoreFunction(seed, salt, context.getForField(context.getMappedField(fieldName))); } } diff --git a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java index 42412dccfab50..e20150440e63c 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java @@ -37,7 +37,7 @@ import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.PlaceHolderFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextSearchInfo; @@ -196,21 +196,25 @@ public void setAutoGenerateSynonymsPhraseQuery(boolean enabled) { } public Query parse(Type type, String fieldName, Object value) throws IOException { - final MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + final MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { return newUnmappedFieldQuery(fieldName); } // We check here that the field supports text searches - // if it doesn't, we can bail out early without doing any further parsing. 
- if (fieldType.getTextSearchInfo() == TextSearchInfo.NONE) { + if (mappedField.getTextSearchInfo() == TextSearchInfo.NONE) { IllegalArgumentException iae; - if (fieldType instanceof PlaceHolderFieldMapper.PlaceHolderFieldType) { + if (mappedField.type() instanceof PlaceHolderFieldMapper.PlaceHolderFieldType) { iae = new IllegalArgumentException( - "Field [" + fieldType.name() + "] of type [" + fieldType.typeName() + "] in legacy index does not support match queries" + "Field [" + + mappedField.name() + + "] of type [" + + mappedField.typeName() + + "] in legacy index does not support match queries" ); } else { iae = new IllegalArgumentException( - "Field [" + fieldType.name() + "] of type [" + fieldType.typeName() + "] does not support match queries" + "Field [" + mappedField.name() + "] of type [" + mappedField.typeName() + "] does not support match queries" ); } if (lenient) { @@ -219,11 +223,11 @@ public Query parse(Type type, String fieldName, Object value) throws IOException throw iae; } - Analyzer analyzer = getAnalyzer(fieldType, type == Type.PHRASE || type == Type.PHRASE_PREFIX); + Analyzer analyzer = getAnalyzer(mappedField, type == Type.PHRASE || type == Type.PHRASE_PREFIX); assert analyzer != null; - MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType, enablePositionIncrements, autoGenerateSynonymsPhraseQuery); - String resolvedFieldName = fieldType.name(); + MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, mappedField, enablePositionIncrements, autoGenerateSynonymsPhraseQuery); + String resolvedFieldName = mappedField.name(); String stringValue = value.toString(); /* @@ -235,7 +239,8 @@ public Query parse(Type type, String fieldName, Object value) throws IOException if (analyzer == Lucene.KEYWORD_ANALYZER && type != Type.PHRASE_PREFIX) { final Term term = new Term(resolvedFieldName, stringValue); if (type == Type.BOOLEAN_PREFIX - && (fieldType instanceof TextFieldMapper.TextFieldType || fieldType instanceof 
KeywordFieldMapper.KeywordFieldType)) { + && (mappedField.type() instanceof TextFieldMapper.TextFieldType + || mappedField.type() instanceof KeywordFieldMapper.KeywordFieldType)) { return builder.newPrefixQuery(term); } else { return builder.newTermQuery(term, BoostAttribute.DEFAULT_BOOST); @@ -251,8 +256,8 @@ public Query parse(Type type, String fieldName, Object value) throws IOException return query == null ? zeroTermsQuery.asQuery() : query; } - protected Analyzer getAnalyzer(MappedFieldType fieldType, boolean quoted) { - TextSearchInfo tsi = fieldType.getTextSearchInfo(); + protected Analyzer getAnalyzer(MappedField mappedField, boolean quoted) { + TextSearchInfo tsi = mappedField.getTextSearchInfo(); assert tsi != TextSearchInfo.NONE; if (analyzer == null) { return quoted ? tsi.searchQuoteAnalyzer() : tsi.searchAnalyzer(); @@ -262,21 +267,21 @@ protected Analyzer getAnalyzer(MappedFieldType fieldType, boolean quoted) { } class MatchQueryBuilder extends QueryBuilder { - private final MappedFieldType fieldType; + private final MappedField mappedField; /** * Creates a new QueryBuilder using the given analyzer. */ MatchQueryBuilder( Analyzer analyzer, - MappedFieldType fieldType, + MappedField mappedField, boolean enablePositionIncrements, boolean autoGenerateSynonymsPhraseQuery ) { super(analyzer); - this.fieldType = fieldType; + this.mappedField = mappedField; setEnablePositionIncrements(enablePositionIncrements); - if (fieldType.getTextSearchInfo().hasPositions()) { + if (mappedField.getTextSearchInfo().hasPositions()) { setAutoGenerateMultiTermSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); } else { setAutoGenerateMultiTermSynonymsPhraseQuery(false); @@ -425,12 +430,12 @@ private Query createQuery(String field, String queryText, Type type, BooleanClau private SpanQuery newSpanQuery(Term[] terms, boolean isPrefix) { if (terms.length == 1) { - return isPrefix ? 
fieldType.spanPrefixQuery(terms[0].text(), spanRewriteMethod, context) : new SpanTermQuery(terms[0]); + return isPrefix ? mappedField.spanPrefixQuery(terms[0].text(), spanRewriteMethod, context) : new SpanTermQuery(terms[0]); } SpanQuery[] spanQueries = new SpanQuery[terms.length]; for (int i = 0; i < terms.length; i++) { spanQueries[i] = isPrefix - ? fieldType.spanPrefixQuery(terms[i].text(), spanRewriteMethod, context) + ? mappedField.spanPrefixQuery(terms[i].text(), spanRewriteMethod, context) : new SpanTermQuery(terms[i]); } return new SpanOrQuery(spanQueries); @@ -456,7 +461,7 @@ private SpanQuery createSpanQuery(TokenStream in, String field, boolean isPrefix } if (lastTerm != null) { SpanQuery spanQuery = isPrefix - ? fieldType.spanPrefixQuery(lastTerm.text(), spanRewriteMethod, context) + ? mappedField.spanPrefixQuery(lastTerm.text(), spanRewriteMethod, context) : new SpanTermQuery(lastTerm); builder.addClause(spanQuery); } @@ -474,21 +479,21 @@ protected Query newTermQuery(Term term, float boost) { Supplier querySupplier; if (fuzziness != null) { querySupplier = () -> { - Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions, context); + Query query = mappedField.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions, context); if (query instanceof FuzzyQuery) { QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod); } return query; }; } else { - querySupplier = () -> fieldType.termQuery(term.bytes(), context); + querySupplier = () -> mappedField.termQuery(term.bytes(), context); } try { Query query = querySupplier.get(); return query; } catch (RuntimeException e) { if (lenient) { - return newLenientFieldQuery(fieldType.name(), e); + return newLenientFieldQuery(mappedField.name(), e); } else { throw e; } @@ -500,7 +505,7 @@ protected Query newTermQuery(Term term, float boost) { */ protected Query newPrefixQuery(Term term) { try { - return 
fieldType.prefixQuery(term.text(), null, context); + return mappedField.prefixQuery(term.text(), null, context); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(term.field(), e); @@ -571,7 +576,7 @@ private Query analyzeMultiBoolean(String field, TokenStream stream, BooleanClaus @Override protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { try { - return fieldType.phraseQuery(stream, slop, enablePositionIncrements, context); + return mappedField.phraseQuery(stream, slop, enablePositionIncrements, context); } catch (IllegalArgumentException | IllegalStateException e) { if (lenient) { return newLenientFieldQuery(field, e); @@ -583,7 +588,7 @@ protected Query analyzePhrase(String field, TokenStream stream, int slop) throws @Override protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { try { - return fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements, context); + return mappedField.multiPhraseQuery(stream, slop, enablePositionIncrements, context); } catch (IllegalArgumentException | IllegalStateException e) { if (lenient) { return newLenientFieldQuery(field, e); @@ -594,7 +599,7 @@ protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) t private Query analyzePhrasePrefix(String field, TokenStream stream, int slop, int positionCount) throws IOException { try { - return fieldType.phrasePrefixQuery(stream, slop, maxExpansions, context); + return mappedField.phrasePrefixQuery(stream, slop, maxExpansions, context); } catch (IllegalArgumentException | IllegalStateException e) { if (lenient) { return newLenientFieldQuery(field, e); diff --git a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java index 1220aeb9e96a0..bb687bfe7e16a 100644 --- 
a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java @@ -19,7 +19,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -49,7 +49,7 @@ public void setTieBreaker(float tieBreaker) { public Query parse(MultiMatchQueryBuilder.Type type, Map fieldNames, Object value, String minimumShouldMatch) throws IOException { - boolean hasMappedField = fieldNames.keySet().stream().anyMatch(k -> context.getFieldType(k) != null); + boolean hasMappedField = fieldNames.keySet().stream().anyMatch(k -> context.getMappedField(k) != null); if (hasMappedField == false) { // all query fields are unmapped return Queries.newUnmappedFieldsQuery(fieldNames.keySet()); @@ -85,7 +85,7 @@ private List buildFieldQueries( ) throws IOException { List queries = new ArrayList<>(); for (String fieldName : fieldNames.keySet()) { - if (context.getFieldType(fieldName) == null) { + if (context.getMappedField(fieldName) == null) { // ignore unmapped fields continue; } @@ -108,14 +108,14 @@ private List buildCrossFieldQuery(Map fieldNames, Object v List queries = new ArrayList<>(); for (Map.Entry entry : fieldNames.entrySet()) { String name = entry.getKey(); - MappedFieldType fieldType = context.getFieldType(name); - if (fieldType != null) { - Analyzer actualAnalyzer = getAnalyzer(fieldType, false); + MappedField mappedField = context.getMappedField(name); + if (mappedField != null) { + Analyzer actualAnalyzer = getAnalyzer(mappedField, false); if (groups.containsKey(actualAnalyzer) == false) { groups.put(actualAnalyzer, new 
ArrayList<>()); } float boost = entry.getValue() == null ? 1.0f : entry.getValue(); - groups.get(actualAnalyzer).add(new FieldAndBoost(fieldType, boost)); + groups.get(actualAnalyzer).add(new FieldAndBoost(mappedField, boost)); } } for (Map.Entry> group : groups.entrySet()) { @@ -123,7 +123,7 @@ private List buildCrossFieldQuery(Map fieldNames, Object v if (group.getValue().size() == 1) { builder = new MatchQueryBuilder( group.getKey(), - group.getValue().get(0).fieldType, + group.getValue().get(0).mappedField, enablePositionIncrements, autoGenerateSynonymsPhraseQuery ); @@ -142,7 +142,7 @@ private List buildCrossFieldQuery(Map fieldNames, Object v * we just pick the first field. It shouldn't matter because * fields are already grouped by their analyzers/types. */ - String representativeField = group.getValue().get(0).fieldType.name(); + String representativeField = group.getValue().get(0).mappedField.name(); Query query = builder.createBooleanQuery(representativeField, value.toString(), occur); if (query == null) { query = zeroTermsQuery.asQuery(); @@ -175,7 +175,7 @@ private class CrossFieldsQueryBuilder extends MatchQueryBuilder { boolean enablePositionIncrements, boolean autoGenerateSynonymsPhraseQuery ) { - super(analyzer, blendedFields.get(0).fieldType, enablePositionIncrements, autoGenerateSynonymsPhraseQuery); + super(analyzer, blendedFields.get(0).mappedField, enablePositionIncrements, autoGenerateSynonymsPhraseQuery); this.blendedFields = blendedFields; this.tieBreaker = tieBreaker; } @@ -218,7 +218,7 @@ protected Query newPrefixQuery(Term term) { protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { List disjunctions = new ArrayList<>(); for (FieldAndBoost fieldType : blendedFields) { - Query query = fieldType.fieldType.phraseQuery(stream, slop, enablePositionIncrements, context); + Query query = fieldType.mappedField.phraseQuery(stream, slop, enablePositionIncrements, context); if (fieldType.boost != 1f) { query 
= new BoostQuery(query, fieldType.boost); } @@ -231,7 +231,7 @@ protected Query analyzePhrase(String field, TokenStream stream, int slop) throws protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { List disjunctions = new ArrayList<>(); for (FieldAndBoost fieldType : blendedFields) { - Query query = fieldType.fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements, context); + Query query = fieldType.mappedField.multiPhraseQuery(stream, slop, enablePositionIncrements, context); if (fieldType.boost != 1f) { query = new BoostQuery(query, fieldType.boost); } @@ -268,10 +268,10 @@ static Query blendTerms( for (BytesRef term : values) { Query query; try { - query = ft.fieldType.termQuery(term, context); + query = ft.mappedField.termQuery(term, context); } catch (RuntimeException e) { if (lenient) { - query = newLenientFieldQuery(ft.fieldType.name(), e); + query = newLenientFieldQuery(ft.mappedField.name(), e); } else { throw e; } @@ -309,11 +309,11 @@ static Query blendTerms( } static final class FieldAndBoost { - final MappedFieldType fieldType; + final MappedField mappedField; final float boost; - FieldAndBoost(MappedFieldType fieldType, float boost) { - this.fieldType = Objects.requireNonNull(fieldType); + FieldAndBoost(MappedField mappedField, float boost) { + this.mappedField = Objects.requireNonNull(mappedField); this.boost = boost; } } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java index 02431654e79e4..2e45307e71ffc 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java @@ -11,7 +11,7 @@ import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Nullable; -import org.elasticsearch.index.mapper.MappedFieldType; +import 
org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.query.SearchExecutionContext; @@ -127,20 +127,20 @@ static Map resolveMappingField( fieldName = fieldName + fieldSuffix; } - MappedFieldType fieldType = context.getFieldType(fieldName); - if (acceptMetadataField == false && fieldType.name().startsWith("_")) { + MappedField mappedField = context.getMappedField(fieldName); + if (acceptMetadataField == false && mappedField.name().startsWith("_")) { // Ignore metadata fields continue; } if (acceptAllTypes == false) { - if (fieldType.getTextSearchInfo() == TextSearchInfo.NONE || fieldType.mayExistInIndex(context) == false) { + if (mappedField.getTextSearchInfo() == TextSearchInfo.NONE || mappedField.mayExistInIndex(context) == false) { continue; } } // Deduplicate aliases and their concrete fields. - String resolvedFieldName = fieldType.name(); + String resolvedFieldName = mappedField.name(); if (allFields.contains(resolvedFieldName)) { fieldName = resolvedFieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index ee08242557a87..0770e3016eb61 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -40,7 +40,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.query.ExistsQueryBuilder; @@ -325,7 +325,8 @@ public Query getFieldQuery(String field, String queryText, boolean 
quoted) throw return getRangeQuery(field, null, queryText.substring(1), true, false); } // if we are querying a single date field, we also create a range query that leverages the time zone setting - if (context.getFieldType(field) instanceof DateFieldType && this.timeZone != null) { + MappedField mappedField = context.getMappedField(field); + if (mappedField != null && mappedField.type() instanceof DateFieldType && this.timeZone != null) { return getRangeQuery(field, queryText, queryText, true, true); } } @@ -420,15 +421,15 @@ private Query getRangeQuerySingle( boolean endInclusive, SearchExecutionContext context ) { - MappedFieldType currentFieldType = context.getFieldType(field); - if (currentFieldType == null || currentFieldType.getTextSearchInfo() == TextSearchInfo.NONE) { + MappedField mappedField = context.getMappedField(field); + if (mappedField == null || mappedField.getTextSearchInfo() == TextSearchInfo.NONE) { return newUnmappedFieldQuery(field); } try { - Analyzer normalizer = forceAnalyzer == null ? currentFieldType.getTextSearchInfo().searchAnalyzer() : forceAnalyzer; + Analyzer normalizer = forceAnalyzer == null ? mappedField.getTextSearchInfo().searchAnalyzer() : forceAnalyzer; BytesRef part1Binary = part1 == null ? null : normalizer.normalize(field, part1); BytesRef part2Binary = part2 == null ? 
null : normalizer.normalize(field, part2); - Query rangeQuery = currentFieldType.rangeQuery( + Query rangeQuery = mappedField.rangeQuery( part1Binary, part2Binary, startInclusive, @@ -477,14 +478,14 @@ protected Query getFuzzyQuery(String field, String termStr, float minSimilarity) } private Query getFuzzyQuerySingle(String field, String termStr, int minSimilarity) throws ParseException { - MappedFieldType currentFieldType = context.getFieldType(field); - if (currentFieldType == null || currentFieldType.getTextSearchInfo() == TextSearchInfo.NONE) { + MappedField mappedField = context.getMappedField(field); + if (mappedField == null || mappedField.getTextSearchInfo() == TextSearchInfo.NONE) { return newUnmappedFieldQuery(field); } try { - Analyzer normalizer = forceAnalyzer == null ? currentFieldType.getTextSearchInfo().searchAnalyzer() : forceAnalyzer; + Analyzer normalizer = forceAnalyzer == null ? mappedField.getTextSearchInfo().searchAnalyzer() : forceAnalyzer; BytesRef term = termStr == null ? null : normalizer.normalize(field, termStr); - return currentFieldType.fuzzyQuery( + return mappedField.fuzzyQuery( term, Fuzziness.fromEdits(minSimilarity), getFuzzyPrefixLength(), @@ -534,16 +535,16 @@ protected Query getPrefixQuery(String field, String termStr) throws ParseExcepti private Query getPrefixQuerySingle(String field, String termStr) throws ParseException { Analyzer oldAnalyzer = getAnalyzer(); try { - MappedFieldType currentFieldType = context.getFieldType(field); - if (currentFieldType == null || currentFieldType.getTextSearchInfo() == TextSearchInfo.NONE) { + MappedField mappedField = context.getMappedField(field); + if (mappedField == null || mappedField.getTextSearchInfo() == TextSearchInfo.NONE) { return newUnmappedFieldQuery(field); } - setAnalyzer(forceAnalyzer == null ? currentFieldType.getTextSearchInfo().searchAnalyzer() : forceAnalyzer); + setAnalyzer(forceAnalyzer == null ? 
mappedField.getTextSearchInfo().searchAnalyzer() : forceAnalyzer); Query query = null; - if (currentFieldType.getTextSearchInfo().isTokenized() == false) { - query = currentFieldType.prefixQuery(termStr, getMultiTermRewriteMethod(), context); + if (mappedField.getTextSearchInfo().isTokenized() == false) { + query = mappedField.prefixQuery(termStr, getMultiTermRewriteMethod(), context); } else { - query = getPossiblyAnalyzedPrefixQuery(currentFieldType.name(), termStr, currentFieldType); + query = getPossiblyAnalyzedPrefixQuery(mappedField.name(), termStr, mappedField); } return query; } catch (RuntimeException e) { @@ -556,13 +557,9 @@ private Query getPrefixQuerySingle(String field, String termStr) throws ParseExc } } - private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr, MappedFieldType currentFieldType) throws ParseException { + private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr, MappedField mappedField) throws ParseException { if (analyzeWildcard == false) { - return currentFieldType.prefixQuery( - getAnalyzer().normalize(field, termStr).utf8ToString(), - getMultiTermRewriteMethod(), - context - ); + return mappedField.prefixQuery(getAnalyzer().normalize(field, termStr).utf8ToString(), getMultiTermRewriteMethod(), context); } List> tlist; // get Analyzer from superclass and tokenize the term @@ -607,7 +604,7 @@ private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr, Mappe } if (tlist.size() == 1 && tlist.get(0).size() == 1) { - return currentFieldType.prefixQuery(tlist.get(0).get(0), getMultiTermRewriteMethod(), context); + return mappedField.prefixQuery(tlist.get(0).get(0), getMultiTermRewriteMethod(), context); } // build a boolean query with prefix on the last position only. 
@@ -618,7 +615,7 @@ private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr, Mappe Query posQuery; if (plist.size() == 1) { if (isLastPos) { - posQuery = currentFieldType.prefixQuery(plist.get(0), getMultiTermRewriteMethod(), context); + posQuery = mappedField.prefixQuery(plist.get(0), getMultiTermRewriteMethod(), context); } else { posQuery = newTermQuery(new Term(field, plist.get(0)), BoostAttribute.DEFAULT_BOOST); } @@ -649,7 +646,8 @@ private Query existsQuery(String fieldName) { return new MatchNoDocsQuery("No mappings yet"); } final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context - .getFieldType(FieldNamesFieldMapper.NAME); + .getMappedField(FieldNamesFieldMapper.NAME) + .type(); if (fieldNamesFieldType.isEnabled() == false) { // The field_names_field is disabled so we switch to a wildcard query that matches all terms return new WildcardQuery(new Term(fieldName, "*")); @@ -693,18 +691,18 @@ private Query getWildcardQuerySingle(String field, String termStr) throws ParseE } Analyzer oldAnalyzer = getAnalyzer(); try { - MappedFieldType currentFieldType = queryBuilder.context.getFieldType(field); - if (currentFieldType == null) { + MappedField mappedField = queryBuilder.context.getMappedField(field); + if (mappedField == null) { return newUnmappedFieldQuery(field); } - if (forceAnalyzer != null && (analyzeWildcard || currentFieldType.getTextSearchInfo().isTokenized())) { + if (forceAnalyzer != null && (analyzeWildcard || mappedField.getTextSearchInfo().isTokenized())) { setAnalyzer(forceAnalyzer); - return super.getWildcardQuery(currentFieldType.name(), termStr); + return super.getWildcardQuery(mappedField.name(), termStr); } if (getAllowLeadingWildcard() == false && (termStr.startsWith("*") || termStr.startsWith("?"))) { throw new ParseException("'*' or '?' 
not allowed as first character in WildcardQuery"); } - return currentFieldType.normalizedWildcardQuery(termStr, getMultiTermRewriteMethod(), context); + return mappedField.normalizedWildcardQuery(termStr, getMultiTermRewriteMethod(), context); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); @@ -751,15 +749,15 @@ protected Query getRegexpQuery(String field, String termStr) throws ParseExcepti private Query getRegexpQuerySingle(String field, String termStr) throws ParseException { Analyzer oldAnalyzer = getAnalyzer(); try { - MappedFieldType currentFieldType = queryBuilder.context.getFieldType(field); - if (currentFieldType == null) { + MappedField mappedField = queryBuilder.context.getMappedField(field); + if (mappedField == null) { return newUnmappedFieldQuery(field); } if (forceAnalyzer != null) { setAnalyzer(forceAnalyzer); return super.getRegexpQuery(field, termStr); } - return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(), getMultiTermRewriteMethod(), context); + return mappedField.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(), getMultiTermRewriteMethod(), context); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); diff --git a/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java index 1437413c102cc..fc358a63cd70c 100644 --- a/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java @@ -25,7 +25,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.AbstractQueryBuilder; 
import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -75,11 +75,11 @@ public SimpleQueryStringQueryParser( } } - private Analyzer getAnalyzer(MappedFieldType ft) { + private Analyzer getAnalyzer(MappedField mappedField) { if (getAnalyzer() != null) { return analyzer; } - return ft.getTextSearchInfo().searchAnalyzer(); + return mappedField.getTextSearchInfo().searchAnalyzer(); } /** @@ -100,11 +100,11 @@ public void setDefaultOperator(BooleanClause.Occur operator) { @Override protected Query newTermQuery(Term term, float boost) { - MappedFieldType ft = context.getFieldType(term.field()); - if (ft == null) { + MappedField mappedField = context.getMappedField(term.field()); + if (mappedField == null) { return newUnmappedFieldQuery(term.field()); } - return ft.termQuery(term.bytes(), context); + return mappedField.termQuery(term.bytes(), context); } @Override @@ -121,14 +121,14 @@ public Query newFuzzyQuery(String text, int fuzziness) { List disjuncts = new ArrayList<>(); for (Map.Entry entry : weights.entrySet()) { final String fieldName = entry.getKey(); - final MappedFieldType ft = context.getFieldType(fieldName); - if (ft == null) { + final MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { disjuncts.add(newUnmappedFieldQuery(fieldName)); continue; } try { - final BytesRef term = getAnalyzer(ft).normalize(fieldName, text); - Query query = ft.fuzzyQuery( + final BytesRef term = getAnalyzer(mappedField).normalize(fieldName, text); + Query query = mappedField.fuzzyQuery( term, Fuzziness.fromEdits(fuzziness), settings.fuzzyPrefixLength, @@ -170,20 +170,20 @@ public Query newPrefixQuery(String text) { List disjuncts = new ArrayList<>(); for (Map.Entry entry : weights.entrySet()) { final String fieldName = entry.getKey(); - final MappedFieldType ft = context.getFieldType(fieldName); - if (ft == null) { + final MappedField mappedField = 
context.getMappedField(fieldName); + if (mappedField == null) { disjuncts.add(newUnmappedFieldQuery(fieldName)); continue; } try { if (settings.analyzeWildcard()) { - Query analyzedQuery = newPossiblyAnalyzedQuery(fieldName, text, getAnalyzer(ft)); + Query analyzedQuery = newPossiblyAnalyzedQuery(fieldName, text, getAnalyzer(mappedField)); if (analyzedQuery != null) { disjuncts.add(wrapWithBoost(analyzedQuery, entry.getValue())); } } else { - BytesRef term = getAnalyzer(ft).normalize(fieldName, text); - Query query = ft.prefixQuery(term.utf8ToString(), null, context); + BytesRef term = getAnalyzer(mappedField).normalize(fieldName, text); + Query query = mappedField.prefixQuery(term.utf8ToString(), null, context); disjuncts.add(wrapWithBoost(query, entry.getValue())); } } catch (RuntimeException e) { diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 14d5efd0bb872..a2022a333693f 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -101,7 +101,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; @@ -2022,11 +2022,11 @@ public ShardLongFieldRange getTimestampRange() { if (mapperService() == null) { return ShardLongFieldRange.UNKNOWN; // no mapper service, no idea if the field even exists } - final MappedFieldType mappedFieldType = mapperService().fieldType(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD); - if (mappedFieldType instanceof DateFieldMapper.DateFieldType == false) { + final 
MappedField mappedField = mapperService().mappedField(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD); + if (mappedField == null || mappedField.type() instanceof DateFieldMapper.DateFieldType == false) { return ShardLongFieldRange.UNKNOWN; // field missing or not a date } - if (mappedFieldType.isIndexed() == false) { + if (mappedField.isIndexed() == false) { return ShardLongFieldRange.UNKNOWN; // range information missing } @@ -3231,7 +3231,7 @@ private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { store, indexSettings.getMergePolicy(), buildIndexAnalyzer(mapperService), - similarityService.similarity(mapperService == null ? null : mapperService::fieldType), + similarityService.similarity(mapperService == null ? null : mapperService::mappedField), codecService, shardEventListener, indexCache != null ? indexCache.query() : null, diff --git a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index a03702df14523..f506dde1dfe83 100644 --- a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -27,7 +27,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; import org.elasticsearch.script.ScriptService; @@ -126,8 +126,8 @@ public SimilarityService( /** * The similarity to use in searches, which takes into account per-field configuration. */ - public Similarity similarity(@Nullable Function fieldTypeLookup) { - return (fieldTypeLookup != null) ? 
new PerFieldSimilarity(defaultSimilarity, fieldTypeLookup) : defaultSimilarity; + public Similarity similarity(@Nullable Function fieldLookup) { + return (fieldLookup != null) ? new PerFieldSimilarity(defaultSimilarity, fieldLookup) : defaultSimilarity; } public SimilarityProvider getSimilarity(String name) { @@ -148,19 +148,19 @@ public Similarity getDefaultSimilarity() { static class PerFieldSimilarity extends PerFieldSimilarityWrapper { private final Similarity defaultSimilarity; - private final Function fieldTypeLookup; + private final Function fieldLookup; - PerFieldSimilarity(Similarity defaultSimilarity, Function fieldTypeLookup) { + PerFieldSimilarity(Similarity defaultSimilarity, Function fieldLookup) { super(); this.defaultSimilarity = defaultSimilarity; - this.fieldTypeLookup = Objects.requireNonNull(fieldTypeLookup, "fieldTypeLookup cannot be null"); + this.fieldLookup = Objects.requireNonNull(fieldLookup, "fieldLookup cannot be null"); } @Override public Similarity get(String name) { - MappedFieldType fieldType = fieldTypeLookup.apply(name); - return (fieldType != null && fieldType.getTextSearchInfo().similarity() != null) - ? fieldType.getTextSearchInfo().similarity().get() + MappedField mappedField = fieldLookup.apply(name); + return (mappedField != null && mappedField.getTextSearchInfo().similarity() != null) + ?
mappedField.getTextSearchInfo().similarity().get() : defaultSimilarity; } } diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 3852e40e800f9..6b1c83299107f 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -30,7 +30,7 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.DocumentParser; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.ParsedDocument; @@ -174,13 +174,16 @@ private static void handleFieldWildcards(IndexShard indexShard, TermVectorsReque request.selectedFields(fieldNames.toArray(Strings.EMPTY_ARRAY)); } - private static boolean isValidField(MappedFieldType fieldType) { + private static boolean isValidField(MappedField mappedField) { + if (mappedField == null) { + return false; + } // must be a string - if (fieldType instanceof StringFieldType == false) { + if (mappedField.type() instanceof StringFieldType == false) { return false; } // and must be indexed - if (fieldType.isIndexed() == false) { + if (mappedField.isIndexed() == false) { return false; } return true; @@ -196,12 +199,12 @@ private static Fields addGeneratedTermVectors( /* only keep valid fields */ Set validFields = new HashSet<>(); for (String field : selectedFields) { - MappedFieldType fieldType = indexShard.mapperService().fieldType(field); - if (isValidField(fieldType) == false) { + MappedField mappedField = indexShard.mapperService().mappedField(field); + if (isValidField(mappedField) == false) { continue; } // already retrieved, only if 
the analyzer hasn't been overridden at the field - if (fieldType.getTextSearchInfo().termVectors() != TextSearchInfo.TermVector.NONE + if (mappedField.getTextSearchInfo().termVectors() != TextSearchInfo.TermVector.NONE && (request.perFieldAnalyzer() == null || request.perFieldAnalyzer().containsKey(field) == false)) { continue; } @@ -313,8 +316,8 @@ private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVect Set seenFields = new HashSet<>(); Collection documentFields = new HashSet<>(); for (IndexableField field : doc.getFields()) { - MappedFieldType fieldType = indexShard.mapperService().fieldType(field.name()); - if (isValidField(fieldType) == false) { + MappedField mappedField = indexShard.mapperService().mappedField(field.name()); + if (isValidField(mappedField) == false) { continue; } if (request.selectedFields() != null && request.selectedFields().contains(field.name()) == false) { diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index bb85d9395a2dc..b7247d378a0c6 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -87,8 +87,8 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; -import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; @@ -1672,7 +1672,7 @@ public CoordinatorRewriteContextProvider getCoordinatorRewriteContextProvider(Lo client, nowInMillis, clusterService::state, - this::getTimestampFieldType + this::getTimestampField ); } @@ -1776,8 +1776,8 @@ public boolean 
allPendingDanglingIndicesWritten() { * - the field is not a timestamp field. */ @Nullable - public DateFieldMapper.DateFieldType getTimestampFieldType(Index index) { - return timestampFieldMapperService.getTimestampFieldType(index); + public MappedField getTimestampField(Index index) { + return timestampFieldMapperService.getTimestampField(index); } } diff --git a/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java b/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java index 85ff54472f7a1..8ea51d7d0ab80 100644 --- a/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java +++ b/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java @@ -27,7 +27,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.node.Node; @@ -56,7 +56,7 @@ public class TimestampFieldMapperService extends AbstractLifecycleComponent impl * The type of the {@code @timestamp} field keyed by index. Futures may be completed with {@code null} to indicate that there is * no usable {@code @timestamp} field. 
*/ - private final Map> fieldTypesByIndex = ConcurrentCollections.newConcurrentMap(); + private final Map> fieldTypesByIndex = ConcurrentCollections.newConcurrentMap(); public TimestampFieldMapperService(Settings settings, ThreadPool threadPool, IndicesService indicesService) { this.indicesService = indicesService; @@ -103,7 +103,7 @@ public void applyClusterState(ClusterChangedEvent event) { if (hasUsefulTimestampField(indexMetadata) && fieldTypesByIndex.containsKey(index) == false) { logger.trace("computing timestamp mapping for {}", index); - final PlainActionFuture future = new PlainActionFuture<>(); + final PlainActionFuture future = new PlainActionFuture<>(); fieldTypesByIndex.put(index, future); final IndexService indexService = indicesService.indexService(index); @@ -152,10 +152,10 @@ private static boolean hasUsefulTimestampField(IndexMetadata indexMetadata) { return timestampRange.isComplete() && timestampRange != IndexLongFieldRange.UNKNOWN; } - private static DateFieldMapper.DateFieldType fromMapperService(MapperService mapperService) { - final MappedFieldType mappedFieldType = mapperService.fieldType(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD); - if (mappedFieldType instanceof DateFieldMapper.DateFieldType) { - return (DateFieldMapper.DateFieldType) mappedFieldType; + private static MappedField fromMapperService(MapperService mapperService) { + final MappedField mappedField = mapperService.mappedField(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD); + if (mappedField != null && mappedField.type() instanceof DateFieldMapper.DateFieldType) { + return mappedField; } else { return null; } @@ -169,8 +169,8 @@ private static DateFieldMapper.DateFieldType fromMapperService(MapperService map * - the field is not a timestamp field. 
*/ @Nullable - public DateFieldMapper.DateFieldType getTimestampFieldType(Index index) { - final PlainActionFuture future = fieldTypesByIndex.get(index); + public MappedField getTimestampField(Index index) { + final PlainActionFuture future = fieldTypesByIndex.get(index); if (future == null || future.isDone() == false) { return null; } diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java b/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java index 9175aa3b7f2a5..644bef69c04c7 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java @@ -22,7 +22,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.AbstractNumericDocValues; import org.elasticsearch.index.fielddata.AbstractSortedDocValues; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import java.io.IOException; import java.util.Collection; @@ -51,8 +51,8 @@ static class Numeric extends GroupingDocValuesSelector { private long value; private boolean hasValue; - Numeric(MappedFieldType fieldType) { - super(fieldType.name()); + Numeric(MappedField mappedField) { + super(mappedField.name()); } @Override @@ -139,8 +139,8 @@ static class Keyword extends GroupingDocValuesSelector { private SortedDocValues values; private int ord; - Keyword(MappedFieldType fieldType) { - super(fieldType.name()); + Keyword(MappedField mappedField) { + super(mappedField.name()); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java b/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java index eaa49fceb4e63..186113c9f7cac 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java +++ 
b/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java @@ -37,7 +37,7 @@ import org.apache.lucene.search.grouping.GroupSelector; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Nullable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import java.io.IOException; import java.util.Comparator; @@ -81,7 +81,7 @@ public String toString() { * field. * * @param groupField The sort field used to group documents. - * @param groupFieldType The {@link MappedFieldType} for this sort field. + * @param mappedGroupField The {@link MappedField} for this sort field. * @param groupSort The {@link Sort} used to sort the groups. * The grouping keeps only the top sorted document per grouping key. * This must be non-null, ie, if you want to groupSort by relevance @@ -91,12 +91,18 @@ public String toString() { */ public static SinglePassGroupingCollector createNumeric( String groupField, - MappedFieldType groupFieldType, + MappedField mappedGroupField, Sort groupSort, int topN, @Nullable FieldDoc after ) { - return new SinglePassGroupingCollector<>(new GroupingDocValuesSelector.Numeric(groupFieldType), groupField, groupSort, topN, after); + return new SinglePassGroupingCollector<>( + new GroupingDocValuesSelector.Numeric(mappedGroupField), + groupField, + groupSort, + topN, + after + ); } /** @@ -105,7 +111,7 @@ public static SinglePassGroupingCollector createNumeric( * an {@link IllegalStateException} if a document contains more than one value for the field. * * @param groupField The sort field used to group documents. - * @param groupFieldType The {@link MappedFieldType} for this sort field. + * @param mappedGroupField The {@link MappedField} for this sort field. * @param groupSort The {@link Sort} used to sort the groups. The grouping keeps only the top sorted * document per grouping key. 
* This must be non-null, ie, if you want to groupSort by relevance use Sort.RELEVANCE. @@ -114,12 +120,18 @@ public static SinglePassGroupingCollector createNumeric( */ public static SinglePassGroupingCollector createKeyword( String groupField, - MappedFieldType groupFieldType, + MappedField mappedGroupField, Sort groupSort, int topN, @Nullable FieldDoc after ) { - return new SinglePassGroupingCollector<>(new GroupingDocValuesSelector.Keyword(groupFieldType), groupField, groupSort, topN, after); + return new SinglePassGroupingCollector<>( + new GroupingDocValuesSelector.Keyword(mappedGroupField), + groupField, + groupSort, + topN, + after + ); } private final String groupField; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java index 58fd7f85f6076..e4f4d12ec1640 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java @@ -19,7 +19,7 @@ import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -40,7 +40,7 @@ class BinaryValuesSource extends SingleDimensionValuesSource { BinaryValuesSource( BigArrays bigArrays, LongConsumer breakerConsumer, - MappedFieldType fieldType, + MappedField mappedField, CheckedFunction docValuesFunc, DocValueFormat format, boolean missingBucket, @@ -48,7 +48,7 @@ class BinaryValuesSource extends SingleDimensionValuesSource { int size, int reverseMul ) { 
- super(bigArrays, format, fieldType, missingBucket, missingOrder, size, reverseMul); + super(bigArrays, format, mappedField, missingBucket, missingOrder, size, reverseMul); this.breakerConsumer = breakerConsumer; this.docValuesFunc = docValuesFunc; this.values = bigArrays.newObjectArray(Math.min(size, 100)); @@ -196,12 +196,12 @@ public void collect(int doc, long bucket) throws IOException { @Override SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) { - if (checkIfSortedDocsIsApplicable(reader, fieldType) == false - || fieldType instanceof StringFieldType == false + if (checkIfSortedDocsIsApplicable(reader, mappedField) == false + || mappedField.type() instanceof StringFieldType == false || (query != null && query.getClass() != MatchAllDocsQuery.class)) { return null; } - return new TermsSortedDocsProducer(fieldType.name()); + return new TermsSortedDocsProducer(mappedField.name()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index ae762bf4738b3..e8e02cfd1c1d7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -244,7 +244,7 @@ protected AggregatorFactory doBuild( values[i] = null; } else if (obj instanceof Comparable c) { values[i] = c; - } else if (obj instanceof Map && configs[i].fieldType().getClass() == TimeSeriesIdFieldType.class) { + } else if (obj instanceof Map && configs[i].mappedField().type().getClass() == TimeSeriesIdFieldType.class) { // If input is a _tsid map, encode the map to the _tsid BytesRef values[i] = configs[i].format().parseBytesRef(obj); } else { diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index dd91224f21819..8e2c20f75038a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -274,10 +274,10 @@ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException CompositeValuesSourceConfig sourceConfig = sourceConfigs[i]; SingleDimensionValuesSource source = sources[i]; SortField indexSortField = indexSort.getSort()[i]; - if (source.fieldType == null + if (source.mappedField == null // TODO: can we handle missing bucket when using index sort optimization ? || source.missingBucket - || indexSortField.getField().equals(source.fieldType.name()) == false + || indexSortField.getField().equals(source.mappedField.name()) == false || isMaybeMultivalued(context, indexSortField) || sourceConfig.hasScript()) { break; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java index 779abc73a534d..8cc6d27e19054 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java @@ -11,7 +11,7 @@ import org.apache.lucene.index.IndexReader; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.Nullable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.ValuesSource; 
import org.elasticsearch.search.sort.SortOrder; @@ -33,7 +33,7 @@ SingleDimensionValuesSource createValuesSource( private final String name; @Nullable - private final MappedFieldType fieldType; + private final MappedField mappedField; private final ValuesSource vs; private final DocValueFormat format; private final int reverseMul; @@ -46,7 +46,7 @@ SingleDimensionValuesSource createValuesSource( * Creates a new {@link CompositeValuesSourceConfig}. * * @param name The name of the source. - * @param fieldType The field type or null if the source is a script. + * @param mappedField The field or null if the source is a script. * @param vs The underlying {@link ValuesSource}. * @param format The {@link DocValueFormat} of this source. * @param order The sort order associated with this source. @@ -56,7 +56,7 @@ SingleDimensionValuesSource createValuesSource( */ CompositeValuesSourceConfig( String name, - @Nullable MappedFieldType fieldType, + @Nullable MappedField mappedField, ValuesSource vs, DocValueFormat format, SortOrder order, @@ -66,7 +66,7 @@ SingleDimensionValuesSource createValuesSource( SingleDimensionValuesSourceProvider singleDimensionValuesSourceProvider ) { this.name = name; - this.fieldType = fieldType; + this.mappedField = mappedField; this.vs = vs; this.format = format; this.reverseMul = order == SortOrder.ASC ? 1 : -1; @@ -84,10 +84,10 @@ String name() { } /** - * Returns the {@link MappedFieldType} for this config. + * Returns the {@link MappedField} for this config. 
*/ - MappedFieldType fieldType() { - return fieldType; + MappedField mappedField() { + return mappedField; } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSource.java index 54a00dbdc2d38..8c2406dcf61ba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSource.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.Rounding; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.bucket.histogram.SizedBucketAggregator; @@ -22,7 +22,7 @@ public class DateHistogramValuesSource extends LongValuesSource implements Sized DateHistogramValuesSource( BigArrays bigArrays, - MappedFieldType fieldType, + MappedField mappedField, RoundingValuesSource roundingValuesSource, DocValueFormat format, boolean missingBucket, @@ -32,7 +32,7 @@ public class DateHistogramValuesSource extends LongValuesSource implements Sized ) { super( bigArrays, - fieldType, + mappedField, roundingValuesSource::longValues, roundingValuesSource::round, format, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index 7468af95b053c..0e5353df64e2f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ 
-15,7 +15,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.script.Script; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; @@ -240,10 +240,10 @@ public static void register(ValuesSourceRegistry.Builder builder) { RoundingValuesSource vs = new RoundingValuesSource(numeric, preparedRounding); // is specified in the builder. final DocValueFormat docValueFormat = format == null ? DocValueFormat.RAW : valuesSourceConfig.format(); - final MappedFieldType fieldType = valuesSourceConfig.fieldType(); + final MappedField mappedField = valuesSourceConfig.mappedField(); return new CompositeValuesSourceConfig( name, - fieldType, + mappedField, vs, docValueFormat, order, @@ -259,7 +259,7 @@ public static void register(ValuesSourceRegistry.Builder builder) { final RoundingValuesSource roundingValuesSource = (RoundingValuesSource) compositeValuesSourceConfig.valuesSource(); return new DateHistogramValuesSource( bigArrays, - compositeValuesSourceConfig.fieldType(), + compositeValuesSourceConfig.mappedField(), roundingValuesSource, compositeValuesSourceConfig.format(), compositeValuesSourceConfig.missingBucket(), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java index e1b15f0db93ea..e3c2eb48991ae 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java @@ -17,7 +17,7 @@ import org.elasticsearch.core.CheckedFunction; import 
org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -35,7 +35,7 @@ class DoubleValuesSource extends SingleDimensionValuesSource { DoubleValuesSource( BigArrays bigArrays, - MappedFieldType fieldType, + MappedField mappedField, CheckedFunction docValuesFunc, DocValueFormat format, boolean missingBucket, @@ -43,7 +43,7 @@ class DoubleValuesSource extends SingleDimensionValuesSource { int size, int reverseMul ) { - super(bigArrays, format, fieldType, missingBucket, missingOrder, size, reverseMul); + super(bigArrays, format, mappedField, missingBucket, missingOrder, size, reverseMul); this.docValuesFunc = docValuesFunc; this.bits = this.missingBucket ? new BitArray(100, bigArrays) : null; boolean success = false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java index b799cc0475b02..1c7bf75b22cbf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileCellIdSource; import 
org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder; @@ -81,11 +81,11 @@ static void register(ValuesSourceRegistry.Builder builder) { (valuesSourceConfig, precision, boundingBox, name, hasScript, format, missingBucket, missingOrder, order) -> { ValuesSource.GeoPoint geoPoint = (ValuesSource.GeoPoint) valuesSourceConfig.getValuesSource(); // is specified in the builder. - final MappedFieldType fieldType = valuesSourceConfig.fieldType(); + final MappedField mappedField = valuesSourceConfig.mappedField(); GeoTileCellIdSource cellIdSource = new GeoTileCellIdSource(geoPoint, precision, boundingBox); return new CompositeValuesSourceConfig( name, - fieldType, + mappedField, cellIdSource, DocValueFormat.GEOTILE, order, @@ -103,7 +103,7 @@ static void register(ValuesSourceRegistry.Builder builder) { final ValuesSource.Numeric cis = (ValuesSource.Numeric) compositeValuesSourceConfig.valuesSource(); return new GeoTileValuesSource( bigArrays, - compositeValuesSourceConfig.fieldType(), + compositeValuesSourceConfig.mappedField(), cis::longValues, LongUnaryOperator.identity(), compositeValuesSourceConfig.format(), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileValuesSource.java index 174e684bcabad..6fdd1dc7a7b13 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileValuesSource.java @@ -12,7 +12,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.DocValueFormat; import java.io.IOException; @@ -27,7 +27,7 @@ class 
GeoTileValuesSource extends LongValuesSource { GeoTileValuesSource( BigArrays bigArrays, - MappedFieldType fieldType, + MappedField mappedField, CheckedFunction docValuesFunc, LongUnaryOperator rounding, DocValueFormat format, @@ -36,7 +36,7 @@ class GeoTileValuesSource extends LongValuesSource { int size, int reverseMul ) { - super(bigArrays, fieldType, docValuesFunc, rounding, format, missingBucket, missingOrder, size, reverseMul); + super(bigArrays, mappedField, docValuesFunc, rounding, format, missingBucket, missingOrder, size, reverseMul); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java index 30009bad1acc5..fd93c3adca770 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Releasables; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -44,7 +44,7 @@ class GlobalOrdinalValuesSource extends SingleDimensionValuesSource { GlobalOrdinalValuesSource( BigArrays bigArrays, - MappedFieldType type, + MappedField mappedField, CheckedFunction docValuesFunc, DocValueFormat format, boolean missingBucket, @@ -52,7 +52,7 @@ class GlobalOrdinalValuesSource extends SingleDimensionValuesSource { int size, int reverseMul ) { - super(bigArrays, format, type, missingBucket, missingOrder, size, reverseMul); + super(bigArrays, format, mappedField, 
missingBucket, missingOrder, size, reverseMul); this.docValuesFunc = docValuesFunc; this.values = bigArrays.newLongArray(Math.min(size, 100), false); } @@ -105,7 +105,7 @@ void setAfter(Comparable value) { if (missingBucket && value == null) { afterValue = null; afterValueGlobalOrd = MISSING_VALUE_FLAG; - } else if (value.getClass() == String.class || (fieldType == null)) { + } else if (value.getClass() == String.class || (mappedField == null)) { // the value might be not string if this field is missing in this shard but present in other shards // and doesn't have a string type afterValue = format.parseBytesRef(value); @@ -190,12 +190,12 @@ public void collect(int doc, long bucket) throws IOException { @Override SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) { - if (checkIfSortedDocsIsApplicable(reader, fieldType) == false - || fieldType instanceof StringFieldType == false + if (checkIfSortedDocsIsApplicable(reader, mappedField) == false + || mappedField.type() instanceof StringFieldType == false || (query != null && query.getClass() != MatchAllDocsQuery.class)) { return null; } - return new TermsSortedDocsProducer(fieldType.name()); + return new TermsSortedDocsProducer(mappedField.name()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java index cdd4906058c84..c52f0e7ac38fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.mapper.MappedFieldType; 
+import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.ValuesSource; @@ -72,10 +72,10 @@ public static void register(ValuesSourceRegistry.Builder builder) { (valuesSourceConfig, interval, name, hasScript, format, missingBucket, missingOrder, order) -> { ValuesSource.Numeric numeric = (ValuesSource.Numeric) valuesSourceConfig.getValuesSource(); final HistogramValuesSource vs = new HistogramValuesSource(numeric, interval); - final MappedFieldType fieldType = valuesSourceConfig.fieldType(); + final MappedField mappedField = valuesSourceConfig.mappedField(); return new CompositeValuesSourceConfig( name, - fieldType, + mappedField, vs, valuesSourceConfig.format(), order, @@ -91,7 +91,7 @@ public static void register(ValuesSourceRegistry.Builder builder) { final ValuesSource.Numeric numericValuesSource = (ValuesSource.Numeric) compositeValuesSourceConfig.valuesSource(); return new DoubleValuesSource( bigArrays, - compositeValuesSourceConfig.fieldType(), + compositeValuesSourceConfig.mappedField(), numericValuesSource::doubleValues, compositeValuesSourceConfig.format(), compositeValuesSourceConfig.missingBucket(), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java index f33ba1cab62f1..68f281e78aab7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java @@ -26,7 +26,7 @@ import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.DateFieldMapper; -import 
org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -50,7 +50,7 @@ class LongValuesSource extends SingleDimensionValuesSource { LongValuesSource( BigArrays bigArrays, - MappedFieldType fieldType, + MappedField mappedField, CheckedFunction docValuesFunc, LongUnaryOperator rounding, DocValueFormat format, @@ -59,7 +59,7 @@ class LongValuesSource extends SingleDimensionValuesSource { int size, int reverseMul ) { - super(bigArrays, format, fieldType, missingBucket, missingOrder, size, reverseMul); + super(bigArrays, format, mappedField, missingBucket, missingOrder, size, reverseMul); this.bigArrays = bigArrays; this.docValuesFunc = docValuesFunc; this.rounding = rounding; @@ -236,7 +236,7 @@ private static boolean checkMatchAllOrRangeQuery(Query query, String fieldName) @Override SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) { query = extractQuery(query); - if (checkIfSortedDocsIsApplicable(reader, fieldType) == false || checkMatchAllOrRangeQuery(query, fieldType.name()) == false) { + if (checkIfSortedDocsIsApplicable(reader, mappedField) == false || checkMatchAllOrRangeQuery(query, mappedField.name()) == false) { return null; } final byte[] lowerPoint; @@ -249,7 +249,7 @@ SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query quer upperPoint = null; } - if (fieldType instanceof NumberFieldMapper.NumberFieldType ft) { + if (mappedField.type() instanceof NumberFieldMapper.NumberFieldType ft) { final ToLongFunction toBucketFunction; switch (ft.typeName()) { @@ -266,11 +266,11 @@ SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query quer default: return null; } - return new PointsSortedDocsProducer(fieldType.name(), toBucketFunction, lowerPoint, upperPoint); - } else if
(fieldType instanceof DateFieldMapper.DateFieldType) { - ToLongFunction decode = ((DateFieldMapper.DateFieldType) fieldType).resolution()::parsePointAsMillis; + return new PointsSortedDocsProducer(mappedField.name(), toBucketFunction, lowerPoint, upperPoint); + } else if (mappedField.type() instanceof DateFieldMapper.DateFieldType) { + ToLongFunction decode = ((DateFieldMapper.DateFieldType) mappedField.type()).resolution()::parsePointAsMillis; ToLongFunction toBucketFunction = value -> rounding.applyAsLong(decode.applyAsLong(value)); - return new PointsSortedDocsProducer(fieldType.name(), toBucketFunction, lowerPoint, upperPoint); + return new PointsSortedDocsProducer(mappedField.name(), toBucketFunction, lowerPoint, upperPoint); } else { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java index bd3c8b7eb322c..8903e43402349 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.sort.SortOrder; @@ -28,7 +28,7 @@ abstract class SingleDimensionValuesSource> implements R protected final BigArrays bigArrays; protected final DocValueFormat format; @Nullable - protected final MappedFieldType fieldType; + protected final MappedField mappedField; protected final boolean missingBucket; protected final 
MissingOrder missingOrder; @@ -42,7 +42,7 @@ abstract class SingleDimensionValuesSource> implements R * * @param bigArrays The big arrays object. * @param format The format of the source. - * @param fieldType The field type or null if the source is a script. + * @param mappedField The mapped field or null if the source is a script. * @param missingBucket If true, an explicit `null bucket represents documents with missing values. * @param missingOrder How to order missing buckets if missingBucket is true. * @param size The number of values to record. @@ -51,7 +51,7 @@ abstract class SingleDimensionValuesSource> implements R SingleDimensionValuesSource( BigArrays bigArrays, DocValueFormat format, - @Nullable MappedFieldType fieldType, + @Nullable MappedField mappedField, boolean missingBucket, MissingOrder missingOrder, int size, @@ -59,7 +59,7 @@ abstract class SingleDimensionValuesSource> implements R ) { this.bigArrays = bigArrays; this.format = format; - this.fieldType = fieldType; + this.mappedField = mappedField; this.missingBucket = missingBucket; this.missingOrder = missingOrder; this.size = size; @@ -147,8 +147,8 @@ abstract LeafBucketCollector getLeafCollector(Comparable value, LeafReaderCon /** * Returns true if a {@link SortedDocsProducer} should be used to optimize the execution. 
*/ - protected boolean checkIfSortedDocsIsApplicable(IndexReader reader, MappedFieldType fieldType) { - if (fieldType == null || (missingBucket && afterValue == null) || fieldType.isIndexed() == false || + protected boolean checkIfSortedDocsIsApplicable(IndexReader reader, MappedField mappedField) { + if (mappedField == null || (missingBucket && afterValue == null) || mappedField.isIndexed() == false || // inverse of the natural order reverseMul == -1) { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java index dcbaed78ba063..87ea44213d851 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java @@ -98,7 +98,7 @@ static void register(ValuesSourceRegistry.Builder builder) { } return new CompositeValuesSourceConfig( name, - valuesSourceConfig.fieldType(), + valuesSourceConfig.mappedField(), valuesSourceConfig.getValuesSource(), docValueFormat, order, @@ -116,7 +116,7 @@ static void register(ValuesSourceRegistry.Builder builder) { if (vs.isFloatingPoint()) { return new DoubleValuesSource( bigArrays, - compositeValuesSourceConfig.fieldType(), + compositeValuesSourceConfig.mappedField(), vs::doubleValues, compositeValuesSourceConfig.format(), compositeValuesSourceConfig.missingBucket(), @@ -130,7 +130,7 @@ static void register(ValuesSourceRegistry.Builder builder) { rounding = LongUnaryOperator.identity(); return new LongValuesSource( bigArrays, - compositeValuesSourceConfig.fieldType(), + compositeValuesSourceConfig.mappedField(), vs::longValues, rounding, compositeValuesSourceConfig.format(), @@ -152,7 +152,7 @@ static void register(ValuesSourceRegistry.Builder builder) { List.of(CoreValuesSourceType.KEYWORD, 
CoreValuesSourceType.IP), (valuesSourceConfig, name, hasScript, format, missingBucket, missingOrder, order) -> new CompositeValuesSourceConfig( name, - valuesSourceConfig.fieldType(), + valuesSourceConfig.mappedField(), valuesSourceConfig.getValuesSource(), valuesSourceConfig.format(), order, @@ -170,7 +170,7 @@ static void register(ValuesSourceRegistry.Builder builder) { ValuesSource.Bytes.WithOrdinals vs = (ValuesSource.Bytes.WithOrdinals) compositeValuesSourceConfig.valuesSource(); return new GlobalOrdinalValuesSource( bigArrays, - compositeValuesSourceConfig.fieldType(), + compositeValuesSourceConfig.mappedField(), vs::globalOrdinalsValues, compositeValuesSourceConfig.format(), compositeValuesSourceConfig.missingBucket(), @@ -183,7 +183,7 @@ static void register(ValuesSourceRegistry.Builder builder) { return new BinaryValuesSource( bigArrays, addRequestCircuitBreakerBytes, - compositeValuesSourceConfig.fieldType(), + compositeValuesSourceConfig.mappedField(), vs::bytesValues, compositeValuesSourceConfig.format(), compositeValuesSourceConfig.missingBucket(), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 1343b8aa377f3..a3ea9839a247d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -164,7 +164,7 @@ private static FromDateRange adaptIntoRangeOrNull( "couldn't adapt [{}], no range for [{}/{}]", name, valuesSourceConfig.fieldContext().field(), - valuesSourceConfig.fieldType() + valuesSourceConfig.mappedField() ); return null; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index 377fc15302fc2..5e3ce2c2442da 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -382,8 +382,8 @@ public static FromFilters adaptIntoFiltersOrNull( if (averageDocsPerRange < DOCS_PER_RANGE_TO_USE_FILTERS) { return null; } - if (valuesSourceConfig.fieldType() instanceof DateFieldType - && ((DateFieldType) valuesSourceConfig.fieldType()).resolution() == Resolution.NANOSECONDS) { + if (valuesSourceConfig.mappedField().type() instanceof DateFieldType + && ((DateFieldType) valuesSourceConfig.mappedField().type()).resolution() == Resolution.NANOSECONDS) { // We don't generate sensible Queries for nanoseconds. return null; } @@ -428,9 +428,9 @@ protected FromFilters adapt(CheckedFunction metadata) throws IOException { - if (fieldType == null) { + if (mappedField == null) { return createUnmapped(parent, metadata); } @@ -167,7 +168,13 @@ protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound car ? 
null : includeExclude.convertToStringFilter(DocValueFormat.RAW); - final SignificanceLookup lookup = new SignificanceLookup(context, samplingContext, fieldType, DocValueFormat.RAW, backgroundFilter); + final SignificanceLookup lookup = new SignificanceLookup( + context, + samplingContext, + mappedField, + DocValueFormat.RAW, + backgroundFilter + ); final CollectorSource collectorSource = createCollectorSource(); boolean success = false; try { @@ -226,7 +233,7 @@ private CollectorSource createCollectorSource() { return new ProfilingSignificantTextCollectorSource( context.lookup().source(), context.bigArrays(), - fieldType, + mappedField, analyzer, fieldNames, filterDuplicateText @@ -235,7 +242,7 @@ private CollectorSource createCollectorSource() { return new SignificantTextCollectorSource( context.lookup().source(), context.bigArrays(), - fieldType, + mappedField, analyzer, fieldNames, filterDuplicateText @@ -245,7 +252,7 @@ private CollectorSource createCollectorSource() { private static class SignificantTextCollectorSource implements MapStringTermsAggregator.CollectorSource { private final SourceLookup sourceLookup; private final BigArrays bigArrays; - private final MappedFieldType fieldType; + private final MappedField mappedField; private final Analyzer analyzer; private final String[] sourceFieldNames; private final BytesRefBuilder scratch = new BytesRefBuilder(); @@ -254,14 +261,14 @@ private static class SignificantTextCollectorSource implements MapStringTermsAgg SignificantTextCollectorSource( SourceLookup sourceLookup, BigArrays bigArrays, - MappedFieldType fieldType, + MappedField mappedField, Analyzer analyzer, String[] sourceFieldNames, boolean filterDuplicateText ) { this.sourceLookup = sourceLookup; this.bigArrays = bigArrays; - this.fieldType = fieldType; + this.mappedField = mappedField; this.analyzer = analyzer; this.sourceFieldNames = sourceFieldNames; dupSequenceSpotters = filterDuplicateText ? 
bigArrays.newObjectArray(1) : null; @@ -269,7 +276,7 @@ private static class SignificantTextCollectorSource implements MapStringTermsAgg @Override public String describe() { - return "analyze " + fieldType.name() + " from _source"; + return "analyze " + mappedField.name() + " from _source"; } @Override @@ -316,13 +323,13 @@ private void collectFromSource(int doc, long owningBucketOrd, DuplicateByteSeque return null; } if (obj instanceof BytesRef) { - return fieldType.valueForDisplay(obj).toString(); + return mappedField.valueForDisplay(obj).toString(); } return obj.toString(); }).iterator(); while (itr.hasNext()) { String text = itr.next(); - TokenStream ts = analyzer.tokenStream(fieldType.name(), text); + TokenStream ts = analyzer.tokenStream(mappedField.name(), text); processTokenStream( includeExclude, doc, @@ -419,12 +426,12 @@ private static class ProfilingSignificantTextCollectorSource extends Significant private ProfilingSignificantTextCollectorSource( SourceLookup sourceLookup, BigArrays bigArrays, - MappedFieldType fieldType, + MappedField mappedField, Analyzer analyzer, String[] sourceFieldNames, boolean filterDuplicateText ) { - super(sourceLookup, bigArrays, fieldType, analyzer, sourceFieldNames, filterDuplicateText); + super(sourceLookup, bigArrays, mappedField, analyzer, sourceFieldNames, filterDuplicateText); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java index 9170afc1af03a..ea70ada59abae 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.DocCountFieldMapper; -import 
org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.Rewriteable; @@ -94,12 +94,12 @@ public abstract class AggregationContext implements Releasable { * Lookup the context for a field. */ public final FieldContext buildFieldContext(String field) { - MappedFieldType ft = getFieldType(field); - if (ft == null) { + MappedField mappedField = getMappedField(field); + if (mappedField == null) { // The field is unmapped return null; } - return new FieldContext(field, buildFieldData(ft), ft); + return new FieldContext(field, buildFieldData(mappedField), mappedField); } /** @@ -129,19 +129,19 @@ public abstract Analyzer buildCustomAnalyzer( /** * Lookup the context for an already resolved field type. */ - public final FieldContext buildFieldContext(MappedFieldType ft) { - return new FieldContext(ft.name(), buildFieldData(ft), ft); + public final FieldContext buildFieldContext(MappedField field) { + return new FieldContext(field.name(), buildFieldData(field), field); } /** * Build field data. */ - protected abstract IndexFieldData buildFieldData(MappedFieldType ft); + protected abstract IndexFieldData buildFieldData(MappedField field); /** - * Lookup a {@link MappedFieldType} by path. + * Lookup a {@link MappedField} by path. 
*/ - public abstract MappedFieldType getFieldType(String path); + public abstract MappedField getMappedField(String path); /** * Returns a set of field names that match a regex-like pattern @@ -434,13 +434,13 @@ public Analyzer buildCustomAnalyzer( } @Override - protected IndexFieldData buildFieldData(MappedFieldType ft) { - return context.getForField(ft); + protected IndexFieldData buildFieldData(MappedField mappedField) { + return context.getForField(mappedField); } @Override - public MappedFieldType getFieldType(String path) { - return context.getFieldType(path); + public MappedField getMappedField(String path) { + return context.getMappedField(path); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java index e41e0d29691e8..f59c1e8d20a54 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java @@ -63,7 +63,7 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa if ((fieldContext.indexFieldData() instanceof IndexNumericFieldData) == false) { throw new IllegalArgumentException( - "Expected numeric type on field [" + fieldContext.field() + "], but got [" + fieldContext.fieldType().typeName() + "]" + "Expected numeric type on field [" + fieldContext.field() + "], but got [" + fieldContext.mappedField().typeName() + "]" ); } @@ -164,7 +164,11 @@ public ValuesSource getScript(AggregationScript.LeafFactory script, ValueType sc public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script, AggregationContext context) { if ((fieldContext.indexFieldData() instanceof IndexGeoPointFieldData) == false) { throw new IllegalArgumentException( - "Expected geo_point type on field [" + fieldContext.field() + "], but 
got [" + fieldContext.fieldType().typeName() + "]" + "Expected geo_point type on field [" + + fieldContext.field() + + "], but got [" + + fieldContext.mappedField().typeName() + + "]" ); } @@ -201,10 +205,12 @@ public ValuesSource getScript(AggregationScript.LeafFactory script, ValueType sc @Override public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script, AggregationContext context) { - MappedFieldType fieldType = fieldContext.fieldType(); + MappedFieldType fieldType = fieldContext.mappedField().type(); if (fieldType instanceof RangeFieldMapper.RangeFieldType == false) { - throw new IllegalArgumentException("Asked for range ValuesSource, but field is of type " + fieldType.name()); + throw new IllegalArgumentException( + "Asked for range ValuesSource, but field is of type " + fieldContext.mappedField().name() + ); } RangeFieldMapper.RangeFieldType rangeFieldType = (RangeFieldMapper.RangeFieldType) fieldType; return new ValuesSource.Range(fieldContext.indexFieldData(), rangeFieldType.rangeType()); @@ -275,10 +281,10 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa private ValuesSource.Numeric fieldData(FieldContext fieldContext, AggregationContext context) { if ((fieldContext.indexFieldData() instanceof IndexNumericFieldData) == false) { throw new IllegalArgumentException( - "Expected numeric type on field [" + fieldContext.field() + "], but got [" + fieldContext.fieldType().typeName() + "]" + "Expected numeric type on field [" + fieldContext.field() + "], but got [" + fieldContext.mappedField().typeName() + "]" ); } - if (fieldContext.fieldType() instanceof DateFieldType == false) { + if (fieldContext.mappedField().type() instanceof DateFieldType == false) { return new ValuesSource.Numeric.FieldData((IndexNumericFieldData) fieldContext.indexFieldData()); } @@ -292,7 +298,7 @@ private ValuesSource.Numeric fieldData(FieldContext fieldContext, AggregationCon */ @Override public Function 
roundingPreparer() throws IOException { - DateFieldType dft = (DateFieldType) fieldContext.fieldType(); + DateFieldType dft = (DateFieldType) fieldContext.mappedField().type(); /* * The range of dates, min first, then max. This is an array so we can * write to it inside the QueryVisitor below. @@ -300,7 +306,7 @@ public Function roundingPreparer() throws IOExcepti long[] range = new long[] { Long.MIN_VALUE, Long.MAX_VALUE }; // Check the search index for bounds - if (fieldContext.fieldType().isIndexed()) { + if (fieldContext.mappedField().isIndexed()) { log.trace("Attempting to apply index bound date rounding"); /* * We can't look up the min and max date without both the @@ -318,12 +324,12 @@ public Function roundingPreparer() throws IOExcepti boolean isMultiValue = false; for (LeafReaderContext leaf : context.searcher().getLeafContexts()) { - if (fieldContext.fieldType().isIndexed()) { + if (fieldContext.mappedField().isIndexed()) { PointValues pointValues = leaf.reader().getPointValues(fieldContext.field()); if (pointValues != null && pointValues.size() != pointValues.getDocCount()) { isMultiValue = true; } - } else if (fieldContext.fieldType().hasDocValues()) { + } else if (fieldContext.mappedField().hasDocValues()) { if (DocValues.unwrapSingleton(leaf.reader().getSortedNumericDocValues(fieldContext.field())) == null) { isMultiValue = true; } @@ -350,7 +356,7 @@ public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) { @Override public boolean acceptField(String field) { - return field.equals(fieldContext.fieldType().name()); + return field.equals(fieldContext.mappedField().name()); }; @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java index 101e94b6717c4..7090913bf393f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/support/FieldContext.java @@ -8,7 +8,7 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; /** * Used by all field data based aggregators. This determine the context of the field data the aggregators are operating @@ -18,7 +18,7 @@ public class FieldContext { private final String field; private final IndexFieldData indexFieldData; - private final MappedFieldType fieldType; + private final MappedField mappedField; /** * Constructs a field data context for the given field and its index field data @@ -26,10 +26,10 @@ public class FieldContext { * @param field The name of the field * @param indexFieldData The index field data of the field */ - public FieldContext(String field, IndexFieldData indexFieldData, MappedFieldType fieldType) { + public FieldContext(String field, IndexFieldData indexFieldData, MappedField mappedField) { this.field = field; this.indexFieldData = indexFieldData; - this.fieldType = fieldType; + this.mappedField = mappedField; } public String field() { @@ -43,12 +43,12 @@ public IndexFieldData indexFieldData() { return indexFieldData; } - public MappedFieldType fieldType() { - return fieldType; + public MappedField mappedField() { + return mappedField; } public String getTypeName() { - return fieldType.typeName(); + return mappedField.typeName(); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java index bd797375f9636..59506d0b5578c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java @@ -12,7 +12,7 @@ import 
org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.script.AggregationScript; @@ -216,7 +216,7 @@ private static ValuesSourceType getLegacyMapping( return CoreValuesSourceType.NUMERIC; } else if (indexFieldData instanceof IndexGeoPointFieldData) { return CoreValuesSourceType.GEOPOINT; - } else if (fieldContext.fieldType() instanceof RangeFieldMapper.RangeFieldType) { + } else if (fieldContext.mappedField().type() instanceof RangeFieldMapper.RangeFieldType) { return CoreValuesSourceType.RANGE; } else { if (userValueTypeHint == null) { @@ -243,7 +243,7 @@ private static DocValueFormat resolveFormat( @Nullable FieldContext fieldContext ) { if (fieldContext != null) { - return fieldContext.fieldType().docValueFormat(format, tz); + return fieldContext.mappedField().docValueFormat(format, tz); } // Script or Unmapped case return valuesSourceType.getFormatter(format, tz); @@ -254,8 +254,8 @@ private static DocValueFormat resolveFormat( * are operating on, for example Parent and Child join aggregations, which use the join relation to find the field they are reading from * rather than a user specified field. 
*/ - public static ValuesSourceConfig resolveFieldOnly(MappedFieldType fieldType, AggregationContext context) { - FieldContext fieldContext = context.buildFieldContext(fieldType); + public static ValuesSourceConfig resolveFieldOnly(MappedField mappedField, AggregationContext context) { + FieldContext fieldContext = context.buildFieldContext(mappedField); ValuesSourceType vstype = fieldContext.indexFieldData().getValuesSourceType(); return new ValuesSourceConfig(vstype, fieldContext, false, null, null, null, null, null, context); } @@ -347,18 +347,18 @@ public FieldContext fieldContext() { * and then cast up to a double. Used to correct precision errors. */ public DoubleUnaryOperator reduceToStoredPrecisionFunction() { - if (fieldContext() != null && fieldType() instanceof NumberFieldMapper.NumberFieldType) { - return ((NumberFieldMapper.NumberFieldType) fieldType())::reduceToStoredPrecision; + if (fieldContext() != null && mappedField().type() instanceof NumberFieldMapper.NumberFieldType) { + return ((NumberFieldMapper.NumberFieldType) mappedField().type())::reduceToStoredPrecision; } return (value) -> value; } /** - * Convenience method for looking up the mapped field type backing this values source, if it exists. + * Convenience method for looking up the mapped field backing this values source, if it exists. */ @Nullable - public MappedFieldType fieldType() { - return fieldContext == null ? null : fieldContext.fieldType(); + public MappedField mappedField() { + return fieldContext == null ? null : fieldContext.mappedField(); } public AggregationScript.LeafFactory script() { @@ -424,7 +424,7 @@ public boolean hasOrdinals() { */ @Nullable public Function getPointReaderOrNull() { - return alignesWithSearchIndex() ? fieldType().pointReaderIfPossible() : null; + return alignesWithSearchIndex() ? mappedField().pointReaderIfPossible() : null; } /** @@ -434,7 +434,7 @@ public Function getPointReaderOrNull() { * the ordering. 
*/ public boolean alignesWithSearchIndex() { - return script() == null && missing() == null && fieldType() != null && fieldType().isIndexed(); + return script() == null && missing() == null && mappedField() != null && mappedField().isIndexed(); } /** @@ -445,9 +445,9 @@ public String getDescription() { return "Script yielding [" + (scriptValueType != null ? scriptValueType.getPreferredName() : "unknown type") + "]"; } - MappedFieldType fieldType = fieldType(); - if (fieldType != null) { - return "Field [" + fieldType.name() + "] of type [" + fieldType.typeName() + "]"; + MappedField mappedField = mappedField(); + if (mappedField != null) { + return "Field [" + mappedField.name() + "] of type [" + mappedField.typeName() + "]"; } return "unmapped field"; } diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java index 03882eba07c41..3f27adfc05a90 100644 --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType.CollapseType; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -186,24 +186,24 @@ public int hashCode() { } public CollapseContext build(SearchExecutionContext searchExecutionContext) { - MappedFieldType fieldType = searchExecutionContext.getFieldType(field); - if (fieldType == null) { + MappedField mappedField = searchExecutionContext.getMappedField(field); + if (mappedField == null) { throw new IllegalArgumentException("no mapping found for 
`" + field + "` in order to collapse on"); } - if (fieldType.collapseType() == CollapseType.NONE) { + if (mappedField.collapseType() == CollapseType.NONE) { throw new IllegalArgumentException( - "collapse is not supported for the field [" + fieldType.name() + "] of the type [" + fieldType.typeName() + "]" + "collapse is not supported for the field [" + mappedField.name() + "] of the type [" + mappedField.typeName() + "]" ); } - if (fieldType.hasDocValues() == false) { + if (mappedField.hasDocValues() == false) { throw new IllegalArgumentException("cannot collapse on field `" + field + "` without `doc_values`"); } - if (fieldType.isIndexed() == false && (innerHits != null && innerHits.isEmpty() == false)) { + if (mappedField.isIndexed() == false && (innerHits != null && innerHits.isEmpty() == false)) { throw new IllegalArgumentException( "cannot expand `inner_hits` for collapse field `" + field + "`, " + "only indexed field can retrieve `inner_hits`" ); } - return new CollapseContext(field, fieldType, innerHits); + return new CollapseContext(field, mappedField, innerHits); } } diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java index 62d7f7cc74cd4..8f92f88bfdf9e 100644 --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java @@ -9,7 +9,7 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Sort; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType.CollapseType; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.lucene.grouping.SinglePassGroupingCollector; @@ -21,12 +21,12 @@ */ public class CollapseContext { private final String fieldName; - private final MappedFieldType fieldType; + 
private final MappedField mappedField; private final List innerHits; - public CollapseContext(String fieldName, MappedFieldType fieldType, List innerHits) { + public CollapseContext(String fieldName, MappedField mappedField, List innerHits) { this.fieldName = fieldName; - this.fieldType = fieldType; + this.mappedField = mappedField; this.innerHits = innerHits; } @@ -37,9 +37,9 @@ public String getFieldName() { return fieldName; } - /** The field type used for collapsing **/ - public MappedFieldType getFieldType() { - return fieldType; + /** The mapped field used for collapsing **/ + public MappedField getMappedField() { + return mappedField; } /** The inner hit options to expand the collapsed results **/ @@ -48,10 +48,10 @@ public List getInnerHit() { } public SinglePassGroupingCollector createTopDocs(Sort sort, int topN, FieldDoc after) { - if (fieldType.collapseType() == CollapseType.KEYWORD) { - return SinglePassGroupingCollector.createKeyword(fieldName, fieldType, sort, topN, after); - } else if (fieldType.collapseType() == CollapseType.NUMERIC) { - return SinglePassGroupingCollector.createNumeric(fieldName, fieldType, sort, topN, after); + if (mappedField.collapseType() == CollapseType.KEYWORD) { + return SinglePassGroupingCollector.createKeyword(fieldName, mappedField, sort, topN, after); + } else if (mappedField.collapseType() == CollapseType.NUMERIC) { + return SinglePassGroupingCollector.createNumeric(fieldName, mappedField, sort, topN, after); } else { throw new IllegalStateException("collapse is not supported on this field type"); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 79492167596d3..cd899fe7247ba 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -22,7 +22,7 @@ import org.elasticsearch.core.Tuple; import 
org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.SearchExecutionContext; @@ -266,8 +266,8 @@ private static FieldsVisitor createStoredFieldsVisitor(SearchContext context, Ma SearchExecutionContext searchExecutionContext = context.getSearchExecutionContext(); Collection fieldNames = searchExecutionContext.getMatchingFieldNames(fieldNameOrPattern); for (String fieldName : fieldNames) { - MappedFieldType fieldType = searchExecutionContext.getFieldType(fieldName); - String storedField = fieldType.name(); + MappedField mappedField = searchExecutionContext.getMappedField(fieldName); + String storedField = mappedField.name(); Set requestedFields = storedToRequestedFields.computeIfAbsent(storedField, key -> new HashSet<>()); requestedFields.add(fieldName); } @@ -344,7 +344,7 @@ private static HitContext prepareNonNestedHitContext( return new HitContext(hit, subReaderContext, subDocId); } else { SearchHit hit; - loadStoredFields(context.getSearchExecutionContext()::getFieldType, profiler, fieldReader, fieldsVisitor, subDocId); + loadStoredFields(context.getSearchExecutionContext()::getMappedField, profiler, fieldReader, fieldsVisitor, subDocId); if (fieldsVisitor.fields().isEmpty() == false) { Map docFields = new HashMap<>(); Map metaFields = new HashMap<>(); @@ -418,7 +418,7 @@ private static HitContext prepareNestedHitContext( } } else { FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); - loadStoredFields(searchExecutionContext::getFieldType, profiler, storedFieldReader, rootFieldsVisitor, nestedInfo.rootDoc()); + loadStoredFields(searchExecutionContext::getMappedField, profiler, storedFieldReader, rootFieldsVisitor, nestedInfo.rootDoc()); 
rootId = rootFieldsVisitor.id(); if (needSource) { @@ -436,7 +436,7 @@ private static HitContext prepareNestedHitContext( Map metaFields = emptyMap(); if (context.hasStoredFields() && context.storedFieldsContext().fieldNames().isEmpty() == false) { FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false); - loadStoredFields(searchExecutionContext::getFieldType, profiler, storedFieldReader, nestedFieldsVisitor, nestedInfo.doc()); + loadStoredFields(searchExecutionContext::getMappedField, profiler, storedFieldReader, nestedFieldsVisitor, nestedInfo.doc()); if (nestedFieldsVisitor.fields().isEmpty() == false) { docFields = new HashMap<>(); metaFields = new HashMap<>(); @@ -480,7 +480,7 @@ private static HitContext prepareNestedHitContext( } private static void loadStoredFields( - Function fieldTypeLookup, + Function mappedFieldLookup, Profiler profileListener, CheckedBiConsumer fieldReader, FieldsVisitor fieldVisitor, @@ -490,7 +490,7 @@ private static void loadStoredFields( profileListener.startLoadingStoredFields(); fieldVisitor.reset(); fieldReader.accept(docId, fieldVisitor); - fieldVisitor.postProcess(fieldTypeLookup); + fieldVisitor.postProcess(mappedFieldLookup); } finally { profileListener.stopLoadingStoredFields(); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index 8515bff1049d9..03490e3e36ae9 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -10,7 +10,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.index.mapper.DocValueFetcher; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import 
org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -40,13 +40,13 @@ public FetchSubPhaseProcessor getProcessor(FetchContext context) { */ List fields = new ArrayList<>(); for (FieldAndFormat fieldAndFormat : context.docValuesContext().fields()) { - MappedFieldType ft = context.getSearchExecutionContext().getFieldType(fieldAndFormat.field); - if (ft == null) { + MappedField mappedField = context.getSearchExecutionContext().getMappedField(fieldAndFormat.field); + if (mappedField == null) { continue; } ValueFetcher fetcher = new DocValueFetcher( - ft.docValueFormat(fieldAndFormat.format, null), - context.searchLookup().getForField(ft) + mappedField.docValueFormat(fieldAndFormat.format, null), + context.searchLookup().getForField(mappedField) ); fields.add(new DocValueField(fieldAndFormat.field, fetcher)); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java index 4a8f341837a97..023055a4e0624 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NestedValueFetcher; import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.query.SearchExecutionContext; @@ -70,7 +70,7 @@ private static FieldFetcher create( } for (String field : context.getMatchingFieldNames(fieldPattern)) { - MappedFieldType ft = context.getFieldType(field); + MappedField mappedField = context.getMappedField(field); // we want 
to skip metadata fields if we have a wildcard pattern if (context.isMetadataField(field) && isWildcardPattern) { continue; @@ -95,7 +95,7 @@ private static FieldFetcher create( if (nestedParentPath == null) { ValueFetcher valueFetcher; try { - valueFetcher = ft.valueFetcher(context, fieldAndFormat.format); + valueFetcher = mappedField.valueFetcher(context, fieldAndFormat.format); } catch (IllegalArgumentException e) { StringBuilder error = new StringBuilder("error fetching [").append(field).append(']'); if (isWildcardPattern) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index 9893d07af7979..98b389507396c 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.lucene.search.vectorhighlight.CustomFieldQuery; @@ -67,11 +68,11 @@ public FastVectorHighlighter(Settings settings) { public HighlightField highlight(FieldHighlightContext fieldContext) throws IOException { SearchHighlightContext.Field field = fieldContext.field; FetchSubPhase.HitContext hitContext = fieldContext.hitContext; - MappedFieldType fieldType = fieldContext.fieldType; + MappedField mappedField = fieldContext.mappedField; boolean forceSource = fieldContext.forceSource; boolean fixBrokenAnalysis = fieldContext.context.containsBrokenAnalysis(fieldContext.fieldName); - if (canHighlight(fieldType) == false) { + if 
(canHighlight(mappedField.type()) == false) { throw new IllegalArgumentException( "the field [" + fieldContext.fieldName @@ -85,7 +86,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc fieldContext.cache.put(CACHE_KEY, new HighlighterEntry()); } HighlighterEntry cache = (HighlighterEntry) fieldContext.cache.get(CACHE_KEY); - FieldHighlightEntry entry = cache.fields.get(fieldType); + FieldHighlightEntry entry = cache.fields.get(mappedField); if (entry == null) { FragListBuilder fragListBuilder; if (field.fieldOptions().numberOfFragments() == 0) { @@ -98,7 +99,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc Function fragmentsBuilderSupplier = fragmentsBuilderSupplier( field, - fieldType, + mappedField, fieldContext.context, forceSource, fixBrokenAnalysis @@ -137,7 +138,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc cache.fvh = new org.apache.lucene.search.vectorhighlight.FastVectorHighlighter(); } CustomFieldQuery.highlightFilters.set(field.fieldOptions().highlightFilter()); - cache.fields.put(fieldType, entry); + cache.fields.put(mappedField, entry); } final FieldQuery fieldQuery; if (field.fieldOptions().requireFieldMatch()) { @@ -162,7 +163,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc fieldQuery, hitContext.reader(), hitContext.docId(), - fieldType.name(), + mappedField.name(), field.fieldOptions().matchedFields(), fragmentCharSize, numberOfFragments, @@ -177,7 +178,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc fieldQuery, hitContext.reader(), hitContext.docId(), - fieldType.name(), + mappedField.name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, @@ -201,7 +202,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc fragments = fragmentsBuilder.createFragments( hitContext.reader(), hitContext.docId(), - fieldType.name(), + 
mappedField.name(), fieldFragList, 1, field.fieldOptions().preTags(), @@ -218,7 +219,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc private Function fragmentsBuilderSupplier( SearchHighlightContext.Field field, - MappedFieldType fieldType, + MappedField mappedField, FetchContext fetchContext, boolean forceSource, boolean fixBrokenAnalysis @@ -226,12 +227,12 @@ private Function fragmentsBuilderSupplier( BoundaryScanner boundaryScanner = getBoundaryScanner(field); FieldOptions options = field.fieldOptions(); Function supplier; - if (forceSource == false && fieldType.isStored()) { + if (forceSource == false && mappedField.isStored()) { if (options.numberOfFragments() != 0 && options.scoreOrdered()) { supplier = ignored -> new ScoreOrderFragmentsBuilder(options.preTags(), options.postTags(), boundaryScanner); } else { supplier = ignored -> new SimpleFragmentsBuilder( - fieldType, + mappedField, fixBrokenAnalysis, options.preTags(), options.postTags(), @@ -241,7 +242,7 @@ private Function fragmentsBuilderSupplier( } else { if (options.numberOfFragments() != 0 && options.scoreOrdered()) { supplier = lookup -> new SourceScoreOrderFragmentsBuilder( - fieldType, + mappedField, fetchContext, fixBrokenAnalysis, lookup, @@ -251,7 +252,7 @@ private Function fragmentsBuilderSupplier( ); } else { supplier = lookup -> new SourceSimpleFragmentsBuilder( - fieldType, + mappedField, fetchContext, fixBrokenAnalysis, lookup, @@ -315,6 +316,6 @@ private static class FieldHighlightEntry { private static class HighlighterEntry { public org.apache.lucene.search.vectorhighlight.FastVectorHighlighter fvh; - public Map fields = new HashMap<>(); + public Map fields = new HashMap<>(); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java index 4341c904fa92b..68e86e86ec657 100644 --- 
a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java @@ -8,7 +8,7 @@ package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -18,7 +18,7 @@ public class FieldHighlightContext { public final String fieldName; public final SearchHighlightContext.Field field; - public final MappedFieldType fieldType; + public final MappedField mappedField; public final FetchContext context; public final FetchSubPhase.HitContext hitContext; public final Query query; @@ -28,7 +28,7 @@ public class FieldHighlightContext { public FieldHighlightContext( String fieldName, SearchHighlightContext.Field field, - MappedFieldType fieldType, + MappedField mappedField, FetchContext context, FetchSubPhase.HitContext hitContext, Query query, @@ -37,7 +37,7 @@ public FieldHighlightContext( ) { this.fieldName = fieldName; this.field = field; - this.fieldType = fieldType; + this.mappedField = mappedField; this.context = context; this.hitContext = hitContext; this.query = query; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 4301e8734fcf9..a0da919cf29f1 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -11,7 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Query; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import 
org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -107,7 +107,7 @@ private Map> contextBuilders boolean fieldNameContainsWildcards = field.field().contains("*"); for (String fieldName : fieldNamesToHighlight) { - MappedFieldType fieldType = context.getSearchExecutionContext().getFieldType(fieldName); + MappedField mappedField = context.getSearchExecutionContext().getMappedField(fieldName); // We should prevent highlighting if a field is anything but a text, match_only_text, // or keyword field. @@ -119,12 +119,12 @@ private Map> contextBuilders // If the field was explicitly given we assume that whoever issued the query knew // what they were doing and try to highlight anyway. if (fieldNameContainsWildcards) { - if (fieldType.typeName().equals(TextFieldMapper.CONTENT_TYPE) == false - && fieldType.typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false - && fieldType.typeName().equals("match_only_text") == false) { + if (mappedField.typeName().equals(TextFieldMapper.CONTENT_TYPE) == false + && mappedField.typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false + && mappedField.typeName().equals("match_only_text") == false) { continue; } - if (highlighter.canHighlight(fieldType) == false) { + if (highlighter.canHighlight(mappedField.type()) == false) { continue; } } @@ -135,9 +135,9 @@ private Map> contextBuilders builders.put( fieldName, hc -> new FieldHighlightContext( - fieldType.name(), + mappedField.name(), field, - fieldType, + mappedField, context, hc, highlightQuery == null ? 
query : highlightQuery, diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java index a8b124a19d7ad..2a0d306edeb9b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java @@ -11,7 +11,7 @@ import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.highlight.SimpleHTMLEncoder; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -38,18 +38,18 @@ private HighlightUtils() { * Load field values for highlighting. 
*/ public static List loadFieldValues( - MappedFieldType fieldType, + MappedField mappedField, SearchExecutionContext searchContext, FetchSubPhase.HitContext hitContext, boolean forceSource ) throws IOException { - if (forceSource == false && fieldType.isStored()) { - CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false); + if (forceSource == false && mappedField.isStored()) { + CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(mappedField.name()), false); hitContext.reader().document(hitContext.docId(), fieldVisitor); - List textsToHighlight = fieldVisitor.fields().get(fieldType.name()); + List textsToHighlight = fieldVisitor.fields().get(mappedField.name()); return Objects.requireNonNullElse(textsToHighlight, Collections.emptyList()); } - ValueFetcher fetcher = fieldType.valueFetcher(searchContext, null); + ValueFetcher fetcher = mappedField.valueFetcher(searchContext, null); fetcher.setNextReader(hitContext.readerContext()); return fetcher.fetchValues(hitContext.sourceLookup(), new ArrayList()); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index 984668ece9ac3..e4330260e99c7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -58,7 +59,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc 
SearchHighlightContext.Field field = fieldContext.field; FetchContext context = fieldContext.context; FetchSubPhase.HitContext hitContext = fieldContext.hitContext; - MappedFieldType fieldType = fieldContext.fieldType; + MappedField mappedField = fieldContext.mappedField; Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; @@ -66,15 +67,15 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc fieldContext.cache.put(CACHE_KEY, new HashMap<>()); } @SuppressWarnings("unchecked") - Map cache = (Map< - MappedFieldType, + Map cache = (Map< + MappedField, org.apache.lucene.search.highlight.Highlighter>) fieldContext.cache.get(CACHE_KEY); - org.apache.lucene.search.highlight.Highlighter entry = cache.get(fieldType); + org.apache.lucene.search.highlight.Highlighter entry = cache.get(mappedField); if (entry == null) { QueryScorer queryScorer = new CustomQueryScorer( fieldContext.query, - field.fieldOptions().requireFieldMatch() ? fieldType.name() : null + field.fieldOptions().requireFieldMatch() ? 
mappedField.name() : null ); queryScorer.setExpandMultiTermQuery(true); Fragmenter fragmenter; @@ -98,7 +99,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc // always highlight across all data entry.setMaxDocCharsToAnalyze(Integer.MAX_VALUE); - cache.put(fieldType, entry); + cache.put(mappedField, entry); } // a HACK to make highlighter do highlighting, even though its using the single frag list builder @@ -113,7 +114,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc ); textsToHighlight = HighlightUtils.loadFieldValues( - fieldType, + mappedField, context.getSearchExecutionContext(), hitContext, fieldContext.forceSource @@ -121,7 +122,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc int fragNumBase = 0; for (Object textToHighlight : textsToHighlight) { - String text = convertFieldValue(fieldType, textToHighlight); + String text = convertFieldValue(mappedField.type(), textToHighlight); int textLength = text.length(); if ((queryMaxAnalyzedOffset == null || queryMaxAnalyzedOffset > maxAnalyzedOffset) && (textLength > maxAnalyzedOffset)) { throw new IllegalArgumentException( @@ -147,7 +148,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc ); } - try (TokenStream tokenStream = analyzer.tokenStream(fieldType.name(), text)) { + try (TokenStream tokenStream = analyzer.tokenStream(mappedField.name(), text)) { if (tokenStream.hasAttribute(CharTermAttribute.class) == false || tokenStream.hasAttribute(OffsetAttribute.class) == false) { // can't perform highlighting if the stream has no terms (binary token stream) or no offsets @@ -203,7 +204,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc if (noMatchSize > 0 && textsToHighlight.size() > 0) { // Pull an excerpt from the beginning of the string but make sure to split the string on a term boundary. 
String fieldContents = textsToHighlight.get(0).toString(); - int end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents); + int end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mappedField.name(), fieldContents); if (end > 0) { return new HighlightField(fieldContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) }); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java index e365417834710..198e2f376fa63 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java @@ -11,7 +11,7 @@ import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; /** * Direct Subclass of Lucene's org.apache.lucene.search.vectorhighlight.SimpleFragmentsBuilder @@ -19,18 +19,18 @@ */ public class SimpleFragmentsBuilder extends org.apache.lucene.search.vectorhighlight.SimpleFragmentsBuilder { - protected final MappedFieldType fieldType; + protected final MappedField mappedField; private final boolean fixBrokenAnalysis; public SimpleFragmentsBuilder( - MappedFieldType fieldType, + MappedField mappedField, boolean fixBrokenAnalysis, String[] preTags, String[] postTags, BoundaryScanner boundaryScanner ) { super(preTags, postTags, boundaryScanner); - this.fieldType = fieldType; + this.mappedField = mappedField; this.fixBrokenAnalysis = fixBrokenAnalysis; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java 
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java index 0c17068718ea7..844f7267a1968 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo; import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.lookup.SourceLookup; @@ -26,13 +26,13 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder { private final FetchContext fetchContext; - private final MappedFieldType fieldType; + private final MappedField mappedField; private final SourceLookup sourceLookup; private final ValueFetcher valueFetcher; private final boolean fixBrokenAnalysis; public SourceScoreOrderFragmentsBuilder( - MappedFieldType fieldType, + MappedField mappedField, FetchContext fetchContext, boolean fixBrokenAnalysis, SourceLookup sourceLookup, @@ -42,9 +42,9 @@ public SourceScoreOrderFragmentsBuilder( ) { super(preTags, postTags, boundaryScanner); this.fetchContext = fetchContext; - this.fieldType = fieldType; + this.mappedField = mappedField; this.sourceLookup = sourceLookup; - this.valueFetcher = fieldType.valueFetcher(fetchContext.getSearchExecutionContext(), null); + this.valueFetcher = mappedField.valueFetcher(fetchContext.getSearchExecutionContext(), null); this.fixBrokenAnalysis = fixBrokenAnalysis; } @@ -61,7 +61,7 @@ protected Field[] getFields(IndexReader reader, int docId, String fieldName) thr } Field[] 
fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(fieldType.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(mappedField.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java index 5c1c53ae73056..9cfb36603ef31 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java @@ -11,7 +11,7 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.vectorhighlight.BoundaryScanner; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.lookup.SourceLookup; @@ -27,7 +27,7 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder { private final ValueFetcher valueFetcher; public SourceSimpleFragmentsBuilder( - MappedFieldType fieldType, + MappedField mappedField, FetchContext fetchContext, boolean fixBrokenAnalysis, SourceLookup sourceLookup, @@ -35,10 +35,10 @@ public SourceSimpleFragmentsBuilder( String[] postTags, BoundaryScanner boundaryScanner ) { - super(fieldType, fixBrokenAnalysis, preTags, postTags, boundaryScanner); + super(mappedField, fixBrokenAnalysis, preTags, postTags, boundaryScanner); this.fetchContext = fetchContext; this.sourceLookup = sourceLookup; - this.valueFetcher = fieldType.valueFetcher(fetchContext.getSearchExecutionContext(), null); + this.valueFetcher = 
mappedField.valueFetcher(fetchContext.getSearchExecutionContext(), null); } public static final Field[] EMPTY_FIELDS = new Field[0]; @@ -59,7 +59,7 @@ protected Field[] getFields(IndexReader reader, int docId, String fieldName) thr } Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(fieldType.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(mappedField.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index d9833fd8703b5..0b25405f09e32 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.query.SearchExecutionContext; @@ -59,7 +60,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc cache.put(fieldContext.fieldName, buildHighlighter(fieldContext)); } CustomUnifiedHighlighter highlighter = cache.get(fieldContext.fieldName); - MappedFieldType fieldType = fieldContext.fieldType; + MappedField mappedField = fieldContext.mappedField; SearchHighlightContext.Field field = fieldContext.field; FetchSubPhase.HitContext hitContext = fieldContext.hitContext; @@ -67,7 +68,7 @@ public HighlightField highlight(FieldHighlightContext fieldContext) throws IOExc List fieldValues = loadFieldValues( highlighter, 
fieldContext.context.getSearchExecutionContext(), - fieldType, + mappedField, hitContext, fieldContext.forceSource ); @@ -117,12 +118,12 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) th ); PassageFormatter passageFormatter = getPassageFormatter(fieldContext.hitContext, fieldContext.field, encoder); IndexSearcher searcher = fieldContext.context.searcher(); - OffsetSource offsetSource = getOffsetSource(fieldContext.context, fieldContext.fieldType); + OffsetSource offsetSource = getOffsetSource(fieldContext.context, fieldContext.mappedField.type()); BreakIterator breakIterator; int higlighterNumberOfFragments; if (numberOfFragments == 0 // non-tokenized fields should not use any break iterator (ignore boundaryScannerType) - || fieldContext.fieldType.getTextSearchInfo().isTokenized() == false) { + || fieldContext.mappedField.getTextSearchInfo().isTokenized() == false) { /* * We use a control char to separate values, which is the * only char that the custom break iterator breaks the text @@ -168,13 +169,13 @@ protected Analyzer wrapAnalyzer(Analyzer analyzer, Integer maxAnalyzedOffset) { protected List loadFieldValues( CustomUnifiedHighlighter highlighter, SearchExecutionContext searchContext, - MappedFieldType fieldType, + MappedField mappedField, FetchSubPhase.HitContext hitContext, boolean forceSource ) throws IOException { - return HighlightUtils.loadFieldValues(fieldType, searchContext, hitContext, forceSource) + return HighlightUtils.loadFieldValues(mappedField, searchContext, hitContext, forceSource) .stream() - .map((s) -> convertFieldValue(fieldType, s)) + .map((s) -> convertFieldValue(mappedField.type(), s)) .toList(); } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java index 99a6f929ec6eb..c2085fb4d3218 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java +++ 
b/server/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java @@ -7,7 +7,7 @@ */ package org.elasticsearch.search.lookup; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import java.util.ArrayList; import java.util.List; @@ -15,9 +15,9 @@ public class FieldLookup { - // we can cached fieldType completely per name, since its on an index/shard level (the lookup, and it does not change within the scope + // we can cache mapped field completely per name, since its on an index/shard level (the lookup, and it does not change within the scope // of a search request) - private final MappedFieldType fieldType; + private final MappedField mappedField; private Map> fields; @@ -29,12 +29,12 @@ public class FieldLookup { private boolean valuesLoaded = false; - FieldLookup(MappedFieldType fieldType) { - this.fieldType = fieldType; + FieldLookup(MappedField mappedField) { + this.mappedField = mappedField; } - MappedFieldType fieldType() { - return fieldType; + MappedField mappedField() { + return mappedField; } public Map> fields() { @@ -72,7 +72,7 @@ public Object getValue() { } valueLoaded = true; value = null; - List values = fields.get(fieldType.name()); + List values = fields.get(mappedField.name()); return values != null ? 
value = values.get(0) : null; } @@ -82,6 +82,6 @@ public List getValues() { } valuesLoaded = true; values.clear(); - return values = fields().get(fieldType.name()); + return values = fields().get(mappedField.name()); } } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java index 8a71978db46ce..00083a5eabf53 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.script.field.DocValuesScriptFieldFactory; import org.elasticsearch.script.field.Field; @@ -26,8 +26,8 @@ public class LeafDocLookup implements Map> { - private final Function fieldTypeLookup; - private final Function> fieldDataLookup; + private final Function mappedFieldLookup; + private final Function> fieldDataLookup; private final LeafReaderContext reader; private int docId = -1; @@ -35,11 +35,11 @@ public class LeafDocLookup implements Map> { private final Map localCacheScriptFieldData = Maps.newMapWithExpectedSize(4); LeafDocLookup( - Function fieldTypeLookup, - Function> fieldDataLookup, + Function mappedFieldLookup, + Function> fieldDataLookup, LeafReaderContext reader ) { - this.fieldTypeLookup = fieldTypeLookup; + this.mappedFieldLookup = mappedFieldLookup; this.fieldDataLookup = fieldDataLookup; this.reader = reader; } @@ -52,9 +52,9 @@ public DocValuesScriptFieldFactory getScriptFieldFactory(String fieldName) { DocValuesScriptFieldFactory factory = localCacheScriptFieldData.get(fieldName); if (factory == null) { - final MappedFieldType fieldType = fieldTypeLookup.apply(fieldName); + 
final MappedField mappedField = mappedFieldLookup.apply(fieldName); - if (fieldType == null) { + if (mappedField == null) { throw new IllegalArgumentException("No field found for [" + fieldName + "] in mapping"); } @@ -63,7 +63,7 @@ public DocValuesScriptFieldFactory getScriptFieldFactory(String fieldName) { factory = AccessController.doPrivileged(new PrivilegedAction() { @Override public DocValuesScriptFieldFactory run() { - return fieldDataLookup.apply(fieldType).load(reader).getScriptFieldFactory(fieldName); + return fieldDataLookup.apply(mappedField).load(reader).getScriptFieldFactory(fieldName); } }); @@ -93,7 +93,7 @@ public ScriptDocValues get(Object key) { public boolean containsKey(Object key) { String fieldName = key.toString(); DocValuesScriptFieldFactory docValuesFieldFactory = localCacheScriptFieldData.get(fieldName); - return docValuesFieldFactory != null || fieldTypeLookup.apply(fieldName) != null; + return docValuesFieldFactory != null || mappedFieldLookup.apply(fieldName) != null; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookup.java index 3371f2ea1735f..5f82b495be4a1 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookup.java @@ -11,7 +11,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,7 @@ @SuppressWarnings({ "unchecked", "rawtypes" }) public class LeafStoredFieldsLookup implements Map { - private final Function fieldTypeLookup; + private final Function mappedFieldLookup; private final 
CheckedBiConsumer reader; private int docId = -1; @@ -35,10 +35,10 @@ public class LeafStoredFieldsLookup implements Map { private final Map cachedFieldData = new HashMap<>(); LeafStoredFieldsLookup( - Function fieldTypeLookup, + Function mappedFieldLookup, CheckedBiConsumer reader ) { - this.fieldTypeLookup = fieldTypeLookup; + this.mappedFieldLookup = mappedFieldLookup; this.reader = reader; } @@ -118,22 +118,22 @@ public boolean containsValue(Object value) { private FieldLookup loadFieldData(String name) { FieldLookup data = cachedFieldData.get(name); if (data == null) { - MappedFieldType fieldType = fieldTypeLookup.apply(name); - if (fieldType == null) { + MappedField mappedField = mappedFieldLookup.apply(name); + if (mappedField == null) { throw new IllegalArgumentException("No field found for [" + name + "] in mapping"); } - data = new FieldLookup(fieldType); + data = new FieldLookup(mappedField); cachedFieldData.put(name, data); } if (data.fields() == null) { List values = new ArrayList<>(2); - SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values); + SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.mappedField(), values); try { reader.accept(docId, visitor); } catch (IOException e) { throw new ElasticsearchParseException("failed to load field [{}]", e, name); } - data.fields(singletonMap(data.fieldType().name(), values)); + data.fields(singletonMap(data.mappedField().name(), values)); } return data; } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java index f2e552f2cf3f6..5d2ce38fc675a 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SearchLookup.java @@ -10,7 +10,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.index.fielddata.IndexFieldData; -import 
org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import java.util.Collections; import java.util.LinkedHashSet; @@ -40,18 +40,18 @@ public class SearchLookup { */ private final Set fieldChain; private final SourceLookup sourceLookup; - private final Function fieldTypeLookup; - private final BiFunction, IndexFieldData> fieldDataLookup; + private final Function mappedFieldLookup; + private final BiFunction, IndexFieldData> fieldDataLookup; /** * Create the top level field lookup for a search request. Provides a way to look up fields from doc_values, * stored fields, or _source. */ public SearchLookup( - Function fieldTypeLookup, - BiFunction, IndexFieldData> fieldDataLookup + Function mappedFieldLookup, + BiFunction, IndexFieldData> fieldDataLookup ) { - this.fieldTypeLookup = fieldTypeLookup; + this.mappedFieldLookup = mappedFieldLookup; this.fieldChain = Collections.emptySet(); this.sourceLookup = new SourceLookup(); this.fieldDataLookup = fieldDataLookup; @@ -67,7 +67,7 @@ public SearchLookup( private SearchLookup(SearchLookup searchLookup, Set fieldChain) { this.fieldChain = Collections.unmodifiableSet(fieldChain); this.sourceLookup = searchLookup.sourceLookup; - this.fieldTypeLookup = searchLookup.fieldTypeLookup; + this.mappedFieldLookup = searchLookup.mappedFieldLookup; this.fieldDataLookup = searchLookup.fieldDataLookup; } @@ -95,18 +95,18 @@ public final SearchLookup forkAndTrackFieldReferences(String field) { public LeafSearchLookup getLeafSearchLookup(LeafReaderContext context) { return new LeafSearchLookup( context, - new LeafDocLookup(fieldTypeLookup, this::getForField, context), + new LeafDocLookup(mappedFieldLookup, this::getForField, context), sourceLookup, - new LeafStoredFieldsLookup(fieldTypeLookup, (doc, visitor) -> context.reader().document(doc, visitor)) + new LeafStoredFieldsLookup(mappedFieldLookup, (doc, visitor) -> context.reader().document(doc, visitor)) ); } - public MappedFieldType 
fieldType(String fieldName) { - return fieldTypeLookup.apply(fieldName); + public MappedField mappedField(String fieldName) { + return mappedFieldLookup.apply(fieldName); } - public IndexFieldData getForField(MappedFieldType fieldType) { - return fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())); + public IndexFieldData getForField(MappedField mappedField) { + return fieldDataLookup.apply(mappedField, () -> forkAndTrackFieldReferences(mappedField.name())); } public SourceLookup source() { diff --git a/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java b/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java index 30a9e5b6d5ad6..57fe0fe9a3b48 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java @@ -20,7 +20,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.xcontent.ObjectParser; @@ -253,14 +253,14 @@ private Query createSliceQuery(int id, int max, SearchExecutionContext context, } return new TermsSliceQuery(IdFieldMapper.NAME, id, max); } else { - MappedFieldType type = context.getFieldType(field); - if (type == null) { + MappedField mappedField = context.getMappedField(field); + if (mappedField == null) { throw new IllegalArgumentException("field " + field + " not found"); } - if (type.hasDocValues() == false) { + if (mappedField.hasDocValues() == false) { throw new IllegalArgumentException("cannot load numeric doc values on " + field); } else { - IndexFieldData ifm = context.getForField(type); + IndexFieldData ifm 
= context.getForField(mappedField); if (ifm instanceof IndexNumericFieldData == false) { throw new IllegalArgumentException("cannot load numeric doc values on " + field); } diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 46c88b8915782..d87bd514999a6 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; @@ -356,13 +357,13 @@ public SortFieldAndFormat build(SearchExecutionContext context) throws IOExcepti return new SortFieldAndFormat(new ShardDocSortField(context.getShardRequestIndex(), reverse), DocValueFormat.RAW); } - MappedFieldType fieldType = context.getFieldType(fieldName); - Nested nested = nested(context, fieldType); - if (fieldType == null) { - fieldType = resolveUnmappedType(context); + MappedField mappedField = context.getMappedField(fieldName); + Nested nested = nested(context, mappedField); + if (mappedField == null) { + mappedField = resolveUnmappedField(context); } - IndexFieldData fieldData = context.getForField(fieldType); + IndexFieldData fieldData = context.getForField(mappedField); if (fieldData instanceof IndexNumericFieldData == false && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); @@ -373,7 +374,7 @@ public SortFieldAndFormat 
build(SearchExecutionContext context) throws IOExcepti if (fieldData instanceof IndexNumericFieldData == false) { throw new QueryShardException( context, - "[numeric_type] option cannot be set on a non-numeric field, got " + fieldType.typeName() + "[numeric_type] option cannot be set on a non-numeric field, got " + mappedField.typeName() ); } IndexNumericFieldData numericFieldData = (IndexNumericFieldData) fieldData; @@ -386,7 +387,7 @@ public SortFieldAndFormat build(SearchExecutionContext context) throws IOExcepti isNanosecond = ((IndexNumericFieldData) fieldData).getNumericType() == NumericType.DATE_NANOSECONDS; } } - DocValueFormat formatter = fieldType.docValueFormat(format, null); + DocValueFormat formatter = mappedField.docValueFormat(format, null); if (format != null) { formatter = DocValueFormat.enableFormatSortValues(formatter); } @@ -413,12 +414,12 @@ public boolean isBottomSortShardDisjoint(SearchExecutionContext context, SearchS if (canRewriteToMatchNone() == false) { return false; } - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { // unmapped return false; } - if (fieldType.isIndexed() == false) { + if (mappedField.isIndexed() == false) { return false; } DocValueFormat docValueFormat = bottomSortValues.getSortValueFormats()[0]; @@ -432,7 +433,7 @@ public boolean isBottomSortShardDisjoint(SearchExecutionContext context, SearchS Object minValue = order() == SortOrder.DESC ? bottomSortValue : null; Object maxValue = order() == SortOrder.DESC ? 
null : bottomSortValue; try { - MappedFieldType.Relation relation = fieldType.isFieldWithinQuery( + MappedFieldType.Relation relation = mappedField.isFieldWithinQuery( context.getIndexReader(), minValue, maxValue, @@ -456,13 +457,13 @@ public BucketedSort buildBucketedSort(SearchExecutionContext context, BigArrays throw new IllegalArgumentException("sorting by _doc is not supported"); } - MappedFieldType fieldType = context.getFieldType(fieldName); - Nested nested = nested(context, fieldType); - if (fieldType == null) { - fieldType = resolveUnmappedType(context); + MappedField mappedField = context.getMappedField(fieldName); + Nested nested = nested(context, mappedField); + if (mappedField == null) { + mappedField = resolveUnmappedField(context); } - IndexFieldData fieldData = context.getForField(fieldType); + IndexFieldData fieldData = context.getForField(mappedField); if (fieldData instanceof IndexNumericFieldData == false && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); @@ -471,7 +472,7 @@ public BucketedSort buildBucketedSort(SearchExecutionContext context, BigArrays if (fieldData instanceof IndexNumericFieldData == false) { throw new QueryShardException( context, - "[numeric_type] option cannot be set on a non-numeric field, got " + fieldType.typeName() + "[numeric_type] option cannot be set on a non-numeric field, got " + mappedField.typeName() ); } IndexNumericFieldData numericFieldData = (IndexNumericFieldData) fieldData; @@ -483,7 +484,7 @@ public BucketedSort buildBucketedSort(SearchExecutionContext context, BigArrays localSortMode(), nested, order, - fieldType.docValueFormat(null, null), + mappedField.docValueFormat(null, null), bucketSize, extra ); @@ -495,7 +496,7 @@ public BucketedSort buildBucketedSort(SearchExecutionContext context, BigArrays localSortMode(), nested, order, - fieldType.docValueFormat(null, 
null), + mappedField.docValueFormat(null, null), bucketSize, extra ); @@ -504,7 +505,7 @@ public BucketedSort buildBucketedSort(SearchExecutionContext context, BigArrays "error building sort for field [" + fieldName + "] of type [" - + fieldType.typeName() + + mappedField.typeName() + "] in index [" + context.index().getName() + "]: " @@ -514,11 +515,11 @@ public BucketedSort buildBucketedSort(SearchExecutionContext context, BigArrays } } - private MappedFieldType resolveUnmappedType(SearchExecutionContext context) { + private MappedField resolveUnmappedField(SearchExecutionContext context) { if (unmappedType == null) { throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on"); } - return context.buildAnonymousFieldType(unmappedType); + return context.buildAnonymousField(unmappedType); } private MultiValueMode localSortMode() { @@ -529,8 +530,8 @@ private MultiValueMode localSortMode() { return order == SortOrder.DESC ? MultiValueMode.MAX : MultiValueMode.MIN; } - private Nested nested(SearchExecutionContext context, MappedFieldType fieldType) throws IOException { - if (fieldType == null) { + private Nested nested(SearchExecutionContext context, MappedField mappedField) throws IOException { + if (mappedField == null) { return null; } if (nestedSort == null) { @@ -572,8 +573,8 @@ public static MinAndMax getMinMaxOrNull(SearchExecutionContext context, Field return null; } IndexReader reader = context.getIndexReader(); - MappedFieldType fieldType = context.getFieldType(sortField.getField()); - if (reader == null || (fieldType == null || fieldType.isIndexed() == false)) { + MappedField mappedField = context.getMappedField(sortField.getField()); + if (reader == null || (mappedField == null || mappedField.isIndexed() == false)) { return null; } switch (IndexSortConfig.getSortFieldType(sortField)) { @@ -581,11 +582,11 @@ public static MinAndMax getMinMaxOrNull(SearchExecutionContext context, Field case INT: case DOUBLE: case 
FLOAT: - return extractNumericMinAndMax(reader, sortField, fieldType, sortBuilder); + return extractNumericMinAndMax(reader, sortField, mappedField, sortBuilder); case STRING: case STRING_VAL: - if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) { - Terms terms = MultiTerms.getTerms(reader, fieldType.name()); + if (mappedField.type() instanceof KeywordFieldMapper.KeywordFieldType) { + Terms terms = MultiTerms.getTerms(reader, mappedField.name()); if (terms == null) { return null; } @@ -599,15 +600,15 @@ public static MinAndMax getMinMaxOrNull(SearchExecutionContext context, Field private static MinAndMax extractNumericMinAndMax( IndexReader reader, SortField sortField, - MappedFieldType fieldType, + MappedField mappedField, FieldSortBuilder sortBuilder ) throws IOException { - String fieldName = fieldType.name(); + String fieldName = mappedField.name(); byte[] minPackedValue = PointValues.getMinPackedValue(reader, fieldName); if (minPackedValue == null) { return null; } - if (fieldType instanceof NumberFieldType numberFieldType) { + if (mappedField.type()instanceof NumberFieldType numberFieldType) { Number minPoint = numberFieldType.parsePoint(minPackedValue); Number maxPoint = numberFieldType.parsePoint(PointValues.getMaxPackedValue(reader, fieldName)); return switch (IndexSortConfig.getSortFieldType(sortField)) { @@ -617,7 +618,7 @@ private static MinAndMax extractNumericMinAndMax( case FLOAT -> new MinAndMax<>(minPoint.floatValue(), maxPoint.floatValue()); default -> null; }; - } else if (fieldType instanceof DateFieldType dateFieldType) { + } else if (mappedField.type()instanceof DateFieldType dateFieldType) { Function dateConverter = createDateConverter(sortBuilder, dateFieldType); Long min = dateConverter.apply(minPackedValue); Long max = dateConverter.apply(PointValues.getMaxPackedValue(reader, fieldName)); diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java 
b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index cd9478e7a8a68..8ea23e6fbfa4d 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -37,7 +37,7 @@ import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.plain.AbstractLatLonPointIndexFieldData.LatLonPointIndexFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; @@ -603,8 +603,8 @@ private MultiValueMode localSortMode() { } private IndexGeoPointFieldData fieldData(SearchExecutionContext context) { - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { if (ignoreUnmapped) { return new LatLonPointIndexFieldData( fieldName, @@ -616,7 +616,7 @@ private IndexGeoPointFieldData fieldData(SearchExecutionContext context) { throw new IllegalArgumentException("failed to find mapper for [" + fieldName + "] for geo distance based sort"); } } - return context.getForField(fieldType); + return context.getForField(mappedField); } private Nested nested(SearchExecutionContext context) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index b8f3c30905b75..86a9a66b09eb2 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -29,7 +29,7 @@ import 
org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardException; @@ -254,8 +254,8 @@ public SortFieldAndFormat build(SearchExecutionContext context) throws IOExcepti if ("version".equals(this.type.toString())) { try { // TODO there must be a better way to get the field type... - MappedFieldType scriptFieldType = context.buildAnonymousFieldType(this.type.toString()); - scriptResultValueFormat = scriptFieldType.docValueFormat(null, null); + MappedField mappedField = context.buildAnonymousField(this.type.toString()); + scriptResultValueFormat = mappedField.docValueFormat(null, null); } catch (Exception e) { // "version" type is not available, fall back to RAW and sort as a string } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index a6515ae8dd2da..2cbb5111d9b17 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.xcontent.ParseField; 
@@ -291,9 +291,9 @@ protected void populateCommonFields(SearchExecutionContext context, SuggestionSe if (context.isFieldMapped(field) == false) { throw new IllegalArgumentException("no mapping found for field [" + field + "]"); } - MappedFieldType fieldType = context.getFieldType(field); + MappedField mappedField = context.getMappedField(field); if (analyzer == null) { - suggestionContext.setAnalyzer(fieldType.getTextSearchInfo().searchAnalyzer()); + suggestionContext.setAnalyzer(mappedField.getTextSearchInfo().searchAnalyzer()); } else { Analyzer luceneAnalyzer = context.getIndexAnalyzers().get(analyzer); if (luceneAnalyzer == null) { @@ -302,7 +302,7 @@ protected void populateCommonFields(SearchExecutionContext context, SuggestionSe suggestionContext.setAnalyzer(luceneAnalyzer); } - suggestionContext.setField(fieldType.name()); + suggestionContext.setField(mappedField.name()); if (size != null) { suggestionContext.setSize(size); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index c2a233645a596..94a3f44fe91e4 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -18,6 +18,7 @@ import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.CompletionFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggester; @@ -40,8 +41,9 @@ protected Suggest.Suggestion> contexts = Collections.emptyMap(); - if (fieldType.hasContextMappings()) { + if (completionFieldType.hasContextMappings()) { List rawContexts = collector.getContexts(suggestDoc.doc); if (rawContexts.size() > 0) { - contexts = 
fieldType.getContextMappings().getNamedContexts(rawContexts); + contexts = completionFieldType.getContextMappings().getNamedContexts(rawContexts); } } if (numResult++ < suggestionContext.getSize()) { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index d10b10c09d66a..90bfdba7e1691 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.mapper.CompletionFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; @@ -279,23 +279,23 @@ public SuggestionContext build(SearchExecutionContext context) throws IOExceptio if (shardSize != null) { suggestionContext.setShardSize(shardSize); } - MappedFieldType mappedFieldType = context.getFieldType(suggestionContext.getField()); - if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType == false) { + MappedField mappedField = context.getMappedField(suggestionContext.getField()); + if (mappedField.type() instanceof CompletionFieldMapper.CompletionFieldType == false) { throw new IllegalArgumentException("Field [" + suggestionContext.getField() + "] is not a completion suggest field"); } - CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType; - suggestionContext.setFieldType(type); - if (type.hasContextMappings() && contextBytes != null) 
{ + CompletionFieldMapper.CompletionFieldType completionFieldType = (CompletionFieldMapper.CompletionFieldType) mappedField.type(); + suggestionContext.setMappedField(mappedField); + if (completionFieldType.hasContextMappings() && contextBytes != null) { Map> queryContexts = parseContextBytes( contextBytes, context.getParserConfig(), - type.getContextMappings() + completionFieldType.getContextMappings() ); suggestionContext.setQueryContexts(queryContexts); } else if (contextBytes != null) { - throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context"); + throw new IllegalArgumentException("suggester [" + mappedField.name() + "] doesn't expect any context"); } - assert suggestionContext.getFieldType() != null : "no completion field type set"; + assert suggestionContext.getMappedField() != null : "no completion field type set"; return suggestionContext; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java index f9347cc2c9fa0..e654f638f3800 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionContext.java @@ -11,6 +11,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.CompletionFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.completion.context.ContextMapping; @@ -26,18 +27,20 @@ protected CompletionSuggestionContext(SearchExecutionContext searchExecutionCont super(CompletionSuggester.INSTANCE, searchExecutionContext); } - private CompletionFieldMapper.CompletionFieldType 
fieldType; + private MappedField mappedField; + CompletionFieldMapper.CompletionFieldType completionFieldType; private FuzzyOptions fuzzyOptions; private RegexOptions regexOptions; private boolean skipDuplicates; private Map> queryContexts = Collections.emptyMap(); - CompletionFieldMapper.CompletionFieldType getFieldType() { - return this.fieldType; + MappedField getMappedField() { + return this.mappedField; } - void setFieldType(CompletionFieldMapper.CompletionFieldType fieldType) { - this.fieldType = fieldType; + void setMappedField(MappedField mappedField) { + this.mappedField = mappedField; + completionFieldType = (CompletionFieldMapper.CompletionFieldType) mappedField.type(); } void setRegexOptions(RegexOptions regexOptions) { @@ -73,10 +76,9 @@ public boolean isSkipDuplicates() { } CompletionQuery toQuery() { - CompletionFieldMapper.CompletionFieldType fieldType = getFieldType(); final CompletionQuery query; if (getPrefix() != null) { - query = createCompletionQuery(getPrefix(), fieldType); + query = createCompletionQuery(getPrefix()); } else if (getRegex() != null) { if (fuzzyOptions != null) { throw new IllegalArgumentException("can not use 'fuzzy' options with 'regex"); @@ -84,23 +86,29 @@ CompletionQuery toQuery() { if (regexOptions == null) { regexOptions = RegexOptions.builder().build(); } - query = fieldType.regexpQuery(getRegex(), regexOptions.getFlagsValue(), regexOptions.getMaxDeterminizedStates()); + query = completionFieldType.regexpQuery( + mappedField.name(), + getRegex(), + regexOptions.getFlagsValue(), + regexOptions.getMaxDeterminizedStates() + ); } else if (getText() != null) { - query = createCompletionQuery(getText(), fieldType); + query = createCompletionQuery(getText()); } else { throw new IllegalArgumentException("'prefix/text' or 'regex' must be defined"); } - if (fieldType.hasContextMappings()) { - ContextMappings contextMappings = fieldType.getContextMappings(); + if (completionFieldType.hasContextMappings()) { + ContextMappings 
contextMappings = completionFieldType.getContextMappings(); return contextMappings.toContextQuery(query, queryContexts); } return query; } - private CompletionQuery createCompletionQuery(BytesRef prefix, CompletionFieldMapper.CompletionFieldType fieldType) { + private CompletionQuery createCompletionQuery(BytesRef prefix) { final CompletionQuery query; if (fuzzyOptions != null) { - query = fieldType.fuzzyQuery( + query = completionFieldType.fuzzyQuery( + mappedField.name(), prefix.utf8ToString(), Fuzziness.fromEdits(fuzzyOptions.getEditDistance()), fuzzyOptions.getFuzzyPrefixLength(), @@ -110,7 +118,7 @@ private CompletionQuery createCompletionQuery(BytesRef prefix, CompletionFieldMa fuzzyOptions.isUnicodeAware() ); } else { - query = fieldType.prefixQuery(prefix); + query = completionFieldType.prefixQuery(mappedField.name(), prefix); } return query; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index 35aa5b01fcedc..94c8c95bc485e 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -14,7 +14,7 @@ import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -130,7 +130,7 @@ public final List parseQueryContext(XContentParser parser) * Checks if the current context is consistent with the rest of the fields. 
For example, the GeoContext * should check that the field that it points to has the correct type. */ - public void validateReferences(Version indexVersionCreated, Function fieldResolver) { + public void validateReferences(Version indexVersionCreated, Function fieldResolver) { // No validation is required by default } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index f1dd6a9fe5619..35d62d9882de3 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -23,7 +23,7 @@ import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser.Token; @@ -276,10 +276,10 @@ public List toInternalQueryContexts(List } @Override - public void validateReferences(Version indexVersionCreated, Function fieldResolver) { + public void validateReferences(Version indexVersionCreated, Function fieldResolver) { if (fieldName != null) { - MappedFieldType mappedFieldType = fieldResolver.apply(fieldName); - if (mappedFieldType == null) { + MappedField mappedField = fieldResolver.apply(fieldName); + if (mappedField == null) { if (indexVersionCreated.before(Version.V_7_0_0)) { deprecationLogger.warn( DeprecationCategory.MAPPINGS, @@ -295,7 +295,7 @@ public void validateReferences(Version indexVersionCreated, Function fields = new HashMap<>(); + private final Map fields = new HashMap<>(); private void 
addIndexMinMaxTimestamps(Index index, String fieldName, long minTimeStamp, long maxTimestamp) { if (clusterState.metadata().index(index) != null) { @@ -881,7 +882,7 @@ private void addIndexMinMaxTimestamps(Index index, String fieldName, long minTim clusterState = ClusterState.builder(clusterState).metadata(metadataBuilder).build(); - fields.put(index, new DateFieldMapper.DateFieldType(fieldName)); + fields.put(index, new MappedField(fieldName, new DateFieldMapper.DateFieldType())); } private void addIndexMinMaxTimestamps(Index index, long minTimestamp, long maxTimestamp) { @@ -902,7 +903,7 @@ private void addIndexMinMaxTimestamps(Index index, long minTimestamp, long maxTi Metadata.Builder metadataBuilder = Metadata.builder(clusterState.metadata()).put(indexMetadataBuilder); clusterState = ClusterState.builder(clusterState).metadata(metadataBuilder).build(); - fields.put(index, new DateFieldMapper.DateFieldType("@timestamp")); + fields.put(index, new MappedField("@timestamp", new DateFieldMapper.DateFieldType())); } private void addIndex(Index index) { @@ -918,7 +919,7 @@ private void addIndex(Index index) { Metadata.Builder metadataBuilder = Metadata.builder(clusterState.metadata()).put(indexMetadataBuilder); clusterState = ClusterState.builder(clusterState).metadata(metadataBuilder).build(); - fields.put(index, new DateFieldMapper.DateFieldType("@timestamp")); + fields.put(index, new MappedField("@timestamp", new DateFieldMapper.DateFieldType())); } public CoordinatorRewriteContextProvider build() { diff --git a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java index 87c89f9e5fa7f..57ec74c3e30c7 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import 
org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; @@ -125,29 +126,36 @@ public void testInvalidMissing() { public void testIndexSortingNoDocValues() { IndexSettings indexSettings = indexSettings(Settings.builder().put("index.sort.field", "field").build()); - MappedFieldType fieldType = new MappedFieldType("field", false, false, false, TextSearchInfo.NONE, Collections.emptyMap()) { + MappedFieldType fieldType = new MappedFieldType(false, false, false, TextSearchInfo.NONE, Collections.emptyMap()) { @Override public String typeName() { return null; } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder( + String name, + String fullyQualifiedIndexName, + Supplier searchLookup + ) { searchLookup.get(); return null; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new UnsupportedOperationException(); } }; - Exception iae = expectThrows(IllegalArgumentException.class, () -> buildIndexSort(indexSettings, fieldType)); + Exception iae = expectThrows( + IllegalArgumentException.class, + () -> buildIndexSort(indexSettings, new MappedField("field", fieldType)) + ); assertEquals("docvalues not found for index sort field:[field]", iae.getMessage()); assertThat(iae.getCause(), instanceOf(UnsupportedOperationException.class)); 
assertEquals("index sorting not supported on runtime field [field]", iae.getCause().getMessage()); @@ -155,8 +163,11 @@ public Query termQuery(Object value, SearchExecutionContext context) { public void testSortingAgainstAliases() { IndexSettings indexSettings = indexSettings(Settings.builder().put("index.sort.field", "field").build()); - MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType("aliased"); - Exception e = expectThrows(IllegalArgumentException.class, () -> buildIndexSort(indexSettings, Map.of("field", aliased))); + MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType(); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> buildIndexSort(indexSettings, Map.of("field", new MappedField("aliased", aliased))) + ); assertEquals("Cannot use alias [field] as an index sort field", e.getMessage()); } @@ -164,8 +175,8 @@ public void testSortingAgainstAliasesPre713() { IndexSettings indexSettings = indexSettings( Settings.builder().put("index.version.created", Version.V_7_12_0).put("index.sort.field", "field").build() ); - MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType("aliased"); - Sort sort = buildIndexSort(indexSettings, Map.of("field", aliased)); + MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType(); + Sort sort = buildIndexSort(indexSettings, Map.of("field", new MappedField("aliased", aliased))); assertThat(sort.getSort(), arrayWithSize(1)); assertThat(sort.getSort()[0].getField(), equalTo("aliased")); assertWarnings( @@ -183,7 +194,11 @@ public void testTimeSeriesMode() { .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2021-04-29T00:00:00Z") .build() ); - Sort sort = buildIndexSort(indexSettings, TimeSeriesIdFieldMapper.FIELD_TYPE, new DateFieldMapper.DateFieldType("@timestamp")); + Sort sort = buildIndexSort( + indexSettings, + new MappedField(TimeSeriesIdFieldMapper.NAME, TimeSeriesIdFieldMapper.FIELD_TYPE), + new MappedField("@timestamp", new DateFieldMapper.DateFieldType()) 
+ ); assertThat(sort.getSort(), arrayWithSize(2)); assertThat(sort.getSort()[0].getField(), equalTo("_tsid")); assertThat(sort.getSort()[1].getField(), equalTo("@timestamp")); @@ -198,19 +213,22 @@ public void testTimeSeriesModeNoTimestamp() { .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2021-04-29T00:00:00Z") .build() ); - Exception e = expectThrows(IllegalArgumentException.class, () -> buildIndexSort(indexSettings, TimeSeriesIdFieldMapper.FIELD_TYPE)); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> buildIndexSort(indexSettings, new MappedField(TimeSeriesIdFieldMapper.NAME, TimeSeriesIdFieldMapper.FIELD_TYPE)) + ); assertThat(e.getMessage(), equalTo("unknown index sort field:[@timestamp] required by [index.mode=time_series]")); } - private Sort buildIndexSort(IndexSettings indexSettings, MappedFieldType... mfts) { - Map lookup = Maps.newMapWithExpectedSize(mfts.length); - for (MappedFieldType mft : mfts) { - assertNull(lookup.put(mft.name(), mft)); + private Sort buildIndexSort(IndexSettings indexSettings, MappedField... 
mappedFields) { + Map lookup = Maps.newMapWithExpectedSize(mappedFields.length); + for (MappedField mappedField : mappedFields) { + assertNull(lookup.put(mappedField.name(), mappedField)); } return buildIndexSort(indexSettings, lookup); } - private Sort buildIndexSort(IndexSettings indexSettings, Map lookup) { + private Sort buildIndexSort(IndexSettings indexSettings, Map lookup) { IndexSortConfig config = indexSettings.getIndexSortConfig(); assertTrue(config.hasIndexSort()); IndicesFieldDataCache cache = new IndicesFieldDataCache(indexSettings.getSettings(), null); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 267b1dceaac95..4dd23a9c94a76 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; import org.elasticsearch.plugins.Plugin; @@ -100,9 +100,9 @@ public void testThatAnalyzersAreUsedInMapping() throws IOException { .endObject(); MapperService mapperService = createIndex("test", indexSettings, mapping).mapperService(); - MappedFieldType fieldType = mapperService.fieldType("field"); - assertThat(fieldType.getTextSearchInfo().searchAnalyzer(), instanceOf(NamedAnalyzer.class)); - NamedAnalyzer fieldMapperNamedAnalyzer = fieldType.getTextSearchInfo().searchAnalyzer(); + MappedField mappedField = mapperService.mappedField("field"); + assertThat(mappedField.getTextSearchInfo().searchAnalyzer(), instanceOf(NamedAnalyzer.class)); + 
NamedAnalyzer fieldMapperNamedAnalyzer = mappedField.getTextSearchInfo().searchAnalyzer(); assertThat(fieldMapperNamedAnalyzer.analyzer(), is(namedAnalyzer.analyzer())); } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 0007525f4b357..2e305086242d1 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -84,80 +84,78 @@ protected Collection> getPlugins() { } public > IFD getForField(String type, String fieldName, boolean docValues) { - final MappedFieldType fieldType; + final MappedField mappedField; final MapperBuilderContext context = MapperBuilderContext.ROOT; if (type.equals("string")) { if (docValues) { - fieldType = new KeywordFieldMapper.Builder(fieldName, Version.CURRENT).build(context).fieldType(); + mappedField = new KeywordFieldMapper.Builder(fieldName, Version.CURRENT).build(context).field(); } else { - fieldType = new TextFieldMapper.Builder(fieldName, createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); + mappedField = new TextFieldMapper.Builder(fieldName, createDefaultIndexAnalyzers()).fielddata(true).build(context).field(); } } else if (type.equals("float")) { - fieldType = new NumberFieldMapper.Builder( + mappedField = new NumberFieldMapper.Builder( fieldName, 
NumberFieldMapper.NumberType.FLOAT, ScriptCompiler.NONE, false, true, Version.CURRENT - ).docValues(docValues).build(context).fieldType(); + ).docValues(docValues).build(context).field(); } else if (type.equals("double")) { - fieldType = new NumberFieldMapper.Builder( + mappedField = new NumberFieldMapper.Builder( fieldName, NumberFieldMapper.NumberType.DOUBLE, ScriptCompiler.NONE, false, true, Version.CURRENT - ).docValues(docValues).build(context).fieldType(); + ).docValues(docValues).build(context).field(); } else if (type.equals("long")) { - fieldType = new NumberFieldMapper.Builder( + mappedField = new NumberFieldMapper.Builder( fieldName, NumberFieldMapper.NumberType.LONG, ScriptCompiler.NONE, false, true, Version.CURRENT - ).docValues(docValues).build(context).fieldType(); + ).docValues(docValues).build(context).field(); } else if (type.equals("int")) { - fieldType = new NumberFieldMapper.Builder( + mappedField = new NumberFieldMapper.Builder( fieldName, NumberFieldMapper.NumberType.INTEGER, ScriptCompiler.NONE, false, true, Version.CURRENT - ).docValues(docValues).build(context).fieldType(); + ).docValues(docValues).build(context).field(); } else if (type.equals("short")) { - fieldType = new NumberFieldMapper.Builder( + mappedField = new NumberFieldMapper.Builder( fieldName, NumberFieldMapper.NumberType.SHORT, ScriptCompiler.NONE, false, true, Version.CURRENT - ).docValues(docValues).build(context).fieldType(); + ).docValues(docValues).build(context).field(); } else if (type.equals("byte")) { - fieldType = new NumberFieldMapper.Builder( + mappedField = new NumberFieldMapper.Builder( fieldName, NumberFieldMapper.NumberType.BYTE, ScriptCompiler.NONE, false, true, Version.CURRENT - ).docValues(docValues).build(context).fieldType(); + ).docValues(docValues).build(context).field(); } else if (type.equals("geo_point")) { - fieldType = new GeoPointFieldMapper.Builder(fieldName, ScriptCompiler.NONE, false, Version.CURRENT).docValues(docValues) + mappedField = new 
GeoPointFieldMapper.Builder(fieldName, ScriptCompiler.NONE, false, Version.CURRENT).docValues(docValues) .build(context) - .fieldType(); + .field(); } else if (type.equals("binary")) { - fieldType = new BinaryFieldMapper.Builder(fieldName, docValues).build(context).fieldType(); + mappedField = new BinaryFieldMapper.Builder(fieldName, docValues).build(context).field(); } else { throw new UnsupportedOperationException(type); } - return searchExecutionContext.getForField(fieldType); + return searchExecutionContext.getForField(mappedField); } @Before diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java index 0bee4ffdf8313..7f878bb4cb659 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java @@ -12,7 +12,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedSetDocValues; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.TextFieldMapper; @@ -52,11 +52,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("high_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedField mappedField = new TextFieldMapper.Builder("high_freq", createDefaultIndexAnalyzers()).fielddata(true) .fielddataFrequencyFilter(0, random.nextBoolean() ? 
100 : 0.5d, 0) .build(builderContext) - .fieldType(); - IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(ft); + .field(); + IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(mappedField); for (LeafReaderContext context : contexts) { LeafOrdinalsFieldData loadDirect = fieldData.loadDirect(context); SortedSetDocValues bytesValues = loadDirect.getOrdinalsValues(); @@ -67,11 +67,11 @@ public void testFilterByFrequency() throws Exception { } { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("high_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedField mappedField = new TextFieldMapper.Builder("high_freq", createDefaultIndexAnalyzers()).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, 201, 100) .build(builderContext) - .fieldType(); - IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(ft); + .field(); + IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(mappedField); for (LeafReaderContext context : contexts) { LeafOrdinalsFieldData loadDirect = fieldData.loadDirect(context); SortedSetDocValues bytesValues = loadDirect.getOrdinalsValues(); @@ -82,11 +82,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true);// test # docs with value - MappedFieldType ft = new TextFieldMapper.Builder("med_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedField mappedField = new TextFieldMapper.Builder("med_freq", createDefaultIndexAnalyzers()).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 
101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) - .fieldType(); - IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(ft); + .field(); + IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(mappedField); for (LeafReaderContext context : contexts) { LeafOrdinalsFieldData loadDirect = fieldData.loadDirect(context); SortedSetDocValues bytesValues = loadDirect.getOrdinalsValues(); @@ -98,11 +98,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("med_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedField mappedField = new TextFieldMapper.Builder("med_freq", createDefaultIndexAnalyzers()).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) - .fieldType(); - IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(ft); + .field(); + IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(mappedField); for (LeafReaderContext context : contexts) { LeafOrdinalsFieldData loadDirect = fieldData.loadDirect(context); SortedSetDocValues bytesValues = loadDirect.getOrdinalsValues(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 83ecc8f7552e2..1171c54efa204 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; 
import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; @@ -78,42 +78,37 @@ public void testGetForFieldDefaults() { indicesService.getCircuitBreakerService() ); MapperBuilderContext context = MapperBuilderContext.ROOT; - final MappedFieldType stringMapper = new KeywordFieldMapper.Builder("string", Version.CURRENT).build(context).fieldType(); + final MappedField stringMapper = new KeywordFieldMapper.Builder("string", Version.CURRENT).build(context).field(); ifdService.clear(); IndexFieldData fd = ifdService.getForField(stringMapper, "test", () -> { throw new UnsupportedOperationException(); }); assertTrue(fd instanceof SortedSetOrdinalsIndexFieldData); - for (MappedFieldType mapper : Arrays.asList( - new NumberFieldMapper.Builder("int", BYTE, ScriptCompiler.NONE, false, true, Version.CURRENT).build(context).fieldType(), - new NumberFieldMapper.Builder("int", SHORT, ScriptCompiler.NONE, false, true, Version.CURRENT).build(context).fieldType(), - new NumberFieldMapper.Builder("int", INTEGER, ScriptCompiler.NONE, false, true, Version.CURRENT).build(context).fieldType(), - new NumberFieldMapper.Builder("long", LONG, ScriptCompiler.NONE, false, true, Version.CURRENT).build(context).fieldType() + for (MappedField mapper : Arrays.asList( + new NumberFieldMapper.Builder("int", BYTE, ScriptCompiler.NONE, false, true, Version.CURRENT).build(context).field(), + new NumberFieldMapper.Builder("int", SHORT, ScriptCompiler.NONE, false, true, Version.CURRENT).build(context).field(), + new NumberFieldMapper.Builder("int", INTEGER, ScriptCompiler.NONE, false, true, Version.CURRENT).build(context).field(), + new NumberFieldMapper.Builder("long", LONG, ScriptCompiler.NONE, false, true, Version.CURRENT).build(context).field() )) { ifdService.clear(); fd = ifdService.getForField(mapper, "test", () -> { throw new UnsupportedOperationException(); }); 
assertTrue(fd instanceof SortedNumericIndexFieldData); } - final MappedFieldType floatMapper = new NumberFieldMapper.Builder( + final MappedField floatMapper = new NumberFieldMapper.Builder( "float", NumberType.FLOAT, ScriptCompiler.NONE, false, true, Version.CURRENT - ).build(context).fieldType(); + ).build(context).field(); ifdService.clear(); fd = ifdService.getForField(floatMapper, "test", () -> { throw new UnsupportedOperationException(); }); assertTrue(fd instanceof SortedDoublesIndexFieldData); - final MappedFieldType doubleMapper = new NumberFieldMapper.Builder( - "double", - DOUBLE, - ScriptCompiler.NONE, - false, - true, - Version.CURRENT - ).build(context).fieldType(); + final MappedField doubleMapper = new NumberFieldMapper.Builder("double", DOUBLE, ScriptCompiler.NONE, false, true, Version.CURRENT) + .build(context) + .field(); ifdService.clear(); fd = ifdService.getForField(doubleMapper, "test", () -> { throw new UnsupportedOperationException(); }); assertTrue(fd instanceof SortedDoublesIndexFieldData); @@ -128,15 +123,15 @@ public void testGetForFieldRuntimeField() { indicesService.getCircuitBreakerService() ); final SetOnce> searchLookupSetOnce = new SetOnce<>(); - MappedFieldType ft = mock(MappedFieldType.class); - when(ft.fielddataBuilder(ArgumentMatchers.any(), ArgumentMatchers.any())).thenAnswer(invocationOnMock -> { + MappedField mappedField = mock(MappedField.class); + when(mappedField.fielddataBuilder(ArgumentMatchers.any(), ArgumentMatchers.any())).thenAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") Supplier searchLookup = (Supplier) invocationOnMock.getArguments()[1]; searchLookupSetOnce.set(searchLookup); return (IndexFieldData.Builder) (cache, breakerService) -> null; }); SearchLookup searchLookup = new SearchLookup(null, null); - ifdService.getForField(ft, "qualified", () -> searchLookup); + ifdService.getForField(mappedField, "qualified", () -> searchLookup); assertSame(searchLookup, searchLookupSetOnce.get().get()); } @@ 
-151,12 +146,12 @@ public void testClearField() throws Exception { ); final MapperBuilderContext context = MapperBuilderContext.ROOT; - final MappedFieldType mapper1 = new TextFieldMapper.Builder("field_1", createDefaultIndexAnalyzers()).fielddata(true) + final MappedField mapper1 = new TextFieldMapper.Builder("field_1", createDefaultIndexAnalyzers()).fielddata(true) .build(context) - .fieldType(); - final MappedFieldType mapper2 = new TextFieldMapper.Builder("field_2", createDefaultIndexAnalyzers()).fielddata(true) + .field(); + final MappedField mapper2 = new TextFieldMapper.Builder("field_2", createDefaultIndexAnalyzers()).fielddata(true) .build(context) - .fieldType(); + .field(); final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("field_1", "thisisastring", Store.NO)); @@ -218,9 +213,7 @@ public void testFieldDataCacheListener() throws Exception { ); final MapperBuilderContext context = MapperBuilderContext.ROOT; - final MappedFieldType mapper1 = new TextFieldMapper.Builder("s", createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); + final MappedField mapper1 = new TextFieldMapper.Builder("s", createDefaultIndexAnalyzers()).fielddata(true).build(context).field(); final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("s", "thisisastring", Store.NO)); @@ -303,7 +296,7 @@ public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, lon } } - private void doTestRequireDocValues(MappedFieldType ft) { + private void doTestRequireDocValues(MappedField mappedField) { ThreadPool threadPool = new TestThreadPool("random_threadpool_name"); try { IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null); @@ -312,12 +305,12 @@ private void 
doTestRequireDocValues(MappedFieldType ft) { cache, null ); - if (ft.hasDocValues()) { - ifds.getForField(ft, "test", () -> { throw new UnsupportedOperationException(); }); // no exception + if (mappedField.hasDocValues()) { + ifds.getForField(mappedField, "test", () -> { throw new UnsupportedOperationException(); }); // no exception } else { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> ifds.getForField(ft, "test", () -> { throw new UnsupportedOperationException(); }) + () -> ifds.getForField(mappedField, "test", () -> { throw new UnsupportedOperationException(); }) ); assertThat(e.getMessage(), containsString("doc values")); } @@ -327,33 +320,40 @@ private void doTestRequireDocValues(MappedFieldType ft) { } public void testRequireDocValuesOnLongs() { - doTestRequireDocValues(new NumberFieldMapper.NumberFieldType("field", LONG)); + doTestRequireDocValues(new MappedField("field", new NumberFieldMapper.NumberFieldType(LONG))); doTestRequireDocValues( - new NumberFieldMapper.NumberFieldType("field", LONG, true, false, false, false, null, Collections.emptyMap(), null, false, null) + new MappedField( + "field", + new NumberFieldMapper.NumberFieldType(LONG, true, false, false, false, null, Collections.emptyMap(), null, false, null) + ) ); } public void testRequireDocValuesOnDoubles() { - doTestRequireDocValues(new NumberFieldMapper.NumberFieldType("field", NumberType.DOUBLE)); + doTestRequireDocValues(new MappedField("field", new NumberFieldMapper.NumberFieldType(DOUBLE))); doTestRequireDocValues( - new NumberFieldMapper.NumberFieldType( + new MappedField( "field", - NumberType.DOUBLE, - true, - false, - false, - false, - null, - Collections.emptyMap(), - null, - false, - null + new NumberFieldMapper.NumberFieldType( + NumberType.DOUBLE, + true, + false, + false, + false, + null, + Collections.emptyMap(), + null, + false, + null + ) ) ); } public void testRequireDocValuesOnBools() { - doTestRequireDocValues(new 
BooleanFieldMapper.BooleanFieldType("field")); - doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType("field", true, false, false, null, null, Collections.emptyMap())); + doTestRequireDocValues(new MappedField("field", new BooleanFieldMapper.BooleanFieldType())); + doTestRequireDocValues( + new MappedField("field", new BooleanFieldMapper.BooleanFieldType(true, false, false, null, null, Collections.emptyMap())) + ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/AbstractNonTextScriptFieldTypeTestCase.java b/server/src/test/java/org/elasticsearch/index/mapper/AbstractNonTextScriptFieldTypeTestCase.java index c5fc0d3d44a8c..e242000c491a0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/AbstractNonTextScriptFieldTypeTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/AbstractNonTextScriptFieldTypeTestCase.java @@ -15,25 +15,22 @@ abstract class AbstractNonTextScriptFieldTypeTestCase extends AbstractScriptFieldTypeTestCase { public void testFuzzyQueryIsError() { - assertQueryOnlyOnTextAndKeyword( - "fuzzy", - () -> simpleMappedFieldType().fuzzyQuery("cat", Fuzziness.AUTO, 0, 1, true, mockContext()) - ); + assertQueryOnlyOnTextAndKeyword("fuzzy", () -> simpleMappedField().fuzzyQuery("cat", Fuzziness.AUTO, 0, 1, true, mockContext())); } public void testPrefixQueryIsError() { - assertQueryOnlyOnTextKeywordAndWildcard("prefix", () -> simpleMappedFieldType().prefixQuery("cat", null, mockContext())); + assertQueryOnlyOnTextKeywordAndWildcard("prefix", () -> simpleMappedField().prefixQuery("cat", null, mockContext())); } public void testRegexpQueryIsError() { assertQueryOnlyOnTextAndKeyword( "regexp", - () -> simpleMappedFieldType().regexpQuery("cat", 0, 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, null, mockContext()) + () -> simpleMappedField().regexpQuery("cat", 0, 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, null, mockContext()) ); } public void testWildcardQueryIsError() { - 
assertQueryOnlyOnTextKeywordAndWildcard("wildcard", () -> simpleMappedFieldType().wildcardQuery("cat", null, mockContext())); + assertQueryOnlyOnTextKeywordAndWildcard("wildcard", () -> simpleMappedField().wildcardQuery("cat", null, mockContext())); } private void assertQueryOnlyOnTextAndKeyword(String queryName, ThrowingRunnable buildQuery) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/server/src/test/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index d5d8b1b4d273c..9c0a51f381705 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -46,9 +46,9 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC private static final ToXContent.Params INCLUDE_DEFAULTS = new ToXContent.MapParams(Map.of("include_defaults", "true")); - protected abstract MappedFieldType simpleMappedFieldType(); + protected abstract MappedField simpleMappedField(); - protected abstract MappedFieldType loopFieldType(); + protected abstract MappedField loopField(); protected abstract String typeName(); @@ -144,11 +144,11 @@ public void testFieldCaps() throws Exception { .endObject(); concreteIndexMapping = createMapperService(mapping); } - MappedFieldType scriptFieldType = scriptIndexMapping.fieldType("field"); - MappedFieldType concreteIndexType = concreteIndexMapping.fieldType("field"); - assertEquals(concreteIndexType.familyTypeName(), scriptFieldType.familyTypeName()); - assertEquals(concreteIndexType.isSearchable(), scriptFieldType.isSearchable()); - assertEquals(concreteIndexType.isAggregatable(), scriptFieldType.isAggregatable()); + MappedField scriptField = scriptIndexMapping.mappedField("field"); + MappedField concreteIndex = concreteIndexMapping.mappedField("field"); + assertEquals(concreteIndex.familyTypeName(), 
scriptField.familyTypeName()); + assertEquals(concreteIndex.isSearchable(), scriptField.isSearchable()); + assertEquals(concreteIndex.isAggregatable(), scriptField.isAggregatable()); } @SuppressWarnings("unused") @@ -166,17 +166,17 @@ public void testFieldCaps() throws Exception { @SuppressWarnings("unused") public abstract void testRangeQuery() throws IOException; - protected abstract Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx); + protected abstract Query randomRangeQuery(MappedField mappedField, SearchExecutionContext ctx); @SuppressWarnings("unused") public abstract void testTermQuery() throws IOException; - protected abstract Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx); + protected abstract Query randomTermQuery(MappedField mappedField, SearchExecutionContext ctx); @SuppressWarnings("unused") public abstract void testTermsQuery() throws IOException; - protected abstract Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx); + protected abstract Query randomTermsQuery(MappedField mappedField, SearchExecutionContext ctx); protected static SearchExecutionContext mockContext() { return mockContext(true); @@ -194,14 +194,14 @@ protected boolean supportsRangeQueries() { return true; } - protected static SearchExecutionContext mockContext(boolean allowExpensiveQueries, MappedFieldType mappedFieldType) { + protected static SearchExecutionContext mockContext(boolean allowExpensiveQueries, MappedField mappedField) { SearchExecutionContext context = mock(SearchExecutionContext.class); - if (mappedFieldType != null) { - when(context.getFieldType(anyString())).thenReturn(mappedFieldType); + if (mappedField != null) { + when(context.getMappedField(anyString())).thenReturn(mappedField); } when(context.allowExpensiveQueries()).thenReturn(allowExpensiveQueries); SearchLookup lookup = new SearchLookup( - context::getFieldType, + context::getMappedField, (mft, lookupSupplier) -> mft.fielddataBuilder("test", 
lookupSupplier).build(null, null) ); when(context.lookup()).thenReturn(lookup); @@ -209,17 +209,17 @@ protected static SearchExecutionContext mockContext(boolean allowExpensiveQuerie } public void testExistsQueryIsExpensive() { - checkExpensiveQuery(MappedFieldType::existsQuery); + checkExpensiveQuery(MappedField::existsQuery); } public void testExistsQueryInLoop() { - checkLoop(MappedFieldType::existsQuery); + checkLoop(MappedField::existsQuery); } public void testRangeQueryWithShapeRelationIsError() { Exception e = expectThrows( IllegalArgumentException.class, - () -> simpleMappedFieldType().rangeQuery(1, 2, true, true, ShapeRelation.DISJOINT, null, null, null) + () -> simpleMappedField().rangeQuery(1, 2, true, true, ShapeRelation.DISJOINT, null, null, null) ); assertThat(e.getMessage(), equalTo("Runtime field [test] of type [" + typeName() + "] does not support DISJOINT ranges")); } @@ -256,22 +256,22 @@ public void testTermsQueryInLoop() { public void testPhraseQueryIsError() { assumeTrue("Impl does not support term queries", supportsTermQueries()); - assertQueryOnlyOnText("phrase", () -> simpleMappedFieldType().phraseQuery(null, 1, false, null)); + assertQueryOnlyOnText("phrase", () -> simpleMappedField().phraseQuery(null, 1, false, null)); } public void testPhrasePrefixQueryIsError() { assumeTrue("Impl does not support term queries", supportsTermQueries()); - assertQueryOnlyOnText("phrase prefix", () -> simpleMappedFieldType().phrasePrefixQuery(null, 1, 1, null)); + assertQueryOnlyOnText("phrase prefix", () -> simpleMappedField().phrasePrefixQuery(null, 1, 1, null)); } public void testMultiPhraseQueryIsError() { assumeTrue("Impl does not support term queries", supportsTermQueries()); - assertQueryOnlyOnText("phrase", () -> simpleMappedFieldType().multiPhraseQuery(null, 1, false, null)); + assertQueryOnlyOnText("phrase", () -> simpleMappedField().multiPhraseQuery(null, 1, false, null)); } public void testSpanPrefixQueryIsError() { assumeTrue("Impl does not 
support term queries", supportsTermQueries()); - assertQueryOnlyOnText("span prefix", () -> simpleMappedFieldType().spanPrefixQuery(null, null, null)); + assertQueryOnlyOnText("span prefix", () -> simpleMappedField().spanPrefixQuery(null, null, null)); } public final void testCacheable() throws IOException { @@ -296,13 +296,13 @@ public final void testCacheable() throws IOException { { SearchExecutionContext c = createSearchExecutionContext(mapperService); - c.getFieldType("field").existsQuery(c); + c.getMappedField("field").existsQuery(c); assertFalse(c.isCacheable()); } { SearchExecutionContext c = createSearchExecutionContext(mapperService); - c.getFieldType("field_source").existsQuery(c); + c.getMappedField("field_source").existsQuery(c); assertTrue(c.isCacheable()); } } @@ -325,16 +325,16 @@ protected final String readSource(IndexReader reader, int docId) throws IOExcept return reader.document(docId).getBinaryValue("_source").utf8ToString(); } - protected final void checkExpensiveQuery(BiConsumer queryBuilder) { - Exception e = expectThrows(ElasticsearchException.class, () -> queryBuilder.accept(simpleMappedFieldType(), mockContext(false))); + protected final void checkExpensiveQuery(BiConsumer queryBuilder) { + Exception e = expectThrows(ElasticsearchException.class, () -> queryBuilder.accept(simpleMappedField(), mockContext(false))); assertThat( e.getMessage(), equalTo("queries cannot be executed against runtime fields while [search.allow_expensive_queries] is set to [false].") ); } - protected final void checkLoop(BiConsumer queryBuilder) { - Exception e = expectThrows(IllegalArgumentException.class, () -> queryBuilder.accept(loopFieldType(), mockContext())); + protected final void checkLoop(BiConsumer queryBuilder) { + Exception e = expectThrows(IllegalArgumentException.class, () -> queryBuilder.accept(loopField(), mockContext())); assertThat(e.getMessage(), equalTo("Cyclic dependency detected while resolving runtime fields: test -> test")); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index ac6f0d863e461..2a1ab30817748 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -115,8 +115,8 @@ public void testStoredValue() throws IOException { assertTrue(field.fieldType().stored()); assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); - MappedFieldType fieldType = mapperService.fieldType("field"); - Object originalValue = fieldType.valueForDisplay(indexedValue); + MappedField mappedField = mapperService.mappedField("field"); + Object originalValue = mappedField.valueForDisplay(indexedValue); assertEquals(new BytesArray(value), originalValue); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldTypeTests.java index 55f076e985c4d..38ba4f7417077 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldTypeTests.java @@ -20,13 +20,13 @@ public class BooleanFieldTypeTests extends FieldTypeTestCase { public void testValueFormat() { - MappedFieldType ft = new BooleanFieldMapper.BooleanFieldType("field"); - assertEquals(false, ft.docValueFormat(null, null).format(0)); - assertEquals(true, ft.docValueFormat(null, null).format(1)); + MappedFieldType ft = new BooleanFieldMapper.BooleanFieldType(); + assertEquals(false, ft.docValueFormat("field", null, null).format(0)); + assertEquals(true, ft.docValueFormat("field", null, null).format(1)); } public void testValueForSearch() { - MappedFieldType ft = new BooleanFieldMapper.BooleanFieldType("field"); + MappedFieldType ft = new BooleanFieldMapper.BooleanFieldType(); assertEquals(true, 
ft.valueForDisplay("T")); assertEquals(false, ft.valueForDisplay("F")); expectThrows(IllegalArgumentException.class, () -> ft.valueForDisplay(0)); @@ -35,52 +35,50 @@ public void testValueForSearch() { } public void testTermQuery() { - MappedFieldType ft = new BooleanFieldMapper.BooleanFieldType("field"); - assertEquals(new TermQuery(new Term("field", "T")), ft.termQuery("true", MOCK_CONTEXT)); - assertEquals(new TermQuery(new Term("field", "F")), ft.termQuery("false", MOCK_CONTEXT)); + MappedFieldType ft = new BooleanFieldMapper.BooleanFieldType(); + assertEquals(new TermQuery(new Term("field", "T")), ft.termQuery("field", "true", MOCK_CONTEXT)); + assertEquals(new TermQuery(new Term("field", "F")), ft.termQuery("field", "false", MOCK_CONTEXT)); - MappedFieldType ft2 = new BooleanFieldMapper.BooleanFieldType("field", false); - assertEquals(SortedNumericDocValuesField.newSlowExactQuery("field", 1), ft2.termQuery("true", MOCK_CONTEXT)); - assertEquals(SortedNumericDocValuesField.newSlowExactQuery("field", 0), ft2.termQuery("false", MOCK_CONTEXT)); + MappedFieldType ft2 = new BooleanFieldMapper.BooleanFieldType(false); + assertEquals(SortedNumericDocValuesField.newSlowExactQuery("field", 1), ft2.termQuery("field", "true", MOCK_CONTEXT)); + assertEquals(SortedNumericDocValuesField.newSlowExactQuery("field", 0), ft2.termQuery("field", "false", MOCK_CONTEXT)); - MappedFieldType unsearchable = new BooleanFieldMapper.BooleanFieldType("field", false, false); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("true", MOCK_CONTEXT)); + MappedFieldType unsearchable = new BooleanFieldMapper.BooleanFieldType(false, false); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> unsearchable.termQuery("field", "true", MOCK_CONTEXT) + ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); } public void testRangeQuery() { - MappedFieldType ft 
= new BooleanFieldMapper.BooleanFieldType("field"); + MappedFieldType ft = new BooleanFieldMapper.BooleanFieldType(); Query expected = new TermRangeQuery("field", BooleanFieldMapper.Values.FALSE, BooleanFieldMapper.Values.TRUE, true, true); - assertEquals(expected, ft.rangeQuery("false", "true", true, true, null, null, null, MOCK_CONTEXT)); + assertEquals(expected, ft.rangeQuery("field", "false", "true", true, true, null, null, null, MOCK_CONTEXT)); - ft = new BooleanFieldMapper.BooleanFieldType("field", false); + ft = new BooleanFieldMapper.BooleanFieldType(false); expected = SortedNumericDocValuesField.newSlowRangeQuery("field", 0, 1); - assertEquals(expected, ft.rangeQuery("false", "true", true, true, null, null, null, MOCK_CONTEXT)); + assertEquals(expected, ft.rangeQuery("field", "false", "true", true, true, null, null, null, MOCK_CONTEXT)); - MappedFieldType unsearchable = new BooleanFieldMapper.BooleanFieldType("field", false, false); + MappedFieldType unsearchable = new BooleanFieldMapper.BooleanFieldType(false, false); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.rangeQuery("false", "true", true, true, null, null, null, MOCK_CONTEXT) + () -> unsearchable.rangeQuery("field", "false", "true", true, true, null, null, null, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); } public void testFetchSourceValue() throws IOException { - MappedFieldType fieldType = new BooleanFieldMapper.BooleanFieldType("field"); - assertEquals(List.of(true), fetchSourceValue(fieldType, true)); - assertEquals(List.of(false), fetchSourceValue(fieldType, "false")); - assertEquals(List.of(false), fetchSourceValue(fieldType, "")); + MappedField mappedField = new MappedField("field", new BooleanFieldMapper.BooleanFieldType()); + assertEquals(List.of(true), fetchSourceValue(mappedField, true)); + assertEquals(List.of(false), fetchSourceValue(mappedField, 
"false")); + assertEquals(List.of(false), fetchSourceValue(mappedField, "")); - MappedFieldType nullFieldType = new BooleanFieldMapper.BooleanFieldType( + MappedField nullField = new MappedField( "field", - true, - false, - true, - true, - null, - Collections.emptyMap() + new BooleanFieldMapper.BooleanFieldType(true, false, true, true, null, Collections.emptyMap()) ); - assertEquals(List.of(true), fetchSourceValue(nullFieldType, null)); + assertEquals(List.of(true), fetchSourceValue(nullField, null)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index 759162ff30710..94f5f0812207c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -67,8 +67,9 @@ public void testDocValues() throws IOException { List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - BooleanScriptFieldType ft = simpleMappedFieldType(); - BooleanScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null); + MappedField mappedField = simpleMappedField(); + BooleanScriptFieldData ifd = (BooleanScriptFieldData) mappedField.fielddataBuilder("test", mockContext()::lookup) + .build(null, null); searcher.search(new MatchAllDocsQuery(), new Collector() { @Override public ScoreMode scoreMode() { @@ -105,7 +106,8 @@ public void testSort() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null); + BooleanScriptFieldData ifd = (BooleanScriptFieldData) 
simpleMappedField().fielddataBuilder("test", mockContext()::lookup) + .build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [false]}")); @@ -121,7 +123,7 @@ public void testUsedInScript() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); + SearchExecutionContext searchContext = mockContext(true, simpleMappedField()); assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() { @Override public boolean needs_score() { @@ -169,7 +171,7 @@ public void testExistsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(3)); + assertThat(searcher.count(simpleMappedField().existsQuery(mockContext())), equalTo(3)); } } } @@ -180,22 +182,22 @@ public void testRangeQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - MappedFieldType ft = simpleMappedFieldType(); - assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(false, true, false, true, null, null, 
null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(0)); + MappedField field = simpleMappedField(); + assertThat(searcher.count(field.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(field.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(field.rangeQuery(false, true, false, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(field.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(0)); } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - MappedFieldType ft = simpleMappedFieldType(); - assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(false, true, true, false, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(0)); + MappedField field = simpleMappedField(); + assertThat(searcher.count(field.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(field.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(field.rangeQuery(false, true, true, false, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(field.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(0)); } } try 
(Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { @@ -203,43 +205,37 @@ public void testRangeQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - MappedFieldType ft = simpleMappedFieldType(); - assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(2)); - assertThat(searcher.count(ft.rangeQuery(false, false, false, false, null, null, null, mockContext())), equalTo(0)); - assertThat(searcher.count(ft.rangeQuery(true, true, false, false, null, null, null, mockContext())), equalTo(0)); + MappedField field = simpleMappedField(); + assertThat(searcher.count(field.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(field.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(field.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(2)); + assertThat(searcher.count(field.rangeQuery(false, false, false, false, null, null, null, mockContext())), equalTo(0)); + assertThat(searcher.count(field.rangeQuery(true, true, false, false, null, null, null, mockContext())), equalTo(0)); } } } public void testRangeQueryDegeneratesIntoNotExpensive() throws IOException { assertThat( - simpleMappedFieldType().rangeQuery(true, true, false, false, null, null, null, mockContext()), + simpleMappedField().rangeQuery(true, true, false, false, null, null, null, mockContext()), instanceOf(MatchNoDocsQuery.class) ); assertThat( - 
simpleMappedFieldType().rangeQuery(false, false, false, false, null, null, null, mockContext()), + simpleMappedField().rangeQuery(false, false, false, false, null, null, null, mockContext()), instanceOf(MatchNoDocsQuery.class) ); // Even if the running the field would blow up because it loops the query *still* just returns none. - assertThat( - loopFieldType().rangeQuery(true, true, false, false, null, null, null, mockContext()), - instanceOf(MatchNoDocsQuery.class) - ); - assertThat( - loopFieldType().rangeQuery(false, false, false, false, null, null, null, mockContext()), - instanceOf(MatchNoDocsQuery.class) - ); + assertThat(loopField().rangeQuery(true, true, false, false, null, null, null, mockContext()), instanceOf(MatchNoDocsQuery.class)); + assertThat(loopField().rangeQuery(false, false, false, false, null, null, null, mockContext()), instanceOf(MatchNoDocsQuery.class)); } @Override - protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) { + protected Query randomRangeQuery(MappedField field, SearchExecutionContext ctx) { // Builds a random range query that doesn't degenerate into match none return switch (randomInt(2)) { - case 0 -> ft.rangeQuery(true, true, true, true, null, null, null, ctx); - case 1 -> ft.rangeQuery(false, true, true, true, null, null, null, ctx); - case 2 -> ft.rangeQuery(false, true, false, true, null, null, null, ctx); + case 0 -> field.rangeQuery(true, true, true, true, null, null, null, ctx); + case 1 -> field.rangeQuery(false, true, true, true, null, null, null, ctx); + case 2 -> field.rangeQuery(false, true, false, true, null, null, null, ctx); default -> throw new UnsupportedOperationException(); }; } @@ -250,9 +246,9 @@ public void testTermQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - 
assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery("true", mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termQuery(true, mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery("true", mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(false, mockContext())), equalTo(0)); assertThat(searcher.count(build("xor_param", Map.of("param", false)).termQuery(true, mockContext())), equalTo(1)); } } @@ -260,18 +256,18 @@ public void testTermQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery("false", mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery(null, mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termQuery(false, mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery("false", mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(null, mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(true, mockContext())), equalTo(0)); assertThat(searcher.count(build("xor_param", Map.of("param", false)).termQuery(false, mockContext())), equalTo(1)); } } } @Override - protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termQuery(randomBoolean(), ctx); + 
protected Query randomTermQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termQuery(randomBoolean(), ctx); } @Override @@ -280,35 +276,35 @@ public void testTermsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, true), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("true", "true"), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(false, false), mockContext())), equalTo(0)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, false), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(true, true), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of("true", "true"), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(false, false), mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(true, false), mockContext())), equalTo(1)); } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(false, false), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("false", "false"), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(singletonList(null), mockContext())), equalTo(1)); - 
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, true), mockContext())), equalTo(0)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, false), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(false, false), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of("false", "false"), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(singletonList(null), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(true, true), mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(true, false), mockContext())), equalTo(1)); } } } public void testEmptyTermsQueryDegeneratesIntoMatchNone() throws IOException { - assertThat(simpleMappedFieldType().termsQuery(List.of(), mockContext()), instanceOf(MatchNoDocsQuery.class)); + assertThat(simpleMappedField().termsQuery(List.of(), mockContext()), instanceOf(MatchNoDocsQuery.class)); } @Override - protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { + protected Query randomTermsQuery(MappedField mappedField, SearchExecutionContext ctx) { return switch (randomInt(2)) { - case 0 -> ft.termsQuery(List.of(true), ctx); - case 1 -> ft.termsQuery(List.of(false), ctx); - case 2 -> ft.termsQuery(List.of(false, true), ctx); + case 0 -> mappedField.termsQuery(List.of(true), ctx); + case 1 -> mappedField.termsQuery(List.of(false), ctx); + case 2 -> mappedField.termsQuery(List.of(false, true), ctx); default -> throw new UnsupportedOperationException(); }; } @@ -349,24 +345,24 @@ public XContentParser parser() { searcher, source, "*", - simpleMappedFieldType().existsQuery(mockContext()), - ootb.fieldType().existsQuery(mockContext()) + simpleMappedField().existsQuery(mockContext()), + ootb.field().existsQuery(mockContext()) ); boolean term = randomBoolean(); 
assertSameCount( searcher, source, term, - simpleMappedFieldType().termQuery(term, mockContext()), - ootb.fieldType().termQuery(term, mockContext()) + simpleMappedField().termQuery(term, mockContext()), + ootb.field().termQuery(term, mockContext()) ); List terms = randomList(0, 3, ESTestCase::randomBoolean); assertSameCount( searcher, source, terms, - simpleMappedFieldType().termsQuery(terms, mockContext()), - ootb.fieldType().termsQuery(terms, mockContext()) + simpleMappedField().termsQuery(terms, mockContext()), + ootb.field().termsQuery(terms, mockContext()) ); boolean low; boolean high; @@ -382,8 +378,8 @@ public XContentParser parser() { searcher, source, (includeLow ? "[" : "(") + low + "," + high + (includeHigh ? "]" : ")"), - simpleMappedFieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()), - ootb.fieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()) + simpleMappedField().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()), + ootb.field().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()) ); } } @@ -399,12 +395,12 @@ private void assertSameCount(IndexSearcher searcher, String source, Object query } @Override - protected BooleanScriptFieldType simpleMappedFieldType() { + protected MappedField simpleMappedField() { return build("read_foo", Map.of()); } @Override - protected MappedFieldType loopFieldType() { + protected MappedField loopField() { return build("loop", Map.of()); } @@ -413,7 +409,7 @@ protected String typeName() { return "boolean"; } - private static BooleanScriptFieldType build(String code, Map params) { + private static MappedField build(String code, Map params) { return build(new Script(ScriptType.INLINE, "test", code, params)); } @@ -444,7 +440,7 @@ public void execute() { }; } - private static BooleanScriptFieldType build(Script script) { - return new BooleanScriptFieldType("test", factory(script), script, 
emptyMap()); + private static MappedField build(Script script) { + return new MappedField("test", new BooleanScriptFieldType(factory(script), script, emptyMap())); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index eb987a30f966e..d551bdb0c6a52 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -749,7 +749,7 @@ public void testPrefixQueryType() throws Exception { DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; - Query prefixQuery = completionFieldMapper.fieldType().prefixQuery(new BytesRef("co")); + Query prefixQuery = completionFieldMapper.fieldType().prefixQuery("field", new BytesRef("co")); assertThat(prefixQuery, instanceOf(PrefixCompletionQuery.class)); } @@ -759,6 +759,7 @@ public void testFuzzyQueryType() throws Exception { CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; Query prefixQuery = completionFieldMapper.fieldType() .fuzzyQuery( + "field", "co", Fuzziness.fromEdits(FuzzyCompletionQuery.DEFAULT_MAX_EDITS), FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX, @@ -775,7 +776,7 @@ public void testRegexQueryType() throws Exception { Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; Query prefixQuery = completionFieldMapper.fieldType() - .regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + .regexpQuery("field", new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); assertThat(prefixQuery, 
instanceOf(RegexCompletionQuery.class)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldTypeTests.java index 03313fdba29c4..7bbfb4e030d7c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldTypeTests.java @@ -22,14 +22,17 @@ public class CompletionFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()); - MappedFieldType fieldType = new CompletionFieldMapper.CompletionFieldType("name", defaultAnalyzer, Collections.emptyMap()); + MappedField mappedField = new MappedField( + "value", + new CompletionFieldMapper.CompletionFieldType(defaultAnalyzer, Collections.emptyMap()) + ); - assertEquals(List.of("value"), fetchSourceValue(fieldType, "value")); + assertEquals(List.of("value"), fetchSourceValue(mappedField, "value")); List list = List.of("first", "second"); - assertEquals(list, fetchSourceValue(fieldType, list)); + assertEquals(list, fetchSourceValue(mappedField, list)); Map object = Map.of("input", List.of("first", "second"), "weight", "2.718"); - assertEquals(List.of(object), fetchSourceValue(fieldType, object)); + assertEquals(List.of(object), fetchSourceValue(mappedField, object)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompositeRuntimeFieldTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompositeRuntimeFieldTests.java index 019023e0043c3..498fd23e8ce6c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompositeRuntimeFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompositeRuntimeFieldTests.java @@ -78,51 +78,51 @@ public void testObjectDefinition() throws IOException { b.endObject(); })); - 
assertNull(mapperService.mappingLookup().getFieldType("obj")); - assertNull(mapperService.mappingLookup().getFieldType("long-subfield")); - assertNull(mapperService.mappingLookup().getFieldType("str-subfield")); - assertNull(mapperService.mappingLookup().getFieldType("double-subfield")); - assertNull(mapperService.mappingLookup().getFieldType("boolean-subfield")); - assertNull(mapperService.mappingLookup().getFieldType("ip-subfield")); - assertNull(mapperService.mappingLookup().getFieldType("geopoint-subfield")); - assertNull(mapperService.mappingLookup().getFieldType("obj.any-subfield")); - MappedFieldType longSubfield = mapperService.mappingLookup().getFieldType("obj.long-subfield"); + assertNull(mapperService.mappingLookup().getMappedField("obj")); + assertNull(mapperService.mappingLookup().getMappedField("long-subfield")); + assertNull(mapperService.mappingLookup().getMappedField("str-subfield")); + assertNull(mapperService.mappingLookup().getMappedField("double-subfield")); + assertNull(mapperService.mappingLookup().getMappedField("boolean-subfield")); + assertNull(mapperService.mappingLookup().getMappedField("ip-subfield")); + assertNull(mapperService.mappingLookup().getMappedField("geopoint-subfield")); + assertNull(mapperService.mappingLookup().getMappedField("obj.any-subfield")); + MappedField longSubfield = mapperService.mappingLookup().getMappedField("obj.long-subfield"); assertEquals("obj.long-subfield", longSubfield.name()); assertEquals("long", longSubfield.typeName()); - MappedFieldType strSubfield = mapperService.mappingLookup().getFieldType("obj.str-subfield"); + MappedField strSubfield = mapperService.mappingLookup().getMappedField("obj.str-subfield"); assertEquals("obj.str-subfield", strSubfield.name()); assertEquals("keyword", strSubfield.typeName()); - MappedFieldType doubleSubfield = mapperService.mappingLookup().getFieldType("obj.double-subfield"); + MappedField doubleSubfield = 
mapperService.mappingLookup().getMappedField("obj.double-subfield"); assertEquals("obj.double-subfield", doubleSubfield.name()); assertEquals("double", doubleSubfield.typeName()); - MappedFieldType booleanSubfield = mapperService.mappingLookup().getFieldType("obj.boolean-subfield"); + MappedField booleanSubfield = mapperService.mappingLookup().getMappedField("obj.boolean-subfield"); assertEquals("obj.boolean-subfield", booleanSubfield.name()); assertEquals("boolean", booleanSubfield.typeName()); - MappedFieldType ipSubfield = mapperService.mappingLookup().getFieldType("obj.ip-subfield"); + MappedField ipSubfield = mapperService.mappingLookup().getMappedField("obj.ip-subfield"); assertEquals("obj.ip-subfield", ipSubfield.name()); assertEquals("ip", ipSubfield.typeName()); - MappedFieldType geoPointSubfield = mapperService.mappingLookup().getFieldType("obj.geopoint-subfield"); + MappedField geoPointSubfield = mapperService.mappingLookup().getMappedField("obj.geopoint-subfield"); assertEquals("obj.geopoint-subfield", geoPointSubfield.name()); assertEquals("geo_point", geoPointSubfield.typeName()); RuntimeField rf = mapperService.mappingLookup().getMapping().getRoot().getRuntimeField("obj"); assertEquals("obj", rf.name()); - Collection mappedFieldTypes = rf.asMappedFieldTypes().toList(); - for (MappedFieldType mappedFieldType : mappedFieldTypes) { - if (mappedFieldType.name().equals("obj.long-subfield")) { - assertSame(longSubfield, mappedFieldType); - } else if (mappedFieldType.name().equals("obj.str-subfield")) { - assertSame(strSubfield, mappedFieldType); - } else if (mappedFieldType.name().equals("obj.double-subfield")) { - assertSame(doubleSubfield, mappedFieldType); - } else if (mappedFieldType.name().equals("obj.boolean-subfield")) { - assertSame(booleanSubfield, mappedFieldType); - } else if (mappedFieldType.name().equals("obj.ip-subfield")) { - assertSame(ipSubfield, mappedFieldType); - } else if (mappedFieldType.name().equals("obj.geopoint-subfield")) { - 
assertSame(geoPointSubfield, mappedFieldType); + Collection mappedFields = rf.asMappedFields().toList(); + for (MappedField mappedField : mappedFields) { + if (mappedField.name().equals("obj.long-subfield")) { + assertSame(longSubfield, mappedField); + } else if (mappedField.name().equals("obj.str-subfield")) { + assertSame(strSubfield, mappedField); + } else if (mappedField.name().equals("obj.double-subfield")) { + assertSame(doubleSubfield, mappedField); + } else if (mappedField.name().equals("obj.boolean-subfield")) { + assertSame(booleanSubfield, mappedField); + } else if (mappedField.name().equals("obj.ip-subfield")) { + assertSame(ipSubfield, mappedField); + } else if (mappedField.name().equals("obj.geopoint-subfield")) { + assertSame(geoPointSubfield, mappedField); } else { - fail("unexpected subfield [" + mappedFieldType.name() + "]"); + fail("unexpected subfield [" + mappedField.name() + "]"); } } } @@ -254,18 +254,18 @@ public void testMappingUpdate() throws IOException { merge(mapperService, b); - assertNull(mapperService.mappingLookup().getFieldType("obj.long-subfield")); - assertNull(mapperService.mappingLookup().getFieldType("obj.str-subfield")); - MappedFieldType doubleSubField = mapperService.mappingLookup().getFieldType("obj.double-subfield"); + assertNull(mapperService.mappingLookup().getMappedField("obj.long-subfield")); + assertNull(mapperService.mappingLookup().getMappedField("obj.str-subfield")); + MappedField doubleSubField = mapperService.mappingLookup().getMappedField("obj.double-subfield"); assertEquals("obj.double-subfield", doubleSubField.name()); assertEquals("double", doubleSubField.typeName()); RuntimeField rf = mapperService.mappingLookup().getMapping().getRoot().getRuntimeField("obj"); assertEquals("obj", rf.name()); - Collection mappedFieldTypes = rf.asMappedFieldTypes().toList(); - assertEquals(1, mappedFieldTypes.size()); - assertSame(doubleSubField, mappedFieldTypes.iterator().next()); + Collection mappedFields = 
rf.asMappedFields().toList(); + assertEquals(1, mappedFields.size()); + assertSame(doubleSubField, mappedFields.iterator().next()); assertEquals(""" {"obj":{"type":"composite","script":{"source":"dummy2","lang":"painless"},\ @@ -332,7 +332,7 @@ public void testParseDocumentSubFieldAccess() throws IOException { withLuceneIndex(mapperService, iw -> iw.addDocuments(Arrays.asList(doc1.rootDoc(), doc2.rootDoc())), reader -> { SearchLookup searchLookup = new SearchLookup( - mapperService::fieldType, + mapperService::mappedField, (mft, lookupSupplier) -> mft.fielddataBuilder("test", lookupSupplier).build(null, null) ); @@ -368,8 +368,8 @@ public void testParseDocumentDynamicMapping() throws IOException { assertNull(mapperService.mappingLookup().getMapper("obj.long")); assertNull(mapperService.mappingLookup().getMapper("obj.str")); - assertNotNull(mapperService.mappingLookup().getFieldType("obj.long")); - assertNotNull(mapperService.mappingLookup().getFieldType("obj.str")); + assertNotNull(mapperService.mappingLookup().getMappedField("obj.long")); + assertNotNull(mapperService.mappingLookup().getMappedField("obj.str")); XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); builder.startObject(); @@ -391,8 +391,8 @@ public void testParseDocumentDynamicMapping() throws IOException { assertNotNull(mapperService.mappingLookup().getMapper("obj.long")); assertNull(mapperService.mappingLookup().getMapper("obj.str")); - assertNotNull(mapperService.mappingLookup().getFieldType("obj.long")); - assertNotNull(mapperService.mappingLookup().getFieldType("obj.str")); + assertNotNull(mapperService.mappingLookup().getMappedField("obj.long")); + assertNotNull(mapperService.mappingLookup().getMappedField("obj.str")); } public void testParseDocumentSubfieldsOutsideRuntimeObject() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java index e63e5e816483f..03575eae8fb89 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ConstantScoreTextFieldTypeTests.java @@ -45,16 +45,19 @@ public class ConstantScoreTextFieldTypeTests extends FieldTypeTestCase { private static ConstantScoreTextFieldType createFieldType() { - return new ConstantScoreTextFieldType("field"); + return new ConstantScoreTextFieldType(); } public void testTermQuery() { MappedFieldType ft = createFieldType(); - assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field", "foo"))), ft.termQuery("foo", null)); - assertEquals(AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "fOo")), ft.termQueryCaseInsensitive("fOo", null)); + assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field", "foo"))), ft.termQuery("field", "foo", null)); + assertEquals( + AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "fOo")), + ft.termQueryCaseInsensitive("field", "fOo", null) + ); - MappedFieldType unsearchable = new ConstantScoreTextFieldType("field", false, false, Collections.emptyMap()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("bar", null)); + MappedFieldType unsearchable = new ConstantScoreTextFieldType(false, false, Collections.emptyMap()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("field", "bar", null)); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } @@ -63,12 +66,12 @@ public void testTermsQuery() { List terms = new ArrayList<>(); terms.add(new BytesRef("foo")); terms.add(new BytesRef("bar")); - assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), null)); + assertEquals(new TermInSetQuery("field", terms), 
ft.termsQuery("field", Arrays.asList("foo", "bar"), null)); - MappedFieldType unsearchable = new ConstantScoreTextFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new ConstantScoreTextFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.termsQuery(Arrays.asList("foo", "bar"), null) + () -> unsearchable.termsQuery("field", Arrays.asList("foo", "bar"), null) ); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } @@ -77,12 +80,12 @@ public void testRangeQuery() { MappedFieldType ft = createFieldType(); assertEquals( new TermRangeQuery("field", BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"), true, false), - ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "foo", "bar", true, false, null, null, null, MOCK_CONTEXT) ); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.rangeQuery("field", "foo", "bar", true, false, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[range] queries on [text] or [keyword] fields cannot be executed when " + "'search.allow_expensive_queries' is set to false.", @@ -92,18 +95,18 @@ public void testRangeQuery() { public void testRegexpQuery() { MappedFieldType ft = createFieldType(); - assertEquals(new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("foo.*", 0, 0, 10, null, MOCK_CONTEXT)); + assertEquals(new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("field", "foo.*", 0, 0, 10, null, MOCK_CONTEXT)); - MappedFieldType unsearchable = new ConstantScoreTextFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new ConstantScoreTextFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = 
expectThrows( IllegalArgumentException.class, - () -> unsearchable.regexpQuery("foo.*", 0, 0, 10, null, MOCK_CONTEXT) + () -> unsearchable.regexpQuery("field", "foo.*", 0, 0, 10, null, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.regexpQuery("foo.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.regexpQuery("field", "foo.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } @@ -112,19 +115,20 @@ public void testFuzzyQuery() { MappedFieldType ft = createFieldType(); assertEquals( new ConstantScoreQuery(new FuzzyQuery(new Term("field", "foo"), 2, 1, 50, true)), - ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) + ft.fuzzyQuery("field", "foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) ); - MappedFieldType unsearchable = new ConstantScoreTextFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new ConstantScoreTextFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) + () -> unsearchable.fuzzyQuery("field", "foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); ElasticsearchException ee = expectThrows( ElasticsearchException.class, () -> ft.fuzzyQuery( + "field", "foo", Fuzziness.AUTO, randomInt(10) + 1, @@ -140,18 +144,18 @@ public void testIndexPrefixes() { ConstantScoreTextFieldType ft = createFieldType(); ft.setIndexPrefixes(2, 10); - Query q = ft.prefixQuery("goin", CONSTANT_SCORE_REWRITE, false, 
randomMockContext()); + Query q = ft.prefixQuery("field", "goin", CONSTANT_SCORE_REWRITE, false, randomMockContext()); assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q); - q = ft.prefixQuery("internationalisatio", CONSTANT_SCORE_REWRITE, false, MOCK_CONTEXT); + q = ft.prefixQuery("field", "internationalisatio", CONSTANT_SCORE_REWRITE, false, MOCK_CONTEXT); assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q); - q = ft.prefixQuery("Internationalisatio", CONSTANT_SCORE_REWRITE, true, MOCK_CONTEXT); + q = ft.prefixQuery("field", "Internationalisatio", CONSTANT_SCORE_REWRITE, true, MOCK_CONTEXT); assertEquals(AutomatonQueries.caseInsensitivePrefixQuery(new Term("field", "Internationalisatio")), q); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.prefixQuery("internationalisatio", null, false, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.prefixQuery("field", "internationalisatio", null, false, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. 
" @@ -159,7 +163,7 @@ public void testIndexPrefixes() { ee.getMessage() ); - q = ft.prefixQuery("g", CONSTANT_SCORE_REWRITE, false, randomMockContext()); + q = ft.prefixQuery("field", "g", CONSTANT_SCORE_REWRITE, false, randomMockContext()); Automaton automaton = Operations.concatenate(Arrays.asList(Automata.makeChar('g'), Automata.makeAnyChar())); Query expected = new ConstantScoreQuery( @@ -172,24 +176,24 @@ public void testIndexPrefixes() { } public void testFetchSourceValue() throws IOException { - ConstantScoreTextFieldType fieldType = createFieldType(); + MappedField mappedField = new MappedField("field", createFieldType()); - assertEquals(List.of("value"), fetchSourceValue(fieldType, "value")); - assertEquals(List.of("42"), fetchSourceValue(fieldType, 42L)); - assertEquals(List.of("true"), fetchSourceValue(fieldType, true)); + assertEquals(List.of("value"), fetchSourceValue(mappedField, "value")); + assertEquals(List.of("42"), fetchSourceValue(mappedField, 42L)); + assertEquals(List.of("true"), fetchSourceValue(mappedField, true)); } public void testWildcardQuery() { ConstantScoreTextFieldType ft = createFieldType(); // case sensitive - AutomatonQuery actual = (AutomatonQuery) ft.wildcardQuery("*Butterflies*", null, false, MOCK_CONTEXT); + AutomatonQuery actual = (AutomatonQuery) ft.wildcardQuery("field", "*Butterflies*", null, false, MOCK_CONTEXT); AutomatonQuery expected = new WildcardQuery(new Term("field", new BytesRef("*Butterflies*"))); assertEquals(expected, actual); assertFalse(new CharacterRunAutomaton(actual.getAutomaton()).run("some butterflies somewhere")); // case insensitive - actual = (AutomatonQuery) ft.wildcardQuery("*Butterflies*", null, true, MOCK_CONTEXT); + actual = (AutomatonQuery) ft.wildcardQuery("field", "*Butterflies*", null, true, MOCK_CONTEXT); expected = AutomatonQueries.caseInsensitiveWildcardQuery(new Term("field", new BytesRef("*Butterflies*"))); assertEquals(expected, actual); assertTrue(new 
CharacterRunAutomaton(actual.getAutomaton()).run("some butterflies somewhere")); @@ -197,7 +201,7 @@ public void testWildcardQuery() { ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.wildcardQuery("field", "valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } @@ -208,7 +212,7 @@ public void testWildcardQuery() { public void testNormalizedWildcardQuery() { ConstantScoreTextFieldType ft = createFieldType(); - AutomatonQuery actual = (AutomatonQuery) ft.normalizedWildcardQuery("*Butterflies*", null, MOCK_CONTEXT); + AutomatonQuery actual = (AutomatonQuery) ft.normalizedWildcardQuery("field", "*Butterflies*", null, MOCK_CONTEXT); AutomatonQuery expected = new WildcardQuery(new Term("field", new BytesRef("*butterflies*"))); assertEquals(expected, actual); assertTrue(new CharacterRunAutomaton(actual.getAutomaton()).run("some butterflies somewhere")); @@ -216,32 +220,32 @@ public void testNormalizedWildcardQuery() { ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.wildcardQuery("field", "valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testTermIntervals() throws IOException { MappedFieldType ft = createFieldType(); - IntervalsSource termIntervals = ft.termIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource termIntervals = ft.termIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.term(new BytesRef("foo")), termIntervals); } public void testPrefixIntervals() throws IOException { MappedFieldType ft = createFieldType(); - IntervalsSource prefixIntervals = 
ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource prefixIntervals = ft.prefixIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.prefix(new BytesRef("foo")), prefixIntervals); } public void testWildcardIntervals() throws IOException { MappedFieldType ft = createFieldType(); - IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource wildcardIntervals = ft.wildcardIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); } public void testFuzzyIntervals() throws IOException { MappedFieldType ft = createFieldType(); - IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); + IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("field", "foo", 1, 2, true, MOCK_CONTEXT); FuzzyQuery fq = new FuzzyQuery(new Term("field", "foo"), 1, 2, 128, true); IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), "foo"); assertEquals(expectedIntervals, fuzzyIntervals); @@ -250,14 +254,14 @@ public void testFuzzyIntervals() throws IOException { public void testPrefixIntervalsWithIndexedPrefixes() { ConstantScoreTextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); - IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource prefixIntervals = ft.prefixIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.fixField("field._index_prefix", Intervals.term(new BytesRef("foo"))), prefixIntervals); } public void testWildcardIntervalsWithIndexedPrefixes() { ConstantScoreTextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); - IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource wildcardIntervals = ft.wildcardIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.wildcard(new BytesRef("foo")), 
wildcardIntervals); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 186c43616432c..910926a7da3c9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -220,7 +220,7 @@ public void testNanosNullValue() throws IOException { fieldMapping(b -> b.field("type", "date_nanos").field("null_value", "2016-03-11")) ); - DateFieldMapper.DateFieldType ft = (DateFieldMapper.DateFieldType) mapperService.fieldType("field"); + DateFieldMapper.DateFieldType ft = (DateFieldMapper.DateFieldType) mapperService.mappedField("field").type(); long expectedNullValue = ft.parse("2016-03-11"); doc = mapperService.documentMapper().parse(source(b -> b.nullField("field"))); @@ -286,8 +286,8 @@ public void testTimeZoneParsing() throws Exception { public void testMergeDate() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "date").field("format", "yyyy/MM/dd"))); - assertThat(mapperService.fieldType("field"), notNullValue()); - assertFalse(mapperService.fieldType("field").isStored()); + assertThat(mapperService.mappedField("field"), notNullValue()); + assertFalse(mapperService.mappedField("field").isStored()); Exception e = expectThrows( IllegalArgumentException.class, @@ -319,31 +319,32 @@ public void testFetchDocValuesMillis() throws IOException { MapperService mapperService = createMapperService( fieldMapping(b -> b.field("type", "date").field("format", "strict_date_time||epoch_millis")) ); - MappedFieldType ft = mapperService.fieldType("field"); - DocValueFormat format = ft.docValueFormat(null, null); + MappedField mappedField = mapperService.mappedField("field"); + DocValueFormat format = mappedField.docValueFormat(null, null); String date = "2020-05-15T21:33:02.123Z"; - 
assertEquals(List.of(date), fetchFromDocValues(mapperService, ft, format, date)); - assertEquals(List.of(date), fetchFromDocValues(mapperService, ft, format, 1589578382123L)); + assertEquals(List.of(date), fetchFromDocValues(mapperService, mappedField, format, date)); + assertEquals(List.of(date), fetchFromDocValues(mapperService, mappedField, format, 1589578382123L)); } public void testFormatPreserveNanos() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "date_nanos"))); - DateFieldMapper.DateFieldType ft = (DateFieldMapper.DateFieldType) mapperService.fieldType("field"); + MappedField mappedField = mapperService.mappedField("field"); + DateFieldMapper.DateFieldType ft = (DateFieldMapper.DateFieldType) mappedField.type(); assertEquals(ft.dateTimeFormatter, DateFieldMapper.DEFAULT_DATE_TIME_NANOS_FORMATTER); - DocValueFormat format = ft.docValueFormat(null, null); + DocValueFormat format = mappedField.docValueFormat(null, null); String date = "2020-05-15T21:33:02.123456789Z"; - assertEquals(List.of(date), fetchFromDocValues(mapperService, ft, format, date)); + assertEquals(List.of(date), fetchFromDocValues(mapperService, mappedField, format, date)); } public void testFetchDocValuesNanos() throws IOException { MapperService mapperService = createMapperService( fieldMapping(b -> b.field("type", "date_nanos").field("format", "strict_date_time||epoch_millis")) ); - MappedFieldType ft = mapperService.fieldType("field"); - DocValueFormat format = ft.docValueFormat(null, null); + MappedField mappedField = mapperService.mappedField("field"); + DocValueFormat format = mappedField.docValueFormat(null, null); String date = "2020-05-15T21:33:02.123456789Z"; - assertEquals(List.of(date), fetchFromDocValues(mapperService, ft, format, date)); - assertEquals(List.of("2020-05-15T21:33:02.123Z"), fetchFromDocValues(mapperService, ft, format, 1589578382123L)); + assertEquals(List.of(date), fetchFromDocValues(mapperService, 
mappedField, format, date)); + assertEquals(List.of("2020-05-15T21:33:02.123Z"), fetchFromDocValues(mapperService, mappedField, format, 1589578382123L)); } public void testResolutionRounding() { @@ -711,8 +712,8 @@ public void testLegacyField() throws Exception { b.field("format", "unknown-format"); b.endObject(); })); - assertThat(service.fieldType("mydate"), instanceOf(DateFieldType.class)); - assertEquals(DEFAULT_DATE_TIME_FORMATTER, ((DateFieldType) service.fieldType("mydate")).dateTimeFormatter); + assertThat(service.mappedField("mydate").type(), instanceOf(DateFieldType.class)); + assertEquals(DEFAULT_DATE_TIME_FORMATTER, ((DateFieldType) service.mappedField("mydate").type()).dateTimeFormatter); // check that date format can be updated merge(service, mapping(b -> { @@ -721,7 +722,7 @@ public void testLegacyField() throws Exception { b.field("format", "YYYY/MM/dd"); b.endObject(); })); - assertThat(service.fieldType("mydate"), instanceOf(DateFieldType.class)); - assertNotEquals(DEFAULT_DATE_TIME_FORMATTER, ((DateFieldType) service.fieldType("mydate")).dateTimeFormatter); + assertThat(service.mappedField("mydate").type(), instanceOf(DateFieldType.class)); + assertNotEquals(DEFAULT_DATE_TIME_FORMATTER, ((DateFieldType) service.mappedField("mydate").type()).dateTimeFormatter); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 28606d9e99840..64f0b79442e88 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -56,31 +56,31 @@ public class DateFieldTypeTests extends FieldTypeTestCase { public void testIsFieldWithinRangeEmptyReader() throws IOException { QueryRewriteContext context = new QueryRewriteContext(parserConfig(), writableRegistry(), null, () -> nowInMillis); IndexReader reader = new MultiReader(); - DateFieldType 
ft = new DateFieldType("my_date"); + DateFieldType ft = new DateFieldType(); assertEquals( Relation.DISJOINT, - ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", randomBoolean(), randomBoolean(), null, null, context) + ft.isFieldWithinQuery("my_date", reader, "2015-10-12", "2016-04-03", randomBoolean(), randomBoolean(), null, null, context) ); } public void testIsFieldWithinRangeOnlyDocValues() throws IOException { QueryRewriteContext context = new QueryRewriteContext(parserConfig(), writableRegistry(), null, () -> nowInMillis); IndexReader reader = new MultiReader(); - DateFieldType ft = new DateFieldType("my_date", false); + DateFieldType ft = new DateFieldType(false); // in case of only doc-values, we can't establish disjointness assertEquals( Relation.INTERSECTS, - ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", randomBoolean(), randomBoolean(), null, null, context) + ft.isFieldWithinQuery("my_date", reader, "2015-10-12", "2016-04-03", randomBoolean(), randomBoolean(), null, null, context) ); } public void testIsFieldWithinQueryDateMillis() throws IOException { - DateFieldType ft = new DateFieldType("my_date"); + DateFieldType ft = new DateFieldType(); isFieldWithinRangeTestCase(ft); } public void testIsFieldWithinQueryDateNanos() throws IOException { - DateFieldType ft = new DateFieldType("my_date", Resolution.NANOSECONDS); + DateFieldType ft = new DateFieldType(Resolution.NANOSECONDS); isFieldWithinRangeTestCase(ft); } @@ -105,9 +105,12 @@ public void isFieldWithinRangeTestCase(DateFieldType ft) throws IOException { QueryRewriteContext context = new QueryRewriteContext(parserConfig(), writableRegistry(), null, () -> nowInMillis); // Fields with no value indexed. 
- DateFieldType ft2 = new DateFieldType("my_date2"); + DateFieldType ft2 = new DateFieldType(); - assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, context)); + assertEquals( + Relation.DISJOINT, + ft2.isFieldWithinQuery("my_date2", reader, "2015-10-09", "2016-01-02", false, false, null, null, context) + ); IOUtils.close(reader, w, dir); } @@ -117,51 +120,63 @@ private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, QueryRewriteContext context = new QueryRewriteContext(parserConfig(), writableRegistry(), null, () -> nowInMillis); assertEquals( Relation.INTERSECTS, - ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", randomBoolean(), randomBoolean(), zone, null, context) + ft.isFieldWithinQuery("my_date", reader, "2015-10-09", "2016-01-02", randomBoolean(), randomBoolean(), zone, null, context) ); assertEquals( Relation.INTERSECTS, - ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20", randomBoolean(), randomBoolean(), zone, null, context) + ft.isFieldWithinQuery("my_date", reader, "2016-01-02", "2016-06-20", randomBoolean(), randomBoolean(), zone, null, context) ); assertEquals( Relation.INTERSECTS, - ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12", randomBoolean(), randomBoolean(), zone, null, context) + ft.isFieldWithinQuery("my_date", reader, "2016-01-02", "2016-02-12", randomBoolean(), randomBoolean(), zone, null, context) ); assertEquals( Relation.DISJOINT, - ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12", randomBoolean(), randomBoolean(), zone, null, context) + ft.isFieldWithinQuery("my_date", reader, "2014-01-02", "2015-02-12", randomBoolean(), randomBoolean(), zone, null, context) ); assertEquals( Relation.DISJOINT, - ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30", randomBoolean(), randomBoolean(), zone, null, context) + ft.isFieldWithinQuery("my_date", reader, "2016-05-11", "2016-08-30", randomBoolean(), randomBoolean(), zone, 
null, context) + ); + assertEquals( + Relation.WITHIN, + ft.isFieldWithinQuery("my_date", reader, "2015-09-25", "2016-05-29", randomBoolean(), randomBoolean(), zone, null, context) ); assertEquals( Relation.WITHIN, - ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29", randomBoolean(), randomBoolean(), zone, null, context) + ft.isFieldWithinQuery("my_date", reader, "2015-10-12", "2016-04-03", true, true, zone, null, context) + ); + assertEquals( + Relation.INTERSECTS, + ft.isFieldWithinQuery("my_date", reader, "2015-10-12", "2016-04-03", false, false, zone, null, context) + ); + assertEquals( + Relation.INTERSECTS, + ft.isFieldWithinQuery("my_date", reader, "2015-10-12", "2016-04-03", false, true, zone, null, context) + ); + assertEquals( + Relation.INTERSECTS, + ft.isFieldWithinQuery("my_date", reader, "2015-10-12", "2016-04-03", true, false, zone, null, context) ); - assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", true, true, zone, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", false, false, zone, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", false, true, zone, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", true, false, zone, null, context)); } public void testValueFormat() { - MappedFieldType ft = new DateFieldType("field"); + MappedFieldType ft = new DateFieldType(); long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12T14:10:55")) .toInstant() .toEpochMilli(); - assertEquals("2015-10-12T14:10:55.000Z", ft.docValueFormat(null, ZoneOffset.UTC).format(instant)); - assertEquals("2015-10-12T15:10:55.000+01:00", ft.docValueFormat(null, ZoneOffset.ofHours(1)).format(instant)); - assertEquals("2015", new DateFieldType("field").docValueFormat("YYYY", ZoneOffset.UTC).format(instant)); - 
assertEquals(instant, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", false, null)); - assertEquals(instant + 999, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", true, null)); + assertEquals("2015-10-12T14:10:55.000Z", ft.docValueFormat("my_date", null, ZoneOffset.UTC).format(instant)); + assertEquals("2015-10-12T15:10:55.000+01:00", ft.docValueFormat("my_date", null, ZoneOffset.ofHours(1)).format(instant)); + assertEquals("2015", new DateFieldType().docValueFormat("my_date", "YYYY", ZoneOffset.UTC).format(instant)); + assertEquals(instant, ft.docValueFormat("my_date", null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", false, null)); + assertEquals(instant + 999, ft.docValueFormat("my_date", null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", true, null)); long i = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-13")).toInstant().toEpochMilli(); - assertEquals(i - 1, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12||/d", true, null)); + assertEquals(i - 1, ft.docValueFormat("my_date", null, ZoneOffset.UTC).parseLong("2015-10-12||/d", true, null)); } public void testValueForSearch() { - MappedFieldType ft = new DateFieldType("field"); + MappedFieldType ft = new DateFieldType(); String date = "2015-10-12T12:09:55.000Z"; long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); assertEquals(date, ft.valueForDisplay(instant)); @@ -194,21 +209,20 @@ public void testTermQuery() { null, emptyMap() ); - MappedFieldType ft = new DateFieldType("field"); + MappedFieldType ft = new DateFieldType(); String date = "2015-10-12T14:10:55"; long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli(); Query expected = new IndexOrDocValuesQuery( LongPoint.newRangeQuery("field", instant, instant + 999), SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant + 999) ); - assertEquals(expected, 
ft.termQuery(date, context)); + assertEquals(expected, ft.termQuery("field", date, context)); - ft = new DateFieldType("field", false); + ft = new DateFieldType(false); expected = SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant + 999); - assertEquals(expected, ft.termQuery(date, context)); + assertEquals(expected, ft.termQuery("field", date, context)); MappedFieldType unsearchable = new DateFieldType( - "field", false, false, false, @@ -218,7 +232,7 @@ public void testTermQuery() { null, Collections.emptyMap() ); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery(date, context)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("field", date, context)); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); } @@ -249,7 +263,7 @@ public void testRangeQuery() throws IOException { null, emptyMap() ); - MappedFieldType ft = new DateFieldType("field"); + MappedFieldType ft = new DateFieldType(); String date1 = "2015-10-12T14:10:55"; String date2 = "2016-04-28T11:33:52"; long instant1 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli(); @@ -258,11 +272,11 @@ public void testRangeQuery() throws IOException { LongPoint.newRangeQuery("field", instant1, instant2), SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2) ); - assertEquals(expected, ft.rangeQuery(date1, date2, true, true, null, null, null, context).rewrite(new MultiReader())); + assertEquals(expected, ft.rangeQuery("field", date1, date2, true, true, null, null, null, context).rewrite(new MultiReader())); - MappedFieldType ft2 = new DateFieldType("field", false); + MappedFieldType ft2 = new DateFieldType(false); Query expected2 = SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2); - assertEquals(expected2, ft2.rangeQuery(date1, 
date2, true, true, null, null, null, context).rewrite(new MultiReader())); + assertEquals(expected2, ft2.rangeQuery("field", date1, date2, true, true, null, null, null, context).rewrite(new MultiReader())); instant1 = nowInMillis; instant2 = instant1 + 100; @@ -272,13 +286,12 @@ public void testRangeQuery() throws IOException { SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2) ) ); - assertEquals(expected, ft.rangeQuery("now", instant2, true, true, null, null, null, context)); + assertEquals(expected, ft.rangeQuery("field", "now", instant2, true, true, null, null, null, context)); expected2 = new DateRangeIncludingNowQuery(SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2)); - assertEquals(expected2, ft2.rangeQuery("now", instant2, true, true, null, null, null, context)); + assertEquals(expected2, ft2.rangeQuery("field", "now", instant2, true, true, null, null, null, context)); MappedFieldType unsearchable = new DateFieldType( - "field", false, false, false, @@ -290,7 +303,7 @@ public void testRangeQuery() throws IOException { ); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.rangeQuery(date1, date2, true, true, null, null, null, context) + () -> unsearchable.rangeQuery("field", date1, date2, true, true, null, null, null, context) ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); } @@ -328,7 +341,7 @@ public void testRangeQueryWithIndexSort() { emptyMap() ); - MappedFieldType ft = new DateFieldType("field"); + MappedFieldType ft = new DateFieldType(); String date1 = "2015-10-12T14:10:55"; String date2 = "2016-04-28T11:33:52"; long instant1 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli(); @@ -342,11 +355,11 @@ public void testRangeQueryWithIndexSort() { instant2, new IndexOrDocValuesQuery(pointQuery, dvQuery) ); - assertEquals(expected, 
ft.rangeQuery(date1, date2, true, true, null, null, null, context)); + assertEquals(expected, ft.rangeQuery("field", date1, date2, true, true, null, null, null, context)); - ft = new DateFieldType("field", false); + ft = new DateFieldType(false); expected = new IndexSortSortedNumericDocValuesRangeQuery("field", instant1, instant2, dvQuery); - assertEquals(expected, ft.rangeQuery(date1, date2, true, true, null, null, null, context)); + assertEquals(expected, ft.rangeQuery("field", date1, date2, true, true, null, null, null, context)); } public void testDateNanoDocValues() throws IOException { @@ -382,34 +395,37 @@ private Instant instant(String str) { return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(str)).toInstant(); } - private static DateFieldType fieldType(Resolution resolution, String format, String nullValue) { + private static MappedField field(Resolution resolution, String format, String nullValue) { DateFormatter formatter = DateFormatter.forPattern(format); - return new DateFieldType("field", true, false, true, formatter, resolution, nullValue, null, Collections.emptyMap()); + return new MappedField( + "field", + new DateFieldType(true, false, true, formatter, resolution, nullValue, null, Collections.emptyMap()) + ); } public void testFetchSourceValue() throws IOException { - MappedFieldType fieldType = new DateFieldType("field", Resolution.MILLISECONDS); + MappedField mappedField = new MappedField("field", new DateFieldType(Resolution.MILLISECONDS)); String date = "2020-05-15T21:33:02.000Z"; - assertEquals(List.of(date), fetchSourceValue(fieldType, date)); - assertEquals(List.of(date), fetchSourceValue(fieldType, 1589578382000L)); + assertEquals(List.of(date), fetchSourceValue(mappedField, date)); + assertEquals(List.of(date), fetchSourceValue(mappedField, 1589578382000L)); - MappedFieldType fieldWithFormat = fieldType(Resolution.MILLISECONDS, "yyyy/MM/dd||epoch_millis", null); + MappedField fieldWithFormat = 
field(Resolution.MILLISECONDS, "yyyy/MM/dd||epoch_millis", null); String dateInFormat = "1990/12/29"; assertEquals(List.of(dateInFormat), fetchSourceValue(fieldWithFormat, dateInFormat)); assertEquals(List.of(dateInFormat), fetchSourceValue(fieldWithFormat, 662428800000L)); - MappedFieldType millis = fieldType(Resolution.MILLISECONDS, "epoch_millis", null); + MappedField millis = field(Resolution.MILLISECONDS, "epoch_millis", null); String dateInMillis = "662428800000"; assertEquals(List.of(dateInMillis), fetchSourceValue(millis, dateInMillis)); assertEquals(List.of(dateInMillis), fetchSourceValue(millis, 662428800000L)); String nullValueDate = "2020-05-15T21:33:02.000Z"; - MappedFieldType nullFieldType = fieldType(Resolution.MILLISECONDS, "strict_date_time", nullValueDate); + MappedField nullFieldType = field(Resolution.MILLISECONDS, "strict_date_time", nullValueDate); assertEquals(List.of(nullValueDate), fetchSourceValue(nullFieldType, null)); } public void testParseSourceValueWithFormat() throws IOException { - MappedFieldType mapper = fieldType(Resolution.NANOSECONDS, "strict_date_time", "1970-12-29T00:00:00.000Z"); + MappedField mapper = field(Resolution.NANOSECONDS, "strict_date_time", "1970-12-29T00:00:00.000Z"); String date = "1990-12-29T00:00:00.000Z"; assertEquals(List.of("1990/12/29"), fetchSourceValue(mapper, date, "yyyy/MM/dd")); assertEquals(List.of("662428800000"), fetchSourceValue(mapper, date, "epoch_millis")); @@ -417,13 +433,13 @@ public void testParseSourceValueWithFormat() throws IOException { } public void testParseSourceValueNanos() throws IOException { - MappedFieldType mapper = fieldType(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", null); + MappedField mapper = field(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", null); String date = "2020-05-15T21:33:02.123456789Z"; assertEquals(List.of("2020-05-15T21:33:02.123456789Z"), fetchSourceValue(mapper, date)); assertEquals(List.of("2020-05-15T21:33:02.123Z"), 
fetchSourceValue(mapper, 1589578382123L)); String nullValueDate = "2020-05-15T21:33:02.123456789Z"; - MappedFieldType nullValueMapper = fieldType(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", nullValueDate); + MappedField nullValueMapper = field(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", nullValueDate); assertEquals(List.of(nullValueDate), fetchSourceValue(nullValueMapper, null)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java index d3b121d5550c5..a14c79b6b083c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java @@ -68,9 +68,9 @@ public void testFromSource() throws IOException { MapperService mapperService = createMapperService(runtimeFieldMapping(b -> b.field("type", "date"))); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", 1545))); withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> { - MappedFieldType ft = mapperService.fieldType("field"); + MappedField mappedField = mapperService.mappedField("field"); SearchExecutionContext sec = createSearchExecutionContext(mapperService); - Query rangeQuery = ft.rangeQuery("1200-01-01", "2020-01-01", false, false, ShapeRelation.CONTAINS, null, null, sec); + Query rangeQuery = mappedField.rangeQuery("1200-01-01", "2020-01-01", false, false, ShapeRelation.CONTAINS, null, null, sec); IndexSearcher searcher = new IndexSearcher(ir); assertEquals(1, searcher.count(rangeQuery)); }); @@ -82,8 +82,8 @@ public void testDateWithFormat() throws IOException { b.field("format", "yyyy-MM-dd"); }); MapperService mapperService = createMapperService(mapping.get()); - MappedFieldType fieldType = mapperService.fieldType("field"); - assertThat(fieldType, instanceOf(DateScriptFieldType.class)); + 
MappedField mappedField = mapperService.mappedField("field"); + assertThat(mappedField.type(), instanceOf(DateScriptFieldType.class)); assertEquals(Strings.toString(mapping.get()), Strings.toString(mapperService.documentMapper().mapping())); } @@ -93,8 +93,8 @@ public void testDateWithLocale() throws IOException { b.field("locale", "en_GB"); }); MapperService mapperService = createMapperService(mapping.get()); - MappedFieldType fieldType = mapperService.fieldType("field"); - assertThat(fieldType, instanceOf(DateScriptFieldType.class)); + MappedField mappedField = mapperService.mappedField("field"); + assertThat(mappedField.type(), instanceOf(DateScriptFieldType.class)); assertEquals(Strings.toString(mapping.get()), Strings.toString(mapperService.documentMapper().mapping())); } @@ -104,23 +104,23 @@ public void testDateWithLocaleAndFormat() throws IOException { b.field("format", "yyyy-MM-dd").field("locale", "en_GB"); }); MapperService mapperService = createMapperService(mapping.get()); - MappedFieldType fieldType = mapperService.fieldType("field"); - assertThat(fieldType, instanceOf(DateScriptFieldType.class)); + MappedField mappedField = mapperService.mappedField("field"); + assertThat(mappedField.type(), instanceOf(DateScriptFieldType.class)); assertEquals(Strings.toString(mapping.get()), Strings.toString(mapperService.documentMapper().mapping())); } public void testFormat() throws IOException { - assertThat(simpleMappedFieldType().docValueFormat("date", null).format(1595432181354L), equalTo("2020-07-22")); + assertThat(simpleMappedField().docValueFormat("date", null).format(1595432181354L), equalTo("2020-07-22")); assertThat( - simpleMappedFieldType().docValueFormat("strict_date_optional_time", null).format(1595432181354L), + simpleMappedField().docValueFormat("strict_date_optional_time", null).format(1595432181354L), equalTo("2020-07-22T15:36:21.354Z") ); assertThat( - simpleMappedFieldType().docValueFormat("strict_date_optional_time", 
ZoneId.of("America/New_York")).format(1595432181354L), + simpleMappedField().docValueFormat("strict_date_optional_time", ZoneId.of("America/New_York")).format(1595432181354L), equalTo("2020-07-22T11:36:21.354-04:00") ); assertThat( - simpleMappedFieldType().docValueFormat(null, ZoneId.of("America/New_York")).format(1595432181354L), + simpleMappedField().docValueFormat(null, ZoneId.of("America/New_York")).format(1595432181354L), equalTo("2020-07-22T11:36:21.354-04:00") ); assertThat(coolFormattedFieldType().docValueFormat(null, null).format(1595432181354L), equalTo("2020-07-22(-■_■)15:36:21.354Z")); @@ -128,8 +128,8 @@ public void testFormat() throws IOException { public void testFormatDuel() throws IOException { DateFormatter formatter = DateFormatter.forPattern(randomDateFormatterPattern()).withLocale(randomLocale(random())); - DateScriptFieldType scripted = build(new Script(ScriptType.INLINE, "test", "read_timestamp", Map.of()), formatter); - DateFieldMapper.DateFieldType indexed = new DateFieldMapper.DateFieldType("test", formatter); + MappedField scripted = build(new Script(ScriptType.INLINE, "test", "read_timestamp", Map.of()), formatter); + MappedField indexed = new MappedField("test", new DateFieldMapper.DateFieldType(formatter)); for (int i = 0; i < 100; i++) { long date = randomDate(); assertThat(indexed.docValueFormat(null, null).format(date), equalTo(scripted.docValueFormat(null, null).format(date))); @@ -149,8 +149,9 @@ public void testDocValues() throws IOException { List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - DateScriptFieldType ft = build("add_days", Map.of("days", 1)); - DateScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null); + MappedField mappedField = build("add_days", Map.of("days", 1)); + DateScriptFieldData ifd = (DateScriptFieldData) mappedField.fielddataBuilder("test", mockContext()::lookup) + .build(null, null); 
searcher.search(new MatchAllDocsQuery(), new Collector() { @Override public ScoreMode scoreMode() { @@ -188,7 +189,8 @@ public void testSort() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - DateScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null); + DateScriptFieldData ifd = (DateScriptFieldData) simpleMappedField().fielddataBuilder("test", mockContext()::lookup) + .build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); assertThat(readSource(reader, docs.scoreDocs[0].doc), equalTo("{\"timestamp\": [1595432181351]}")); @@ -209,7 +211,7 @@ public void testUsedInScript() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); + SearchExecutionContext searchContext = mockContext(true, simpleMappedField()); assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() { @Override public boolean needs_score() { @@ -243,7 +245,7 @@ public void testDistanceFeatureQuery() throws IOException { ); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - Query query = simpleMappedFieldType().distanceFeatureQuery(1595432181354L, "1ms", mockContext()); + Query query = simpleMappedField().distanceFeatureQuery(1595432181354L, "1ms", mockContext()); TopDocs docs = searcher.search(query, 4); assertThat(docs.scoreDocs, arrayWithSize(3)); assertThat(readSource(reader, docs.scoreDocs[0].doc), 
equalTo("{\"timestamp\": [1595432181354]}")); @@ -271,8 +273,8 @@ public void testDistanceFeatureQueryInLoop() throws IOException { checkLoop(this::randomDistanceFeatureQuery); } - private Query randomDistanceFeatureQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.distanceFeatureQuery(randomDate(), randomTimeValue(), ctx); + private Query randomDistanceFeatureQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.distanceFeatureQuery(randomDate(), randomTimeValue(), ctx); } @Override @@ -282,7 +284,7 @@ public void testExistsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().existsQuery(mockContext())), equalTo(1)); } } } @@ -295,38 +297,38 @@ public void testRangeQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - MappedFieldType ft = simpleMappedFieldType(); + MappedField f = simpleMappedField(); assertThat( searcher.count( - ft.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", true, true, null, null, null, mockContext()) + f.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", true, true, null, null, null, mockContext()) ), equalTo(1) ); assertThat( searcher.count( - ft.rangeQuery("2020-07-22T00:00:00.00Z", "2020-07-22T15:36:21.354Z", true, true, null, null, null, mockContext()) + f.rangeQuery("2020-07-22T00:00:00.00Z", "2020-07-22T15:36:21.354Z", true, true, null, null, null, mockContext()) ), equalTo(2) ); assertThat( - searcher.count(ft.rangeQuery(1595432181351L, 1595432181356L, true, true, null, null, null, 
mockContext())), + searcher.count(f.rangeQuery(1595432181351L, 1595432181356L, true, true, null, null, null, mockContext())), equalTo(3) ); assertThat( searcher.count( - ft.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", true, false, null, null, null, mockContext()) + f.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", true, false, null, null, null, mockContext()) ), equalTo(1) ); assertThat( searcher.count( - ft.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", false, false, null, null, null, mockContext()) + f.rangeQuery("2020-07-22T15:36:21.356Z", "2020-07-23T00:00:00.000Z", false, false, null, null, null, mockContext()) ), equalTo(0) ); checkBadDate( () -> searcher.count( - ft.rangeQuery( + f.rangeQuery( "2020-07-22(-■_■)00:00:00.000Z", "2020-07-23(-■_■)00:00:00.000Z", false, @@ -358,7 +360,7 @@ public void testRangeQuery() throws IOException { } @Override - protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) { + protected Query randomRangeQuery(MappedField mappedField, SearchExecutionContext ctx) { long d1 = randomDate(); long d2 = randomValueOtherThan(d1, DateScriptFieldTypeTests::randomDate); if (d1 > d2) { @@ -366,7 +368,7 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) d2 = d1; d1 = backup; } - return ft.rangeQuery(d1, d2, randomBoolean(), randomBoolean(), null, null, null, ctx); + return mappedField.rangeQuery(d1, d2, randomBoolean(), randomBoolean(), null, null, null, ctx); } @Override @@ -376,23 +378,23 @@ public void testTermQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termQuery("2020-07-22T15:36:21.354Z", mockContext())), equalTo(1)); - 
assertThat(searcher.count(simpleMappedFieldType().termQuery("1595432181355", mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery(1595432181354L, mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery(2595432181354L, mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termQuery("2020-07-22T15:36:21.354Z", mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery("1595432181355", mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(1595432181354L, mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(2595432181354L, mockContext())), equalTo(0)); assertThat( searcher.count(build("add_days", Map.of("days", 1)).termQuery("2020-07-23T15:36:21.354Z", mockContext())), equalTo(1) ); - checkBadDate(() -> searcher.count(simpleMappedFieldType().termQuery("2020-07-22(-■_■)15:36:21.354Z", mockContext()))); + checkBadDate(() -> searcher.count(simpleMappedField().termQuery("2020-07-22(-■_■)15:36:21.354Z", mockContext()))); assertThat(searcher.count(coolFormattedFieldType().termQuery("2020-07-22(-■_■)15:36:21.354Z", mockContext())), equalTo(1)); } } } @Override - protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termQuery(randomDate(), ctx); + protected Query randomTermQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termQuery(randomDate(), ctx); } @Override @@ -401,21 +403,18 @@ public void testTermsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); try (DirectoryReader reader = iw.getReader()) { - MappedFieldType ft = simpleMappedFieldType(); + MappedField mappedField = simpleMappedField(); IndexSearcher searcher = 
newSearcher(reader); - assertThat(searcher.count(ft.termsQuery(List.of("2020-07-22T15:36:21.354Z"), mockContext())), equalTo(1)); - assertThat(searcher.count(ft.termsQuery(List.of("1595432181354"), mockContext())), equalTo(1)); - assertThat(searcher.count(ft.termsQuery(List.of(1595432181354L), mockContext())), equalTo(1)); - assertThat(searcher.count(ft.termsQuery(List.of(2595432181354L), mockContext())), equalTo(0)); - assertThat(searcher.count(ft.termsQuery(List.of(1595432181354L, 2595432181354L), mockContext())), equalTo(1)); - assertThat(searcher.count(ft.termsQuery(List.of(2595432181354L, 1595432181354L), mockContext())), equalTo(1)); - assertThat(searcher.count(ft.termsQuery(List.of(1595432181355L, 1595432181354L), mockContext())), equalTo(2)); + assertThat(searcher.count(mappedField.termsQuery(List.of("2020-07-22T15:36:21.354Z"), mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.termsQuery(List.of("1595432181354"), mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.termsQuery(List.of(1595432181354L), mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.termsQuery(List.of(2595432181354L), mockContext())), equalTo(0)); + assertThat(searcher.count(mappedField.termsQuery(List.of(1595432181354L, 2595432181354L), mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.termsQuery(List.of(2595432181354L, 1595432181354L), mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.termsQuery(List.of(1595432181355L, 1595432181354L), mockContext())), equalTo(2)); checkBadDate( () -> searcher.count( - simpleMappedFieldType().termsQuery( - List.of("2020-07-22T15:36:21.354Z", "2020-07-22(-■_■)15:36:21.354Z"), - mockContext() - ) + simpleMappedField().termsQuery(List.of("2020-07-22T15:36:21.354Z", "2020-07-22(-■_■)15:36:21.354Z"), mockContext()) ) ); assertThat( @@ -432,22 +431,23 @@ public void testTermsQuery() throws IOException { } @Override - protected Query randomTermsQuery(MappedFieldType 
ft, SearchExecutionContext ctx) { - return ft.termsQuery(randomList(1, 100, DateScriptFieldTypeTests::randomDate), ctx); + protected Query randomTermsQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termsQuery(randomList(1, 100, DateScriptFieldTypeTests::randomDate), ctx); } @Override - protected DateScriptFieldType simpleMappedFieldType() { + protected MappedField simpleMappedField() { return build("read_timestamp"); } @Override - protected MappedFieldType loopFieldType() { + protected MappedField loopField() { return build("loop"); } - private DateScriptFieldType coolFormattedFieldType() { - return build(simpleMappedFieldType().script, DateFormatter.forPattern("yyyy-MM-dd(-■_■)HH:mm:ss.SSSz||epoch_millis")); + private MappedField coolFormattedFieldType() { + MappedField simple = simpleMappedField(); + return build(((DateScriptFieldType) simple.type()).script, DateFormatter.forPattern("yyyy-MM-dd(-■_■)HH:mm:ss.SSSz||epoch_millis")); } @Override @@ -455,11 +455,11 @@ protected String typeName() { return "date"; } - private static DateScriptFieldType build(String code) { + private static MappedField build(String code) { return build(code, Map.of()); } - private static DateScriptFieldType build(String code, Map params) { + private static MappedField build(String code, Map params) { return build(new Script(ScriptType.INLINE, "test", code, params), DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); } @@ -506,8 +506,8 @@ public void execute() { }; } - private static DateScriptFieldType build(Script script, DateFormatter dateTimeFormatter) { - return new DateScriptFieldType("test", factory(script), dateTimeFormatter, script, emptyMap()); + private static MappedField build(Script script, DateFormatter dateTimeFormatter) { + return new MappedField("test", new DateScriptFieldType(factory(script), dateTimeFormatter, script, emptyMap())); } private static long randomDate() { diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/DefaultAnalyzersTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DefaultAnalyzersTests.java index 84ca112e6c7d8..ddfde2b807b11 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DefaultAnalyzersTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DefaultAnalyzersTests.java @@ -48,32 +48,32 @@ public void testDefaultSearchAnalyzer() throws IOException { { setDefaultSearchAnalyzer = false; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("default", ft.getTextSearchInfo().searchAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("default", mappedField.getTextSearchInfo().searchAnalyzer().name()); } { setDefaultSearchAnalyzer = false; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("search_analyzer", "configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("configured", mappedField.getTextSearchInfo().searchAnalyzer().name()); } { setDefaultSearchAnalyzer = true; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("default_search", ft.getTextSearchInfo().searchAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("default_search", mappedField.getTextSearchInfo().searchAnalyzer().name()); } { setDefaultSearchAnalyzer = true; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("search_analyzer", "configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + 
assertEquals("configured", mappedField.getTextSearchInfo().searchAnalyzer().name()); } { setDefaultSearchAnalyzer = true; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("analyzer", "configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("configured", mappedField.getTextSearchInfo().searchAnalyzer().name()); } } @@ -83,71 +83,71 @@ public void testDefaultSearchQuoteAnalyzer() throws IOException { setDefaultSearchQuoteAnalyzer = false; setDefaultSearchAnalyzer = false; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("default", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("default", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = false; setDefaultSearchAnalyzer = false; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("search_quote_analyzer", "configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("configured", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = true; setDefaultSearchAnalyzer = false; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("default_search_quote", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("default_search_quote", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = true; 
setDefaultSearchAnalyzer = false; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("search_quote_analyzer", "configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("configured", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = false; setDefaultSearchAnalyzer = true; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("default_search", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("default_search", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = false; setDefaultSearchAnalyzer = true; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("search_quote_analyzer", "configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("configured", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = true; setDefaultSearchAnalyzer = true; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("default_search_quote", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("default_search_quote", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = true; setDefaultSearchAnalyzer = true; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("search_quote_analyzer", 
"configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("configured", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = true; setDefaultSearchAnalyzer = false; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("analyzer", "configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("configured", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } { setDefaultSearchQuoteAnalyzer = true; setDefaultSearchAnalyzer = false; MapperService ms = createMapperService(fieldMapping(b -> b.field("type", "text").field("search_analyzer", "configured"))); - MappedFieldType ft = ms.fieldType("field"); - assertEquals("configured", ft.getTextSearchInfo().searchQuoteAnalyzer().name()); + MappedField mappedField = ms.mappedField("field"); + assertEquals("configured", mappedField.getTextSearchInfo().searchQuoteAnalyzer().name()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldTypeTests.java index 64bbdb8faf924..9cd5e5fa2db4b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldTypeTests.java @@ -16,7 +16,7 @@ public class DocCountFieldTypeTests extends FieldTypeTestCase { public void testTermQuery() { MappedFieldType ft = new DocCountFieldMapper.DocCountFieldType(); - QueryShardException e = expectThrows(QueryShardException.class, () -> ft.termQuery(10L, randomMockContext())); + QueryShardException e = expectThrows(QueryShardException.class, () -> 
ft.termQuery("_doc_count", 10L, randomMockContext())); assertEquals("Field [_doc_count] of type [_doc_count] is not searchable", e.getMessage()); } @@ -24,22 +24,22 @@ public void testRangeQuery() { MappedFieldType ft = new DocCountFieldMapper.DocCountFieldType(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null, null, null, null) + () -> ft.rangeQuery("_doc_count", null, null, randomBoolean(), randomBoolean(), null, null, null, null) ); assertEquals("Field [_doc_count] of type [_doc_count] does not support range queries", e.getMessage()); } public void testExistsQuery() { MappedFieldType ft = new DocCountFieldMapper.DocCountFieldType(); - QueryShardException e = expectThrows(QueryShardException.class, () -> ft.existsQuery(randomMockContext())); + QueryShardException e = expectThrows(QueryShardException.class, () -> ft.existsQuery("_doc_count", randomMockContext())); assertEquals("Field [_doc_count] of type [_doc_count] does not support exists queries", e.getMessage()); } public void testFetchSourceValue() throws IOException { - MappedFieldType fieldType = new DocCountFieldMapper.DocCountFieldType(); - assertEquals(Arrays.asList(14), fetchSourceValue(fieldType, 14)); - assertEquals(Arrays.asList(14), fetchSourceValue(fieldType, "14")); - assertEquals(Arrays.asList(1), fetchSourceValue(fieldType, "")); - assertEquals(Arrays.asList(1), fetchSourceValue(fieldType, null)); + MappedField mappedField = new MappedField(DocCountFieldMapper.NAME, new DocCountFieldMapper.DocCountFieldType()); + assertEquals(Arrays.asList(14), fetchSourceValue(mappedField, 14)); + assertEquals(Arrays.asList(14), fetchSourceValue(mappedField, "14")); + assertEquals(Arrays.asList(1), fetchSourceValue(mappedField, "")); + assertEquals(Arrays.asList(1), fetchSourceValue(mappedField, null)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java index d0b6591738e14..6c44697aedd70 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java @@ -125,14 +125,14 @@ public void testMergeSearchAnalyzer() throws Exception { b.field("search_analyzer", "whitespace"); })); - assertThat(mapperService.fieldType("field").getTextSearchInfo().searchAnalyzer().name(), equalTo("whitespace")); + assertThat(mapperService.mappedField("field").getTextSearchInfo().searchAnalyzer().name(), equalTo("whitespace")); merge(mapperService, fieldMapping(b -> { b.field("type", "text"); b.field("analyzer", "default"); b.field("search_analyzer", "keyword"); })); - assertThat(mapperService.fieldType("field").getTextSearchInfo().searchAnalyzer().name(), equalTo("keyword")); + assertThat(mapperService.mappedField("field").getTextSearchInfo().searchAnalyzer().name(), equalTo("keyword")); } public void testChangeSearchAnalyzerToDefault() throws Exception { @@ -143,14 +143,14 @@ public void testChangeSearchAnalyzerToDefault() throws Exception { b.field("search_analyzer", "whitespace"); })); - assertThat(mapperService.fieldType("field").getTextSearchInfo().searchAnalyzer().name(), equalTo("whitespace")); + assertThat(mapperService.mappedField("field").getTextSearchInfo().searchAnalyzer().name(), equalTo("whitespace")); merge(mapperService, fieldMapping(b -> { b.field("type", "text"); b.field("analyzer", "default"); })); - assertThat(mapperService.fieldType("field").getTextSearchInfo().searchAnalyzer().name(), equalTo("default")); + assertThat(mapperService.mappedField("field").getTextSearchInfo().searchAnalyzer().name(), equalTo("default")); } public void testConcurrentMergeTest() throws Throwable { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 6c3f50c30de35..0b8d0cd78eeaa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -2363,7 +2363,7 @@ private static final class MockMetadataMapper extends MetadataFieldMapper { private static final String FIELD_NAME = "_mock_metadata"; protected MockMetadataMapper() { - super(new KeywordFieldMapper.KeywordFieldType(FIELD_NAME)); + super(new MappedField(FIELD_NAME, new KeywordFieldMapper.KeywordFieldType())); } @Override @@ -2371,7 +2371,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) { context.doc().add(new StringField(FIELD_NAME, context.parser().text(), Field.Store.YES)); } else { - throw new IllegalArgumentException("Field [" + fieldType().name() + "] must be a string."); + throw new IllegalArgumentException("Field [" + field().name() + "] must be a string."); } } @@ -2396,8 +2396,8 @@ protected RuntimeField createRuntimeField(MappingParserContext parserContext) { return new TestRuntimeField( n, List.of( - new TestRuntimeField.TestRuntimeFieldType(n + ".foo", KeywordFieldMapper.CONTENT_TYPE), - new TestRuntimeField.TestRuntimeFieldType(n + ".bar", KeywordFieldMapper.CONTENT_TYPE) + new MappedField(n + ".foo", new TestRuntimeField.TestRuntimeFieldType(KeywordFieldMapper.CONTENT_TYPE)), + new MappedField(n + ".bar", new TestRuntimeField.TestRuntimeFieldType(KeywordFieldMapper.CONTENT_TYPE)) ) ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 9cb7927f7ff18..b295bbcced18d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -36,25 +36,25 @@ public void testDoubleIndexingSameDoc() throws Exception { iw.addDocument(doc.rootDoc()); }, reader -> { IndexSearcher searcher = new IndexSearcher(reader); - TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", context), 10); + TopDocs topDocs = searcher.search(mapperService.mappedField("field1").termQuery("value1", context), 10); assertThat(topDocs.totalHits.value, equalTo(2L)); - topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", context), 10); + topDocs = searcher.search(mapperService.mappedField("field2").termQuery("1", context), 10); assertThat(topDocs.totalHits.value, equalTo(2L)); - topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", context), 10); + topDocs = searcher.search(mapperService.mappedField("field3").termQuery("1.1", context), 10); assertThat(topDocs.totalHits.value, equalTo(2L)); - topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", context), 10); + topDocs = searcher.search(mapperService.mappedField("field4").termQuery("2010-01-01", context), 10); assertThat(topDocs.totalHits.value, equalTo(2L)); - topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", context), 10); + topDocs = searcher.search(mapperService.mappedField("field5").termQuery("1", context), 10); assertThat(topDocs.totalHits.value, equalTo(2L)); - topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", context), 10); + topDocs = searcher.search(mapperService.mappedField("field5").termQuery("2", context), 10); assertThat(topDocs.totalHits.value, equalTo(2L)); - topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", context), 10); + topDocs = searcher.search(mapperService.mappedField("field5").termQuery("3", context), 10); assertThat(topDocs.totalHits.value, equalTo(2L)); }); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index c8d8a975afd2e..c9b53feb8b633 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -48,12 +48,12 @@ public class DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase { public void testFormat() throws IOException { - assertThat(simpleMappedFieldType().docValueFormat("#.0", null).format(1), equalTo("1.0")); - assertThat(simpleMappedFieldType().docValueFormat("#.0", null).format(1.2), equalTo("1.2")); - assertThat(simpleMappedFieldType().docValueFormat("#,##0.##", null).format(11), equalTo("11")); - assertThat(simpleMappedFieldType().docValueFormat("#,##0.##", null).format(1123), equalTo("1,123")); - assertThat(simpleMappedFieldType().docValueFormat("#,##0.00", null).format(1123), equalTo("1,123.00")); - assertThat(simpleMappedFieldType().docValueFormat("#,##0.00", null).format(1123.1), equalTo("1,123.10")); + assertThat(simpleMappedField().docValueFormat("#.0", null).format(1), equalTo("1.0")); + assertThat(simpleMappedField().docValueFormat("#.0", null).format(1.2), equalTo("1.2")); + assertThat(simpleMappedField().docValueFormat("#,##0.##", null).format(11), equalTo("11")); + assertThat(simpleMappedField().docValueFormat("#,##0.##", null).format(1123), equalTo("1,123")); + assertThat(simpleMappedField().docValueFormat("#,##0.00", null).format(1123), equalTo("1,123.00")); + assertThat(simpleMappedField().docValueFormat("#,##0.00", null).format(1123.1), equalTo("1,123.10")); } @Override @@ -64,8 +64,9 @@ public void testDocValues() throws IOException { List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - DoubleScriptFieldType ft = build("add_param", 
Map.of("param", 1)); - DoubleScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null); + MappedField mappedField = build("add_param", Map.of("param", 1)); + DoubleScriptFieldData ifd = (DoubleScriptFieldData) mappedField.fielddataBuilder("test", mockContext()::lookup) + .build(null, null); searcher.search(new MatchAllDocsQuery(), new Collector() { @Override public ScoreMode scoreMode() { @@ -103,7 +104,8 @@ public void testSort() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null); + DoubleScriptFieldData ifd = (DoubleScriptFieldData) simpleMappedField().fielddataBuilder("test", mockContext()::lookup) + .build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1.1]}")); @@ -121,7 +123,7 @@ public void testUsedInScript() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); + SearchExecutionContext searchContext = mockContext(true, simpleMappedField()); assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() { @Override public boolean needs_score() { @@ -150,7 +152,7 @@ public void testExistsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { 
IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().existsQuery(mockContext())), equalTo(1)); } } } @@ -163,21 +165,21 @@ public void testRangeQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.5]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - MappedFieldType ft = simpleMappedFieldType(); - assertThat(searcher.count(ft.rangeQuery("2", "3", true, true, null, null, null, mockContext())), equalTo(2)); - assertThat(searcher.count(ft.rangeQuery(2, 3, true, true, null, null, null, mockContext())), equalTo(2)); - assertThat(searcher.count(ft.rangeQuery(1.1, 3, true, true, null, null, null, mockContext())), equalTo(2)); - assertThat(searcher.count(ft.rangeQuery(1.1, 3, false, true, null, null, null, mockContext())), equalTo(2)); - assertThat(searcher.count(ft.rangeQuery(2, 3, false, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(2.5, 3, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(2.5, 3, false, true, null, null, null, mockContext())), equalTo(0)); + MappedField mappedField = simpleMappedField(); + assertThat(searcher.count(mappedField.rangeQuery("2", "3", true, true, null, null, null, mockContext())), equalTo(2)); + assertThat(searcher.count(mappedField.rangeQuery(2, 3, true, true, null, null, null, mockContext())), equalTo(2)); + assertThat(searcher.count(mappedField.rangeQuery(1.1, 3, true, true, null, null, null, mockContext())), equalTo(2)); + assertThat(searcher.count(mappedField.rangeQuery(1.1, 3, false, true, null, null, null, mockContext())), equalTo(2)); + assertThat(searcher.count(mappedField.rangeQuery(2, 3, false, true, null, null, null, mockContext())), equalTo(1)); + 
assertThat(searcher.count(mappedField.rangeQuery(2.5, 3, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.rangeQuery(2.5, 3, false, true, null, null, null, mockContext())), equalTo(0)); } } } @Override - protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.rangeQuery(randomLong(), randomLong(), randomBoolean(), randomBoolean(), null, null, null, ctx); + protected Query randomRangeQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.rangeQuery(randomLong(), randomLong(), randomBoolean(), randomBoolean(), null, null, null, ctx); } @Override @@ -187,17 +189,17 @@ public void testTermQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termQuery("1", mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery(1, mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery(1.1, mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termQuery("1", mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(1, mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(1.1, mockContext())), equalTo(0)); assertThat(searcher.count(build("add_param", Map.of("param", 1)).termQuery(2, mockContext())), equalTo(1)); } } } @Override - protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termQuery(randomLong(), ctx); + protected Query randomTermQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termQuery(randomLong(), ctx); } @Override @@ -207,27 +209,27 @@ public void testTermsQuery() throws IOException { iw.addDocument(List.of(new 
StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1"), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(1), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(1.1), mockContext())), equalTo(0)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(1.1, 2.1), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(2.1, 1), mockContext())), equalTo(2)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of("1"), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(1), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(1.1), mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(1.1, 2.1), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(2.1, 1), mockContext())), equalTo(2)); } } } @Override - protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termsQuery(List.of(randomLong()), ctx); + protected Query randomTermsQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termsQuery(List.of(randomLong()), ctx); } @Override - protected DoubleScriptFieldType simpleMappedFieldType() { + protected MappedField simpleMappedField() { return build("read_foo", Map.of()); } @Override - protected MappedFieldType loopFieldType() { + protected MappedField loopField() { return build("loop", Map.of()); } @@ -236,7 +238,7 @@ protected String typeName() { return "double"; } - private static DoubleScriptFieldType build(String code, Map params) { + private static MappedField build(String code, Map 
params) { return build(new Script(ScriptType.INLINE, "test", code, params)); } @@ -267,7 +269,7 @@ public void execute() { }; } - private static DoubleScriptFieldType build(Script script) { - return new DoubleScriptFieldType("test", factory(script), script, emptyMap()); + private static MappedField build(Script script) { + return new MappedField("test", new DoubleScriptFieldType(factory(script), script, emptyMap())); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index bdd31b90096f1..8b6d1ea06a12b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -176,8 +176,8 @@ public void testDynamicMappingOnEmptyString() throws Exception { ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("empty_field", ""))); assertNotNull(doc.rootDoc().getField("empty_field")); merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate())); - MappedFieldType fieldType = mapperService.fieldType("empty_field"); - assertNotNull(fieldType); + MappedField mappedField = mapperService.mappedField("empty_field"); + assertNotNull(mappedField); } public void testDynamicRuntimeMappingOnEmptyString() throws Exception { @@ -185,8 +185,8 @@ public void testDynamicRuntimeMappingOnEmptyString() throws Exception { ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("empty_field", ""))); assertNull(doc.rootDoc().getField("empty_field")); merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate())); - MappedFieldType fieldType = mapperService.fieldType("empty_field"); - assertNotNull(fieldType); + MappedField mappedField = mapperService.mappedField("empty_field"); + assertNotNull(mappedField); } public void testDynamicMappingsNotNeeded() throws Exception { @@ -616,8 +616,8 @@ public void 
testNumericDetectionEnabledDynamicRuntime() throws Exception { assertNotNull(doc.dynamicMappingsUpdate()); merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate())); - assertThat(mapperService.fieldType("s_long").typeName(), equalTo("long")); - assertThat(mapperService.fieldType("s_double").typeName(), equalTo("double")); + assertThat(mapperService.mappedField("s_long").typeName(), equalTo("long")); + assertThat(mapperService.mappedField("s_double").typeName(), equalTo("double")); } public void testNumericDetectionDefault() throws Exception { @@ -647,8 +647,8 @@ public void testNumericDetectionDefaultDynamicRuntime() throws Exception { assertNotNull(doc.dynamicMappingsUpdate()); merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate())); - assertThat(mapperService.fieldType("s_long").typeName(), equalTo("keyword")); - assertThat(mapperService.fieldType("s_double").typeName(), equalTo("keyword")); + assertThat(mapperService.mappedField("s_long").typeName(), equalTo("keyword")); + assertThat(mapperService.mappedField("s_double").typeName(), equalTo("keyword")); } public void testDynamicRuntimeLeafFields() throws IOException { @@ -936,11 +936,11 @@ public void testSubobjectsFalseRootDynamicUpdate() throws Exception { merge(mapperService, dynamicMapping(mappingsUpdate)); - assertNotNull(mapperService.fieldType("time")); - assertNotNull(mapperService.fieldType("time.max")); - assertNotNull(mapperService.fieldType("time.min")); - assertNotNull(mapperService.fieldType("host.id")); - assertNotNull(mapperService.fieldType("host.name")); + assertNotNull(mapperService.mappedField("time")); + assertNotNull(mapperService.mappedField("time.max")); + assertNotNull(mapperService.mappedField("time.min")); + assertNotNull(mapperService.mappedField("host.id")); + assertNotNull(mapperService.mappedField("host.name")); assertEquals(0, mapperService.mappingLookup().objectMappers().size()); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index ad02f39e509ed..e0142a12321cd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -60,13 +60,13 @@ public void testMatchTypeOnly() throws Exception { })); merge(mapperService, dynamicMapping(parsedDoc.dynamicMappingsUpdate())); - assertThat(mapperService.fieldType("s"), notNullValue()); - assertFalse(mapperService.fieldType("s").isIndexed()); - assertFalse(mapperService.fieldType("s").isSearchable()); + assertThat(mapperService.mappedField("s"), notNullValue()); + assertFalse(mapperService.mappedField("s").isIndexed()); + assertFalse(mapperService.mappedField("s").isSearchable()); - assertThat(mapperService.fieldType("l"), notNullValue()); - assertFalse(mapperService.fieldType("s").isIndexed()); - assertTrue(mapperService.fieldType("l").isSearchable()); + assertThat(mapperService.mappedField("l"), notNullValue()); + assertFalse(mapperService.mappedField("s").isIndexed()); + assertTrue(mapperService.mappedField("l").isSearchable()); } public void testSimple() throws Exception { @@ -813,7 +813,7 @@ public void testDynamicTemplateOrder() throws IOException { ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("foo", "abc"))); assertNotNull(doc.dynamicMappingsUpdate()); merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate())); - assertThat(mapperService.fieldType("foo"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertThat(mapperService.mappedField("foo").type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); } public void testDynamicTemplateRuntimeMatchMappingType() throws Exception { @@ -1099,11 +1099,11 @@ private MapperService createDynamicTemplateNoSubobjects() throws IOException { } private static void 
assertNoSubobjects(MapperService mapperService) { - assertThat(mapperService.fieldType("foo.bar.baz").typeName(), equalTo("long")); + assertThat(mapperService.mappedField("foo.bar.baz").typeName(), equalTo("long")); assertNotNull(mapperService.mappingLookup().objectMappers().get("foo.bar")); - assertThat(mapperService.fieldType("foo.metric.count").typeName(), equalTo("long")); - assertThat(mapperService.fieldType("foo.metric.count.min").typeName(), equalTo("long")); - assertThat(mapperService.fieldType("foo.metric.count.max").typeName(), equalTo("long")); + assertThat(mapperService.mappedField("foo.metric.count").typeName(), equalTo("long")); + assertThat(mapperService.mappedField("foo.metric.count.min").typeName(), equalTo("long")); + assertThat(mapperService.mappedField("foo.metric.count.max").typeName(), equalTo("long")); assertNotNull(mapperService.mappingLookup().objectMappers().get("foo.metric")); assertNull(mapperService.mappingLookup().objectMappers().get("foo.metric.count")); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java index 02c438688d768..2b61bc0bd64a8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java @@ -18,7 +18,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { static final String FIELD_VALUE = "true"; protected ExternalMetadataMapper() { - super(new BooleanFieldMapper.BooleanFieldType(FIELD_NAME)); + super(new MappedField(FIELD_NAME, new BooleanFieldMapper.BooleanFieldType())); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java index 07f4c3c1346c4..89fea7261844f 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java @@ -71,9 +71,9 @@ public void testMerge() throws IOException { b.endObject(); })); - MappedFieldType firstFieldType = mapperService.fieldType("alias-field"); - assertEquals("first-field", firstFieldType.name()); - assertTrue(firstFieldType instanceof KeywordFieldMapper.KeywordFieldType); + MappedField firstField = mapperService.mappedField("alias-field"); + assertEquals("first-field", firstField.name()); + assertTrue(firstField.type() instanceof KeywordFieldMapper.KeywordFieldType); merge(mapperService, mapping(b -> { b.startObject("second-field").field("type", "text").endObject(); @@ -85,9 +85,9 @@ public void testMerge() throws IOException { b.endObject(); })); - MappedFieldType secondFieldType = mapperService.fieldType("alias-field"); - assertEquals("second-field", secondFieldType.name()); - assertTrue(secondFieldType instanceof TextFieldMapper.TextFieldType); + MappedField secondField = mapperService.mappedField("alias-field"); + assertEquals("second-field", secondField.name()); + assertTrue(secondField.type() instanceof TextFieldMapper.TextFieldType); } public void testMergeFailure() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index df8e09684f4fc..179d88c997bfa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -26,15 +26,15 @@ public class FieldNamesFieldTypeTests extends ESTestCase { public void testTermQuery() { - FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = FieldNamesFieldMapper.FieldNamesFieldType.get(true); - KeywordFieldMapper.KeywordFieldType fieldType = new 
KeywordFieldMapper.KeywordFieldType("field_name"); + MappedField fieldNamesField = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(true)); + MappedField mappedField = new MappedField("field_name", new KeywordFieldMapper.KeywordFieldType()); Settings settings = settings(Version.CURRENT).build(); IndexSettings indexSettings = new IndexSettings( new IndexMetadata.Builder("foo").settings(settings).numberOfShards(1).numberOfReplicas(0).build(), settings ); - List mappers = Stream.of(fieldNamesFieldType, fieldType).map(MockFieldMapper::new).toList(); + List mappers = Stream.of(fieldNamesField, mappedField).map(MockFieldMapper::new).toList(); MappingLookup mappingLookup = MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList(), emptyList()); SearchExecutionContext searchExecutionContext = new SearchExecutionContext( 0, @@ -57,12 +57,12 @@ public void testTermQuery() { null, emptyMap() ); - Query termQuery = fieldNamesFieldType.termQuery("field_name", searchExecutionContext); + Query termQuery = fieldNamesField.termQuery("field_name", searchExecutionContext); assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.CONTENT_TYPE, "field_name")), termQuery); assertWarnings("terms query on the _field_names field is deprecated and will be removed, use exists query instead"); FieldNamesFieldMapper.FieldNamesFieldType unsearchable = FieldNamesFieldMapper.FieldNamesFieldType.get(false); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> unsearchable.termQuery("field_name", null)); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> unsearchable.termQuery("field_name", "field_name", null)); assertEquals("Cannot run [exists] queries if the [_field_names] field is disabled", e.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java 
index aa3a90a84460a..03c501fb8e302 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java @@ -28,7 +28,7 @@ public void testSimple() throws Exception { assertThat(f.stringValue(), equalTo("some name")); assertThat(f.fieldType().stored(), equalTo(true)); - assertTrue(mapperService.fieldType("name").isStored()); + assertTrue(mapperService.mappedField("name").isStored()); boolean stored = false; for (IndexableField field : doc.getFields("age")) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index f4d60cc6625be..39208c0341f43 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -419,10 +419,11 @@ public void testInvalidGeopointValuesIgnored() throws Exception { ); } - protected void assertSearchable(MappedFieldType fieldType) { + @Override + protected void assertSearchable(MappedField mappedField) { // always searchable even if it uses TextSearchInfo.NONE - assertTrue(fieldType.isIndexed()); - assertTrue(fieldType.isSearchable()); + assertTrue(mappedField.isIndexed()); + assertTrue(mappedField.isSearchable()); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java index 3d0fe92a0f532..f71c7302a4bbd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java @@ -24,9 +24,9 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { boolean ignoreMalformed = 
randomBoolean(); - MappedFieldType mapper = new GeoPointFieldMapper.Builder("field", ScriptCompiler.NONE, ignoreMalformed, Version.CURRENT).build( + MappedField mapper = new GeoPointFieldMapper.Builder("field", ScriptCompiler.NONE, ignoreMalformed, Version.CURRENT).build( MapperBuilderContext.ROOT - ).fieldType(); + ).field(); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(42.0, 27.1)); Map otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0)); @@ -84,9 +84,9 @@ public void testFetchSourceValue() throws IOException { } public void testFetchVectorTile() throws IOException { - MappedFieldType mapper = new GeoPointFieldMapper.Builder("field", ScriptCompiler.NONE, false, Version.CURRENT).build( + MappedField mapper = new GeoPointFieldMapper.Builder("field", ScriptCompiler.NONE, false, Version.CURRENT).build( MapperBuilderContext.ROOT - ).fieldType(); + ).field(); final int z = randomIntBetween(1, 10); int x = randomIntBetween(0, (1 << z) - 1); int y = randomIntBetween(0, (1 << z) - 1); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java index 93ad371364c7c..83e1dbfc8027d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java @@ -64,8 +64,9 @@ public void testDocValues() throws IOException { List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - GeoPointScriptFieldType ft = build("fromLatLon", Map.of()); - GeoPointScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null); + MappedField mappedField = build("fromLatLon", Map.of()); + GeoPointScriptFieldData ifd = (GeoPointScriptFieldData) mappedField.fielddataBuilder("test", mockContext()::lookup) + .build(null, 
null); searcher.search(new MatchAllDocsQuery(), new Collector() { @Override public ScoreMode scoreMode() { @@ -98,7 +99,8 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { - GeoPointScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null); + GeoPointScriptFieldData ifd = (GeoPointScriptFieldData) simpleMappedField().fielddataBuilder("test", mockContext()::lookup) + .build(null, null); Exception e = expectThrows(IllegalArgumentException.class, () -> ifd.sortField(null, MultiValueMode.MIN, null, false)); assertThat(e.getMessage(), equalTo("can't sort on geo_point field without using specific sorting feature, like geo_distance")); } @@ -110,7 +112,7 @@ public void testUsedInScript() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); + SearchExecutionContext searchContext = mockContext(true, simpleMappedField()); assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() { @Override public boolean needs_score() { @@ -139,7 +141,7 @@ public void testExistsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(2)); + assertThat(searcher.count(simpleMappedField().existsQuery(mockContext())), equalTo(2)); } } } @@ -148,19 +150,19 @@ public void testExistsQuery() throws IOException { public void testRangeQuery() { Exception e = expectThrows( IllegalArgumentException.class, - () -> 
simpleMappedFieldType().rangeQuery("0.0", "45.0", false, false, null, null, null, mockContext()) + () -> simpleMappedField().rangeQuery("0.0", "45.0", false, false, null, null, null, mockContext()) ); assertThat(e.getMessage(), equalTo("Runtime field [test] of type [" + typeName() + "] does not support range queries")); } @Override - protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) { + protected Query randomRangeQuery(MappedField mappedField, SearchExecutionContext ctx) { throw new IllegalArgumentException("Unsupported"); } @Override public void testTermQuery() { - Exception e = expectThrows(IllegalArgumentException.class, () -> simpleMappedFieldType().termQuery("0.0,0.0", mockContext())); + Exception e = expectThrows(IllegalArgumentException.class, () -> simpleMappedField().termQuery("0.0,0.0", mockContext())); assertThat( e.getMessage(), equalTo("Geometry fields do not support exact searching, use dedicated geometry queries instead: [test]") @@ -168,7 +170,7 @@ public void testTermQuery() { } @Override - protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { + protected Query randomTermQuery(MappedField mappedField, SearchExecutionContext ctx) { throw new IllegalArgumentException("Unsupported"); } @@ -176,7 +178,7 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) public void testTermsQuery() { Exception e = expectThrows( IllegalArgumentException.class, - () -> simpleMappedFieldType().termsQuery(List.of("0.0,0.0", "45.0,45.0"), mockContext()) + () -> simpleMappedField().termsQuery(List.of("0.0,0.0", "45.0,45.0"), mockContext()) ); assertThat( @@ -187,17 +189,17 @@ public void testTermsQuery() { } @Override - protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termsQuery(randomList(100, GeometryTestUtils::randomPoint), mockContext()); + protected Query randomTermsQuery(MappedField mappedField, SearchExecutionContext ctx) { + return 
mappedField.termsQuery(randomList(100, GeometryTestUtils::randomPoint), mockContext()); } @Override - protected GeoPointScriptFieldType simpleMappedFieldType() { + protected MappedField simpleMappedField() { return build("fromLatLon", Map.of()); } @Override - protected MappedFieldType loopFieldType() { + protected MappedField loopField() { return build("loop", Map.of()); } @@ -206,7 +208,7 @@ protected String typeName() { return "geo_point"; } - private static GeoPointScriptFieldType build(String code, Map params) { + private static MappedField build(String code, Map params) { return build(new Script(ScriptType.INLINE, "test", code, params)); } @@ -228,7 +230,7 @@ public void execute() { }; } - private static GeoPointScriptFieldType build(Script script) { - return new GeoPointScriptFieldType("test", factory(script), script, emptyMap()); + private static MappedField build(Script script) { + return new MappedField("test", new GeoPointScriptFieldType(factory(script), script, emptyMap())); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 407a33b9ac776..ebd2b94986d7f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -211,10 +211,11 @@ protected boolean supportsMeta() { return false; } - protected void assertSearchable(MappedFieldType fieldType) { + @Override + protected void assertSearchable(MappedField mappedField) { // always searchable even if it uses TextSearchInfo.NONE - assertTrue(fieldType.isIndexed()); - assertTrue(fieldType.isSearchable()); + assertTrue(mappedField.isIndexed()); + assertTrue(mappedField.isSearchable()); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java index bffe27f4cc521..32cc7b1dd4519 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java @@ -15,7 +15,7 @@ public class GeoShapeFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new GeoShapeFieldMapper.Builder("field", true, true).build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new GeoShapeFieldMapper.Builder("field", true, true).build(MapperBuilderContext.ROOT).field(); Map jsonLineString = Map.of("type", "LineString", "coordinates", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java index 9320a6c5de2ea..f8bd7d3a29053 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java @@ -26,7 +26,7 @@ public void testRangeQuery() { : new TsidExtractingIdFieldMapper.IdFieldType(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null, null, null, null) + () -> ft.rangeQuery("_id", null, null, randomBoolean(), randomBoolean(), null, null, null, null) ); assertEquals("Field [_id] of type [_id] does not support range queries", e.getMessage()); } @@ -48,18 +48,18 @@ public void testTermsQuery() { Mockito.when(context.getIndexSettings()).thenReturn(mockSettings); Mockito.when(context.indexVersionCreated()).thenReturn(Version.CURRENT); MappedFieldType ft = new ProvidedIdFieldMapper.IdFieldType(() -> false); - Query query = ft.termQuery("id", 
context); + Query query = ft.termQuery("_id", "id", context); assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query); } public void testIsAggregatable() { MappedFieldType ft = new ProvidedIdFieldMapper.IdFieldType(() -> false); - assertFalse(ft.isAggregatable()); + assertFalse(ft.isAggregatable("_id")); ft = new ProvidedIdFieldMapper.IdFieldType(() -> true); - assertTrue(ft.isAggregatable()); + assertTrue(ft.isAggregatable("_id")); ft = new TsidExtractingIdFieldMapper.IdFieldType(); - assertFalse(ft.isAggregatable()); + assertFalse(ft.isAggregatable("_id")); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldMapperTests.java index 9a5b0ee44ccf6..98471bd98e8ab 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldMapperTests.java @@ -50,11 +50,11 @@ public void testFetchIgnoredFieldValue() throws IOException { mapperService, iw -> { iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field("field", "value"))).rootDoc()); }, iw -> { - SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup()); + SearchLookup lookup = new SearchLookup(mapperService::mappedField, fieldDataLookup()); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); when(searchExecutionContext.lookup()).thenReturn(lookup); - IgnoredFieldMapper.IgnoredFieldType ft = (IgnoredFieldMapper.IgnoredFieldType) mapperService.fieldType("_ignored"); - ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null); + MappedField mappedField = mapperService.mappedField("_ignored"); + ValueFetcher valueFetcher = mappedField.valueFetcher(searchExecutionContext, null); IndexSearcher searcher = newSearcher(iw); LeafReaderContext context = searcher.getIndexReader().leaves().get(0); 
lookup.source().setSegmentAndDocument(context, 0); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java index 3052bfdb3400d..2730cf8f2ab6b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredFieldTypeTests.java @@ -19,14 +19,14 @@ public class IgnoredFieldTypeTests extends FieldTypeTestCase { public void testPrefixQuery() { - MappedFieldType ft = IgnoredFieldMapper.IgnoredFieldType.INSTANCE; + MappedField mappedField = new MappedField(IgnoredFieldMapper.NAME, IgnoredFieldMapper.IgnoredFieldType.INSTANCE); Query expected = new PrefixQuery(new Term("_ignored", new BytesRef("foo*"))); - assertEquals(expected, ft.prefixQuery("foo*", null, MOCK_CONTEXT)); + assertEquals(expected, mappedField.prefixQuery("foo*", null, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.prefixQuery("foo*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> mappedField.prefixQuery("foo*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. 
" @@ -36,27 +36,27 @@ public void testPrefixQuery() { } public void testRegexpQuery() { - MappedFieldType ft = IgnoredFieldMapper.IgnoredFieldType.INSTANCE; + MappedField mappedField = new MappedField(IgnoredFieldMapper.NAME, IgnoredFieldMapper.IgnoredFieldType.INSTANCE); Query expected = new RegexpQuery(new Term("_ignored", new BytesRef("foo?"))); - assertEquals(expected, ft.regexpQuery("foo?", 0, 0, 10, null, MOCK_CONTEXT)); + assertEquals(expected, mappedField.regexpQuery("foo?", 0, 0, 10, null, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.regexpQuery("foo?", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> mappedField.regexpQuery("foo?", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testWildcardQuery() { - MappedFieldType ft = IgnoredFieldMapper.IgnoredFieldType.INSTANCE; + MappedField mappedField = new MappedField(IgnoredFieldMapper.NAME, IgnoredFieldMapper.IgnoredFieldType.INSTANCE); Query expected = new WildcardQuery(new Term("_ignored", new BytesRef("foo*"))); - assertEquals(expected, ft.wildcardQuery("foo*", null, MOCK_CONTEXT)); + assertEquals(expected, mappedField.wildcardQuery("foo*", null, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> mappedField.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java index 0b20c730d4933..af3bc40dbeb2a 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java @@ -46,10 +46,10 @@ public void testFetchFieldValue() throws IOException { SourceToParse source = source(index, "id", b -> b.field("field", "value"), "", Map.of()); iw.addDocument(mapperService.documentMapper().parse(source).rootDoc()); }, iw -> { - IndexFieldMapper.IndexFieldType ft = (IndexFieldMapper.IndexFieldType) mapperService.fieldType("_index"); - SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup()); + MappedField mappedField = mapperService.mappedField("_index"); + SearchLookup lookup = new SearchLookup(mapperService::mappedField, fieldDataLookup()); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); - ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null); + ValueFetcher valueFetcher = mappedField.valueFetcher(searchExecutionContext, null); IndexSearcher searcher = newSearcher(iw); LeafReaderContext context = searcher.getIndexReader().leaves().get(0); lookup.source().setSegmentAndDocument(context, 0); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java index dd90380e02af8..5a184e0efb812 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java @@ -28,17 +28,17 @@ public class IndexFieldTypeTests extends ESTestCase { public void testPrefixQuery() { MappedFieldType ft = IndexFieldMapper.IndexFieldType.INSTANCE; - assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("ind", null, createContext())); - assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("other_ind", null, createContext())); + assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("field", "ind", null, 
createContext())); + assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("field", "other_ind", null, createContext())); } public void testWildcardQuery() { MappedFieldType ft = IndexFieldMapper.IndexFieldType.INSTANCE; - assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("ind*x", null, createContext())); - assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("iNd*x", null, true, createContext())); - assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("other_ind*x", null, createContext())); - assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("Other_ind*x", null, true, createContext())); + assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("field", "ind*x", null, createContext())); + assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("field", "iNd*x", null, true, createContext())); + assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("field", "other_ind*x", null, createContext())); + assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("field", "Other_ind*x", null, true, createContext())); } public void testRegexpQuery() { @@ -46,7 +46,7 @@ public void testRegexpQuery() { QueryShardException e = expectThrows( QueryShardException.class, - () -> assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("ind.x", 0, 0, 10, null, createContext())) + () -> assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("field", "ind.x", 0, 0, 10, null, createContext())) ); assertThat(e.getMessage(), containsString("Can only use regexp queries on keyword and text fields")); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index ad434a52ca45a..e7dc0b163c212 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -115,7 +115,7 @@ public void testNoDocValues() throws Exception { assertEquals("field", fields[0].stringValue()); 
FieldMapper m = (FieldMapper) mapper.mappers().getMapper("field"); - Query existsQuery = m.fieldType().existsQuery(null); + Query existsQuery = m.field().existsQuery(null); assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.NAME, "field")), existsQuery); } @@ -214,7 +214,7 @@ public void testNullValue() throws IOException { public void testDimension() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - IpFieldMapper.IpFieldType ft = (IpFieldMapper.IpFieldType) mapperService.fieldType("field"); + IpFieldMapper.IpFieldType ft = (IpFieldMapper.IpFieldType) mapperService.mappedField("field").type(); assertFalse(ft.isDimension()); assertDimension(true, IpFieldMapper.IpFieldType::isDimension); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java index ec0348d23146f..4e3c9f1400c71 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java @@ -28,18 +28,18 @@ public class IpFieldTypeTests extends FieldTypeTestCase { public void testValueFormat() throws Exception { - MappedFieldType ft = new IpFieldMapper.IpFieldType("field"); + MappedFieldType ft = new IpFieldMapper.IpFieldType(); String ip = "2001:db8::2:1"; BytesRef asBytes = new BytesRef(InetAddressPoint.encode(InetAddress.getByName(ip))); - assertEquals(ip, ft.docValueFormat(null, null).format(asBytes)); + assertEquals(ip, ft.docValueFormat("field", null, null).format(asBytes)); ip = "192.168.1.7"; asBytes = new BytesRef(InetAddressPoint.encode(InetAddress.getByName(ip))); - assertEquals(ip, ft.docValueFormat(null, null).format(asBytes)); + assertEquals(ip, ft.docValueFormat("field", null, null).format(asBytes)); } public void testValueForSearch() { - MappedFieldType ft = new IpFieldMapper.IpFieldType("field"); + 
MappedFieldType ft = new IpFieldMapper.IpFieldType(); String ip = "2001:db8::2:1"; BytesRef asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip))); assertEquals(ip, ft.valueForDisplay(asBytes)); @@ -50,138 +50,139 @@ public void testValueForSearch() { } public void testTermQuery() { - MappedFieldType ft = new IpFieldMapper.IpFieldType("field"); + MappedFieldType ft = new IpFieldMapper.IpFieldType(); String ip = "2001:db8::2:1"; - assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery(ip, MOCK_CONTEXT)); + assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery("field", ip, MOCK_CONTEXT)); ip = "192.168.1.7"; - assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery(ip, MOCK_CONTEXT)); + assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery("field", ip, MOCK_CONTEXT)); ip = "2001:db8::2:1"; String prefix = ip + "/64"; - assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), ft.termQuery(prefix, MOCK_CONTEXT)); + assertEquals( + InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), + ft.termQuery("field", prefix, MOCK_CONTEXT) + ); ip = "192.168.1.7"; prefix = ip + "/16"; - assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, MOCK_CONTEXT)); + assertEquals( + InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), + ft.termQuery("field", prefix, MOCK_CONTEXT) + ); - ft = new IpFieldMapper.IpFieldType("field", false); + ft = new IpFieldMapper.IpFieldType(false); ip = "2001:db8::2:1"; assertEquals( convertToDocValuesQuery(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip))), - ft.termQuery(ip, MOCK_CONTEXT) + ft.termQuery("field", ip, MOCK_CONTEXT) ); ip = "192.168.1.7"; assertEquals( 
convertToDocValuesQuery(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip))), - ft.termQuery(ip, MOCK_CONTEXT) + ft.termQuery("field", ip, MOCK_CONTEXT) ); ip = "2001:db8::2:1"; prefix = ip + "/64"; assertEquals( convertToDocValuesQuery(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64)), - ft.termQuery(prefix, MOCK_CONTEXT) + ft.termQuery("field", prefix, MOCK_CONTEXT) ); ip = "192.168.1.7"; prefix = ip + "/16"; assertEquals( convertToDocValuesQuery(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16)), - ft.termQuery(prefix, MOCK_CONTEXT) + ft.termQuery("field", prefix, MOCK_CONTEXT) ); - MappedFieldType unsearchable = new IpFieldMapper.IpFieldType( - "field", - false, - false, - false, - null, - null, - Collections.emptyMap(), - false + MappedFieldType unsearchable = new IpFieldMapper.IpFieldType(false, false, false, null, null, Collections.emptyMap(), false); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> unsearchable.termQuery("field", "::1", MOCK_CONTEXT) ); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("::1", MOCK_CONTEXT)); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); } public void testTermsQuery() { - MappedFieldType ft = new IpFieldMapper.IpFieldType("field"); + MappedFieldType ft = new IpFieldMapper.IpFieldType(); assertEquals( InetAddressPoint.newSetQuery("field", InetAddresses.forString("::2"), InetAddresses.forString("::5")), - ft.termsQuery(Arrays.asList(InetAddresses.forString("::2"), InetAddresses.forString("::5")), MOCK_CONTEXT) + ft.termsQuery("field", Arrays.asList(InetAddresses.forString("::2"), InetAddresses.forString("::5")), MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newSetQuery("field", InetAddresses.forString("::2"), InetAddresses.forString("::5")), - ft.termsQuery(Arrays.asList("::2", "::5"), MOCK_CONTEXT) + 
ft.termsQuery("field", Arrays.asList("::2", "::5"), MOCK_CONTEXT) ); // if the list includes a prefix query we fallback to a bool query assertEquals( new ConstantScoreQuery( - new BooleanQuery.Builder().add(ft.termQuery("::42", MOCK_CONTEXT), Occur.SHOULD) - .add(ft.termQuery("::2/16", null), Occur.SHOULD) + new BooleanQuery.Builder().add(ft.termQuery("field", "::42", MOCK_CONTEXT), Occur.SHOULD) + .add(ft.termQuery("field", "::2/16", null), Occur.SHOULD) .build() ), - ft.termsQuery(Arrays.asList("::42", "::2/16"), MOCK_CONTEXT) + ft.termsQuery("field", Arrays.asList("::42", "::2/16"), MOCK_CONTEXT) ); } public void testRangeQuery() { - MappedFieldType ft = new IpFieldMapper.IpFieldType("field"); + MappedFieldType ft = new IpFieldMapper.IpFieldType(); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddressPoint.MAX_VALUE), - ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", null, null, randomBoolean(), randomBoolean(), null, null, null, MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.2.0")), - ft.rangeQuery(null, "192.168.2.0", randomBoolean(), true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", null, "192.168.2.0", randomBoolean(), true, null, null, null, MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.1.255")), - ft.rangeQuery(null, "192.168.2.0", randomBoolean(), false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", null, "192.168.2.0", randomBoolean(), false, null, null, null, MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddressPoint.MAX_VALUE), - ft.rangeQuery("2001:db8::", null, true, randomBoolean(), null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::", null, true, 
randomBoolean(), null, null, null, MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddressPoint.MAX_VALUE), - ft.rangeQuery("2001:db8::", null, false, randomBoolean(), null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::", null, false, randomBoolean(), null, null, null, MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddresses.forString("2001:db8::ffff")), - ft.rangeQuery("2001:db8::", "2001:db8::ffff", true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::", "2001:db8::ffff", true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddresses.forString("2001:db8::fffe")), - ft.rangeQuery("2001:db8::", "2001:db8::ffff", false, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::", "2001:db8::ffff", false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::2"), InetAddresses.forString("2001:db8::")), // same lo/hi values but inclusive=false so this won't match anything - ft.rangeQuery("2001:db8::1", "2001:db8::1", false, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::1", "2001:db8::1", false, false, null, null, null, MOCK_CONTEXT) ); // Upper bound is the min IP and is not inclusive - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("::", "::", true, false, null, null, null, MOCK_CONTEXT)); + assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("field", "::", "::", true, false, null, null, null, MOCK_CONTEXT)); // Lower bound is the max IP and is not inclusive assertEquals( new MatchNoDocsQuery(), ft.rangeQuery( + "field", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, @@ -196,68 +197,78 @@ public void testRangeQuery() { assertEquals( 
InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("::fffe:ffff:ffff")), // same lo/hi values but inclusive=false so this won't match anything - ft.rangeQuery("::", "0.0.0.0", true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "::", "0.0.0.0", true, false, null, null, null, MOCK_CONTEXT) ); assertEquals( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::1:0:0:0"), InetAddressPoint.MAX_VALUE), // same lo/hi values but inclusive=false so this won't match anything - ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery( + "field", + "255.255.255.255", + "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + false, + true, + null, + null, + null, + MOCK_CONTEXT + ) ); assertEquals( // lower bound is ipv4, upper bound is ipv6 InetAddressPoint.newRangeQuery("field", InetAddresses.forString("192.168.1.7"), InetAddresses.forString("2001:db8::")), - ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "::ffff:c0a8:107", "2001:db8::", true, true, null, null, null, MOCK_CONTEXT) ); - ft = new IpFieldMapper.IpFieldType("field", false); + ft = new IpFieldMapper.IpFieldType(false); assertEquals( convertToDocValuesQuery(InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddressPoint.MAX_VALUE)), - ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", null, null, randomBoolean(), randomBoolean(), null, null, null, MOCK_CONTEXT) ); assertEquals( convertToDocValuesQuery( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.2.0")) ), - ft.rangeQuery(null, "192.168.2.0", randomBoolean(), true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", null, "192.168.2.0", randomBoolean(), true, null, null, null, MOCK_CONTEXT) ); 
assertEquals( convertToDocValuesQuery( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.1.255")) ), - ft.rangeQuery(null, "192.168.2.0", randomBoolean(), false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", null, "192.168.2.0", randomBoolean(), false, null, null, null, MOCK_CONTEXT) ); assertEquals( convertToDocValuesQuery( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddressPoint.MAX_VALUE) ), - ft.rangeQuery("2001:db8::", null, true, randomBoolean(), null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::", null, true, randomBoolean(), null, null, null, MOCK_CONTEXT) ); assertEquals( convertToDocValuesQuery( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddressPoint.MAX_VALUE) ), - ft.rangeQuery("2001:db8::", null, false, randomBoolean(), null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::", null, false, randomBoolean(), null, null, null, MOCK_CONTEXT) ); assertEquals( convertToDocValuesQuery( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddresses.forString("2001:db8::ffff")) ), - ft.rangeQuery("2001:db8::", "2001:db8::ffff", true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::", "2001:db8::ffff", true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( convertToDocValuesQuery( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddresses.forString("2001:db8::fffe")) ), - ft.rangeQuery("2001:db8::", "2001:db8::ffff", false, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::", "2001:db8::ffff", false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( @@ -265,16 +276,17 @@ public void testRangeQuery() { InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::2"), InetAddresses.forString("2001:db8::")) ), // same lo/hi values but inclusive=false so 
this won't match anything - ft.rangeQuery("2001:db8::1", "2001:db8::1", false, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "2001:db8::1", "2001:db8::1", false, false, null, null, null, MOCK_CONTEXT) ); // Upper bound is the min IP and is not inclusive - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("::", "::", true, false, null, null, null, MOCK_CONTEXT)); + assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("field", "::", "::", true, false, null, null, null, MOCK_CONTEXT)); // Lower bound is the max IP and is not inclusive assertEquals( new MatchNoDocsQuery(), ft.rangeQuery( + "field", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, @@ -291,7 +303,7 @@ public void testRangeQuery() { InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("::fffe:ffff:ffff")) ), // same lo/hi values but inclusive=false so this won't match anything - ft.rangeQuery("::", "0.0.0.0", true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "::", "0.0.0.0", true, false, null, null, null, MOCK_CONTEXT) ); assertEquals( @@ -299,7 +311,17 @@ public void testRangeQuery() { InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::1:0:0:0"), InetAddressPoint.MAX_VALUE) ), // same lo/hi values but inclusive=false so this won't match anything - ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery( + "field", + "255.255.255.255", + "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + false, + true, + null, + null, + null, + MOCK_CONTEXT + ) ); assertEquals( @@ -307,37 +329,27 @@ public void testRangeQuery() { convertToDocValuesQuery( InetAddressPoint.newRangeQuery("field", InetAddresses.forString("192.168.1.7"), InetAddresses.forString("2001:db8::")) ), - ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "::ffff:c0a8:107", 
"2001:db8::", true, true, null, null, null, MOCK_CONTEXT) ); - MappedFieldType unsearchable = new IpFieldMapper.IpFieldType( - "field", - false, - false, - false, - null, - null, - Collections.emptyMap(), - false - ); + MappedFieldType unsearchable = new IpFieldMapper.IpFieldType(false, false, false, null, null, Collections.emptyMap(), false); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.rangeQuery("::1", "2001::", true, true, null, null, null, MOCK_CONTEXT) + () -> unsearchable.rangeQuery("field", "::1", "2001::", true, true, null, null, null, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); } public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, Version.CURRENT).build( - MapperBuilderContext.ROOT - ).fieldType(); + MappedField mapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, Version.CURRENT).build(MapperBuilderContext.ROOT) + .field(); assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8::2:1")); assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8:0:0:0:0:2:1")); assertEquals(List.of("::1"), fetchSourceValue(mapper, "0:0:0:0:0:0:0:1")); - MappedFieldType nullValueMapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, Version.CURRENT).nullValue( + MappedField nullValueMapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, Version.CURRENT).nullValue( "2001:db8:0:0:0:0:2:7" - ).build(MapperBuilderContext.ROOT).fieldType(); + ).build(MapperBuilderContext.ROOT).field(); assertEquals(List.of("2001:db8::2:7"), fetchSourceValue(nullValueMapper, null)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldTypeTests.java index d0aac9d81a44b..1c06df07c71fc 
100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldTypeTests.java @@ -17,7 +17,7 @@ public class IpRangeFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { RangeFieldMapper mapper = new RangeFieldMapper.Builder("field", RangeType.IP, true).build(MapperBuilderContext.ROOT); Map range = Map.of("gte", "2001:db8:0:0:0:0:2:1"); - assertEquals(List.of(Map.of("gte", "2001:db8::2:1")), fetchSourceValue(mapper.fieldType(), range)); - assertEquals(List.of("2001:db8::2:1/32"), fetchSourceValue(mapper.fieldType(), "2001:db8:0:0:0:0:2:1/32")); + assertEquals(List.of(Map.of("gte", "2001:db8::2:1")), fetchSourceValue(mapper.field(), range)); + assertEquals(List.of("2001:db8::2:1/32"), fetchSourceValue(mapper.field(), "2001:db8:0:0:0:0:2:1/32")); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index c8c013b2e7408..13c00b643e742 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -51,10 +51,10 @@ public class IpScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase { public void testFormat() throws IOException { - assertThat(simpleMappedFieldType().docValueFormat(null, null), sameInstance(DocValueFormat.IP)); - Exception e = expectThrows(IllegalArgumentException.class, () -> simpleMappedFieldType().docValueFormat("ASDFA", null)); + assertThat(simpleMappedField().docValueFormat(null, null), sameInstance(DocValueFormat.IP)); + Exception e = expectThrows(IllegalArgumentException.class, () -> simpleMappedField().docValueFormat("ASDFA", null)); assertThat(e.getMessage(), equalTo("Field [test] of type [ip] does not support custom formats")); - e = 
expectThrows(IllegalArgumentException.class, () -> simpleMappedFieldType().docValueFormat(null, ZoneId.of("America/New_York"))); + e = expectThrows(IllegalArgumentException.class, () -> simpleMappedField().docValueFormat(null, ZoneId.of("America/New_York"))); assertThat(e.getMessage(), equalTo("Field [test] of type [ip] does not support custom time zones")); } @@ -66,9 +66,10 @@ public void testDocValues() throws IOException { List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - IpScriptFieldType ft = build("append_param", Map.of("param", ".1")); - BinaryScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null); - DocValueFormat format = ft.docValueFormat(null, null); + MappedField mappedField = build("append_param", Map.of("param", ".1")); + BinaryScriptFieldData ifd = (BinaryScriptFieldData) mappedField.fielddataBuilder("test", mockContext()::lookup) + .build(null, null); + DocValueFormat format = mappedField.docValueFormat(null, null); searcher.search(new MatchAllDocsQuery(), new Collector() { @Override public ScoreMode scoreMode() { @@ -106,7 +107,8 @@ public void testSort() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null); + BinaryScriptFieldData ifd = (BinaryScriptFieldData) simpleMappedField().fielddataBuilder("test", mockContext()::lookup) + .build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); assertThat( @@ -133,7 +135,7 @@ public void testUsedInScript() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": 
[\"192.168.0.2\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); + SearchExecutionContext searchContext = mockContext(true, simpleMappedField()); assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() { @Override public boolean needs_score() { @@ -162,7 +164,7 @@ public void testExistsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().existsQuery(mockContext())), equalTo(1)); } } } @@ -176,9 +178,7 @@ public void testRangeQuery() throws IOException { try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( - searcher.count( - simpleMappedFieldType().rangeQuery("192.0.0.0", "200.0.0.0", false, false, null, null, null, mockContext()) - ), + searcher.count(simpleMappedField().rangeQuery("192.0.0.0", "200.0.0.0", false, false, null, null, null, mockContext())), equalTo(1) ); } @@ -186,8 +186,8 @@ public void testRangeQuery() throws IOException { } @Override - protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.rangeQuery("192.0.0.0", "200.0.0.0", randomBoolean(), randomBoolean(), null, null, null, ctx); + protected Query randomRangeQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.rangeQuery("192.0.0.0", "200.0.0.0", randomBoolean(), randomBoolean(), null, null, null, ctx); } @Override @@ -198,18 +198,18 @@ public void testTermQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0\"]}")))); try 
(DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - IpScriptFieldType fieldType = build("append_param", Map.of("param", ".1")); - assertThat(searcher.count(fieldType.termQuery("192.168.0.1", mockContext())), equalTo(1)); - assertThat(searcher.count(fieldType.termQuery("192.168.0.7", mockContext())), equalTo(0)); - assertThat(searcher.count(fieldType.termQuery("192.168.0.0/16", mockContext())), equalTo(2)); - assertThat(searcher.count(fieldType.termQuery("10.168.0.0/16", mockContext())), equalTo(0)); + MappedField mappedField = build("append_param", Map.of("param", ".1")); + assertThat(searcher.count(mappedField.termQuery("192.168.0.1", mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.termQuery("192.168.0.7", mockContext())), equalTo(0)); + assertThat(searcher.count(mappedField.termQuery("192.168.0.0/16", mockContext())), equalTo(2)); + assertThat(searcher.count(mappedField.termQuery("10.168.0.0/16", mockContext())), equalTo(0)); } } } @Override - protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termQuery(randomIp(randomBoolean()), ctx); + protected Query randomTermQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termQuery(randomIp(randomBoolean()), ctx); } @Override @@ -221,30 +221,24 @@ public void testTermsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat( - searcher.count(simpleMappedFieldType().termsQuery(List.of("192.168.0.1", "1.1.1.1"), mockContext())), - equalTo(2) - ); - assertThat( - searcher.count(simpleMappedFieldType().termsQuery(List.of("192.168.0.0/16", "1.1.1.1"), mockContext())), - equalTo(3) - ); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of("192.168.0.1", "1.1.1.1"), mockContext())), equalTo(2)); + 
assertThat(searcher.count(simpleMappedField().termsQuery(List.of("192.168.0.0/16", "1.1.1.1"), mockContext())), equalTo(3)); } } } @Override - protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termsQuery(randomList(100, () -> randomIp(randomBoolean())), ctx); + protected Query randomTermsQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termsQuery(randomList(100, () -> randomIp(randomBoolean())), ctx); } @Override - protected IpScriptFieldType simpleMappedFieldType() { + protected MappedField simpleMappedField() { return build("read_foo", Map.of()); } @Override - protected MappedFieldType loopFieldType() { + protected MappedField loopField() { return build("loop", Map.of()); } @@ -253,7 +247,7 @@ protected String typeName() { return "ip"; } - private static IpScriptFieldType build(String code, Map params) { + private static MappedField build(String code, Map params) { return build(new Script(ScriptType.INLINE, "test", code, params)); } @@ -284,7 +278,7 @@ public void execute() { }; } - private static IpScriptFieldType build(Script script) { - return new IpScriptFieldType("test", factory(script), script, emptyMap()); + private static MappedField build(Script script) { + return new MappedField("test", new IpScriptFieldType(factory(script), script, emptyMap())); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java index a422fb58182d0..34755c6d47dca 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java @@ -24,8 +24,8 @@ public void testMergeMultiField() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = 
createMapperService(mapping); - assertTrue(mapperService.fieldType("name").isSearchable()); - assertThat(mapperService.fieldType("name.indexed"), nullValue()); + assertTrue(mapperService.mappedField("name").isSearchable()); + assertThat(mapperService.mappedField("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); LuceneDocument doc = mapperService.documentMapper().parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc(); @@ -37,12 +37,12 @@ public void testMergeMultiField() throws Exception { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertTrue(mapperService.fieldType("name").isSearchable()); + assertTrue(mapperService.mappedField("name").isSearchable()); - assertThat(mapperService.fieldType("name.indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed2"), nullValue()); - assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); + assertThat(mapperService.mappedField("name.indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed2"), nullValue()); + assertThat(mapperService.mappedField("name.not_indexed3"), nullValue()); doc = mapperService.documentMapper().parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); @@ -53,30 +53,30 @@ public void testMergeMultiField() throws Exception { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - 
assertTrue(mapperService.fieldType("name").isSearchable()); + assertTrue(mapperService.mappedField("name").isSearchable()); - assertThat(mapperService.fieldType("name.indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed2"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); + assertThat(mapperService.mappedField("name.indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed2"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertTrue(mapperService.fieldType("name").isSearchable()); + assertTrue(mapperService.mappedField("name").isSearchable()); - assertThat(mapperService.fieldType("name.indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed2"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed3"), notNullValue()); + assertThat(mapperService.mappedField("name.indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed2"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed3"), notNullValue()); } public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createMapperService(mapping); - assertTrue(mapperService.fieldType("name").isSearchable()); - 
assertThat(mapperService.fieldType("name.indexed"), nullValue()); + assertTrue(mapperService.mappedField("name").isSearchable()); + assertThat(mapperService.mappedField("name.indexed"), nullValue()); LuceneDocument doc = mapperService.documentMapper().parse(source(b -> b.field("name", "some name"))).rootDoc(); IndexableField f = doc.getField("name"); @@ -87,12 +87,12 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json"); mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertTrue(mapperService.fieldType("name").isSearchable()); + assertTrue(mapperService.mappedField("name").isSearchable()); - assertThat(mapperService.fieldType("name.indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed2"), nullValue()); - assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); + assertThat(mapperService.mappedField("name.indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed2"), nullValue()); + assertThat(mapperService.mappedField("name.not_indexed3"), nullValue()); doc = mapperService.documentMapper().parse(source(b -> b.field("name", "some name"))).rootDoc(); f = doc.getField("name"); @@ -103,12 +103,12 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json"); mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertTrue(mapperService.fieldType("name").isSearchable()); + assertTrue(mapperService.mappedField("name").isSearchable()); - assertThat(mapperService.fieldType("name.indexed"), 
notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed2"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); + assertThat(mapperService.mappedField("name.indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed2"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json"); try { @@ -120,10 +120,10 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { } // There are conflicts, so the `name.not_indexed3` has not been added - assertTrue(mapperService.fieldType("name").isSearchable()); - assertThat(mapperService.fieldType("name.indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed2"), notNullValue()); - assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); + assertTrue(mapperService.mappedField("name").isSearchable()); + assertThat(mapperService.mappedField("name.indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed2"), notNullValue()); + assertThat(mapperService.mappedField("name.not_indexed3"), nullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index fcdbbd3a7d268..46498b7d7273f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -307,8 +307,8 @@ public void testEnableNorms() 
throws IOException { public void testDimension() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - KeywordFieldMapper.KeywordFieldType ft = (KeywordFieldMapper.KeywordFieldType) mapperService.fieldType("field"); - assertFalse(ft.isDimension()); + MappedField mappedField = mapperService.mappedField("field"); + assertFalse(mappedField.isDimension()); assertDimension(true, KeywordFieldMapper.KeywordFieldType::isDimension); assertDimension(false, KeywordFieldMapper.KeywordFieldType::isDimension); @@ -395,8 +395,8 @@ public void testDimensionExtraLongKeyword() throws IOException { public void testConfigureSimilarity() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "keyword").field("similarity", "boolean"))); - MappedFieldType ft = mapperService.documentMapper().mappers().fieldTypesLookup().get("field"); - assertEquals("boolean", ft.getTextSearchInfo().similarity().name()); + MappedField mappedField = mapperService.documentMapper().mappers().mappedFieldsLookup().get("field"); + assertEquals("boolean", mappedField.getTextSearchInfo().similarity().name()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -514,15 +514,15 @@ public void testSplitQueriesOnWhitespace() throws IOException { b.endObject(); })); - MappedFieldType fieldType = mapperService.fieldType("field"); - assertThat(fieldType, instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - KeywordFieldMapper.KeywordFieldType ft = (KeywordFieldMapper.KeywordFieldType) fieldType; + MappedField mappedField = mapperService.mappedField("field"); + assertThat(mappedField.type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + KeywordFieldMapper.KeywordFieldType ft = (KeywordFieldMapper.KeywordFieldType) mappedField.type(); Analyzer a = ft.getTextSearchInfo().searchAnalyzer(); assertTokenStreamContents(a.tokenStream("", "Hello World"), new 
String[] { "Hello World" }); - fieldType = mapperService.fieldType("field_with_normalizer"); - assertThat(fieldType, instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - ft = (KeywordFieldMapper.KeywordFieldType) fieldType; + mappedField = mapperService.mappedField("field_with_normalizer"); + assertThat(mappedField.type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + ft = (KeywordFieldMapper.KeywordFieldType) mappedField.type(); assertThat(ft.getTextSearchInfo().searchAnalyzer().name(), equalTo("lowercase")); assertTokenStreamContents( ft.getTextSearchInfo().searchAnalyzer().analyzer().tokenStream("", "Hello World"), @@ -542,17 +542,17 @@ public void testSplitQueriesOnWhitespace() throws IOException { b.endObject(); })); - fieldType = mapperService.fieldType("field"); - assertThat(fieldType, instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - ft = (KeywordFieldMapper.KeywordFieldType) fieldType; + mappedField = mapperService.mappedField("field"); + assertThat(mappedField.type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + ft = (KeywordFieldMapper.KeywordFieldType) mappedField.type(); assertTokenStreamContents( ft.getTextSearchInfo().searchAnalyzer().analyzer().tokenStream("", "Hello World"), new String[] { "Hello", "World" } ); - fieldType = mapperService.fieldType("field_with_normalizer"); - assertThat(fieldType, instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - ft = (KeywordFieldMapper.KeywordFieldType) fieldType; + mappedField = mapperService.mappedField("field_with_normalizer"); + assertThat(mappedField.type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + ft = (KeywordFieldMapper.KeywordFieldType) mappedField.type(); assertThat(ft.getTextSearchInfo().searchAnalyzer().name(), equalTo("lowercase")); assertTokenStreamContents( ft.getTextSearchInfo().searchAnalyzer().analyzer().tokenStream("", "Hello World"), @@ -707,8 +707,8 @@ public void testLegacyField() throws Exception { b.field("normalizer", 
"unknown-normalizer"); b.endObject(); })); - assertThat(service.fieldType("mykeyw"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - assertEquals(Lucene.KEYWORD_ANALYZER, ((KeywordFieldMapper.KeywordFieldType) service.fieldType("mykeyw")).normalizer()); + assertThat(service.mappedField("mykeyw").type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertEquals(Lucene.KEYWORD_ANALYZER, ((KeywordFieldMapper.KeywordFieldType) service.mappedField("mykeyw").type()).normalizer()); // check that normalizer can be updated merge(service, mapping(b -> { @@ -717,7 +717,7 @@ public void testLegacyField() throws Exception { b.field("normalizer", "lowercase"); b.endObject(); })); - assertThat(service.fieldType("mykeyw"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - assertNotEquals(Lucene.KEYWORD_ANALYZER, ((KeywordFieldMapper.KeywordFieldType) service.fieldType("mykeyw")).normalizer()); + assertThat(service.mappedField("" + "mykeyw").type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertNotEquals(Lucene.KEYWORD_ANALYZER, ((KeywordFieldMapper.KeywordFieldType) service.mappedField("mykeyw").type()).normalizer()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index dce91c14b5523..74bb572e9d262 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -54,11 +54,12 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase { public void testIsFieldWithinQuery() throws IOException { - KeywordFieldType ft = new KeywordFieldType("field", randomBoolean(), randomBoolean(), Map.of()); + KeywordFieldType ft = new KeywordFieldType(randomBoolean(), randomBoolean(), Map.of()); // current impl ignores args and should always return INTERSECTS assertEquals( Relation.INTERSECTS, 
ft.isFieldWithinQuery( + "field", null, RandomStrings.randomAsciiLettersOfLengthBetween(random(), 0, 5), RandomStrings.randomAsciiLettersOfLengthBetween(random(), 0, 5), @@ -72,14 +73,17 @@ public void testIsFieldWithinQuery() throws IOException { } public void testTermQuery() { - MappedFieldType ft = new KeywordFieldType("field"); - assertEquals(new TermQuery(new Term("field", "foo")), ft.termQuery("foo", MOCK_CONTEXT)); + MappedFieldType ft = new KeywordFieldType(); + assertEquals(new TermQuery(new Term("field", "foo")), ft.termQuery("field", "foo", MOCK_CONTEXT)); - MappedFieldType ft2 = new KeywordFieldType("field", false, true, Map.of()); - assertEquals(SortedSetDocValuesField.newSlowExactQuery("field", new BytesRef("foo")), ft2.termQuery("foo", MOCK_CONTEXT)); + MappedFieldType ft2 = new KeywordFieldType(false, true, Map.of()); + assertEquals(SortedSetDocValuesField.newSlowExactQuery("field", new BytesRef("foo")), ft2.termQuery("field", "foo", MOCK_CONTEXT)); - MappedFieldType unsearchable = new KeywordFieldType("field", false, false, Collections.emptyMap()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("bar", MOCK_CONTEXT)); + MappedFieldType unsearchable = new KeywordFieldType(false, false, Collections.emptyMap()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> unsearchable.termQuery("field", "bar", MOCK_CONTEXT) + ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); } @@ -97,65 +101,65 @@ protected TokenStream normalize(String fieldName, TokenStream in) { return new LowerCaseFilter(in); } }; - MappedFieldType ft = new KeywordFieldType("field", new NamedAnalyzer("my_normalizer", AnalyzerScope.INDEX, normalizer)); - assertEquals(new TermQuery(new Term("field", "foo bar")), ft.termQuery("fOo BaR", MOCK_CONTEXT)); + MappedFieldType ft = new KeywordFieldType(new NamedAnalyzer("my_normalizer", 
AnalyzerScope.INDEX, normalizer)); + assertEquals(new TermQuery(new Term("field", "foo bar")), ft.termQuery("field", "fOo BaR", MOCK_CONTEXT)); } public void testTermsQuery() { - MappedFieldType ft = new KeywordFieldType("field"); + MappedFieldType ft = new KeywordFieldType(); List terms = new ArrayList<>(); terms.add(new BytesRef("foo")); terms.add(new BytesRef("bar")); - assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), MOCK_CONTEXT)); + assertEquals(new TermInSetQuery("field", terms), ft.termsQuery("field", Arrays.asList("foo", "bar"), MOCK_CONTEXT)); - MappedFieldType ft2 = new KeywordFieldType("field", false, true, Map.of()); - assertEquals(new DocValuesTermsQuery("field", terms), ft2.termsQuery(Arrays.asList("foo", "bar"), MOCK_CONTEXT)); + MappedFieldType ft2 = new KeywordFieldType(false, true, Map.of()); + assertEquals(new DocValuesTermsQuery("field", terms), ft2.termsQuery("field", Arrays.asList("foo", "bar"), MOCK_CONTEXT)); - MappedFieldType unsearchable = new KeywordFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new KeywordFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.termsQuery(Arrays.asList("foo", "bar"), MOCK_CONTEXT) + () -> unsearchable.termsQuery("field", Arrays.asList("foo", "bar"), MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); } public void testExistsQuery() { { - KeywordFieldType ft = new KeywordFieldType("field"); - assertEquals(new FieldExistsQuery("field"), ft.existsQuery(MOCK_CONTEXT)); + KeywordFieldType ft = new KeywordFieldType(); + assertEquals(new FieldExistsQuery("field"), ft.existsQuery("field", MOCK_CONTEXT)); } { - KeywordFieldType ft = new KeywordFieldType("field", false, true, Map.of()); - assertEquals(new FieldExistsQuery("field"), ft.existsQuery(MOCK_CONTEXT)); + 
KeywordFieldType ft = new KeywordFieldType(false, true, Map.of()); + assertEquals(new FieldExistsQuery("field"), ft.existsQuery("field", MOCK_CONTEXT)); } { FieldType fieldType = new FieldType(); fieldType.setOmitNorms(false); - KeywordFieldType ft = new KeywordFieldType("field", fieldType); - assertEquals(new FieldExistsQuery("field"), ft.existsQuery(MOCK_CONTEXT)); + KeywordFieldType ft = new KeywordFieldType(fieldType); + assertEquals(new FieldExistsQuery("field"), ft.existsQuery("field", MOCK_CONTEXT)); } { - KeywordFieldType ft = new KeywordFieldType("field", true, false, Collections.emptyMap()); - assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.NAME, "field")), ft.existsQuery(MOCK_CONTEXT)); + KeywordFieldType ft = new KeywordFieldType(true, false, Collections.emptyMap()); + assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.NAME, "field")), ft.existsQuery("field", MOCK_CONTEXT)); } } public void testRangeQuery() { - MappedFieldType ft = new KeywordFieldType("field"); + MappedFieldType ft = new KeywordFieldType(); assertEquals( new TermRangeQuery("field", BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"), true, false), - ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "foo", "bar", true, false, null, null, null, MOCK_CONTEXT) ); - MappedFieldType ft2 = new KeywordFieldType("field", false, true, Map.of()); + MappedFieldType ft2 = new KeywordFieldType(false, true, Map.of()); assertEquals( SortedSetDocValuesField.newSlowRangeQuery("field", BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"), true, false), - ft2.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT) + ft2.rangeQuery("field", "foo", "bar", true, false, null, null, null, MOCK_CONTEXT) ); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.rangeQuery("field", "foo", "bar", true, 
false, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[range] queries on [text] or [keyword] fields cannot be executed when " + "'search.allow_expensive_queries' is set to false.", @@ -164,40 +168,41 @@ public void testRangeQuery() { } public void testRegexpQuery() { - MappedFieldType ft = new KeywordFieldType("field"); - assertEquals(new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("foo.*", 0, 0, 10, null, MOCK_CONTEXT)); + MappedFieldType ft = new KeywordFieldType(); + assertEquals(new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("field", "foo.*", 0, 0, 10, null, MOCK_CONTEXT)); - MappedFieldType unsearchable = new KeywordFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new KeywordFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.regexpQuery("foo.*", 0, 0, 10, null, MOCK_CONTEXT) + () -> unsearchable.regexpQuery("field", "foo.*", 0, 0, 10, null, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.regexpQuery("foo.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.regexpQuery("field", "foo.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testFuzzyQuery() { - MappedFieldType ft = new KeywordFieldType("field"); + MappedFieldType ft = new KeywordFieldType(); assertEquals( new FuzzyQuery(new Term("field", "foo"), 2, 1, 50, true), - ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) + ft.fuzzyQuery("field", "foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) ); - MappedFieldType unsearchable = new 
KeywordFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new KeywordFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) + () -> unsearchable.fuzzyQuery("field", "foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); ElasticsearchException ee = expectThrows( ElasticsearchException.class, () -> ft.fuzzyQuery( + "field", "foo", Fuzziness.AUTO, randomInt(10) + 1, @@ -210,14 +215,14 @@ public void testFuzzyQuery() { } public void testNormalizeQueries() { - MappedFieldType ft = new KeywordFieldType("field"); - assertEquals(new TermQuery(new Term("field", new BytesRef("FOO"))), ft.termQuery("FOO", null)); - ft = new KeywordFieldType("field", Lucene.STANDARD_ANALYZER); - assertEquals(new TermQuery(new Term("field", new BytesRef("foo"))), ft.termQuery("FOO", null)); + MappedFieldType ft = new KeywordFieldType(); + assertEquals(new TermQuery(new Term("field", new BytesRef("FOO"))), ft.termQuery("field", "FOO", null)); + ft = new KeywordFieldType(Lucene.STANDARD_ANALYZER); + assertEquals(new TermQuery(new Term("field", new BytesRef("foo"))), ft.termQuery("field", "FOO", null)); } public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new KeywordFieldMapper.Builder("field", Version.CURRENT).build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new KeywordFieldMapper.Builder("field", Version.CURRENT).build(MapperBuilderContext.ROOT).field(); assertEquals(List.of("value"), fetchSourceValue(mapper, "value")); assertEquals(List.of("42"), fetchSourceValue(mapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(mapper, true)); @@ -225,26 +230,24 @@ public void testFetchSourceValue() throws IOException { 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> fetchSourceValue(mapper, "value", "format")); assertEquals("Field [field] of type [keyword] doesn't support formats.", e.getMessage()); - MappedFieldType ignoreAboveMapper = new KeywordFieldMapper.Builder("field", Version.CURRENT).ignoreAbove(4) + MappedField ignoreAboveMapper = new KeywordFieldMapper.Builder("field", Version.CURRENT).ignoreAbove(4) .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); assertEquals(List.of(), fetchSourceValue(ignoreAboveMapper, "value")); assertEquals(List.of("42"), fetchSourceValue(ignoreAboveMapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(ignoreAboveMapper, true)); - MappedFieldType normalizerMapper = new KeywordFieldMapper.Builder( - "field", - createIndexAnalyzers(), - ScriptCompiler.NONE, - Version.CURRENT - ).normalizer("lowercase").build(MapperBuilderContext.ROOT).fieldType(); + MappedField normalizerMapper = new KeywordFieldMapper.Builder("field", createIndexAnalyzers(), ScriptCompiler.NONE, Version.CURRENT) + .normalizer("lowercase") + .build(MapperBuilderContext.ROOT) + .field(); assertEquals(List.of("value"), fetchSourceValue(normalizerMapper, "VALUE")); assertEquals(List.of("42"), fetchSourceValue(normalizerMapper, 42L)); assertEquals(List.of("value"), fetchSourceValue(normalizerMapper, "value")); - MappedFieldType nullValueMapper = new KeywordFieldMapper.Builder("field", Version.CURRENT).nullValue("NULL") + MappedField nullValueMapper = new KeywordFieldMapper.Builder("field", Version.CURRENT).nullValue("NULL") .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper, null)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index 0a06aca4add6e..b70bd469b5c69 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -59,8 +59,9 @@ public void testDocValues() throws IOException { List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - KeywordScriptFieldType ft = build("append_param", Map.of("param", "-suffix")); - StringScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null); + MappedField mappedField = build("append_param", Map.of("param", "-suffix")); + StringScriptFieldData ifd = (StringScriptFieldData) mappedField.fielddataBuilder("test", mockContext()::lookup) + .build(null, null); searcher.search(new MatchAllDocsQuery(), new Collector() { @Override public ScoreMode scoreMode() { @@ -98,7 +99,8 @@ public void testSort() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null); + BinaryScriptFieldData ifd = (BinaryScriptFieldData) simpleMappedField().fielddataBuilder("test", mockContext()::lookup) + .build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"a\"]}")); @@ -116,7 +118,7 @@ public void testUsedInScript() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aa\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); + 
SearchExecutionContext searchContext = mockContext(true, simpleMappedField()); assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() { @Override public boolean needs_score() { @@ -145,7 +147,7 @@ public void testExistsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().existsQuery(mockContext())), equalTo(1)); } } } @@ -159,10 +161,7 @@ public void testFuzzyQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); // Totally wrong, no match try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat( - searcher.count(simpleMappedFieldType().fuzzyQuery("cat", Fuzziness.AUTO, 0, 1, true, mockContext())), - equalTo(3) - ); + assertThat(searcher.count(simpleMappedField().fuzzyQuery("cat", Fuzziness.AUTO, 0, 1, true, mockContext())), equalTo(3)); } } } @@ -175,8 +174,8 @@ public void testFuzzyQueryInLoop() { checkLoop(this::randomFuzzyQuery); } - private Query randomFuzzyQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.fuzzyQuery( + private Query randomFuzzyQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.fuzzyQuery( randomAlphaOfLengthBetween(1, 1000), randomFrom(Fuzziness.AUTO, Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO), randomInt(), @@ -193,7 +192,7 @@ public void testPrefixQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - 
assertThat(searcher.count(simpleMappedFieldType().prefixQuery("cat", null, mockContext())), equalTo(2)); + assertThat(searcher.count(simpleMappedField().prefixQuery("cat", null, mockContext())), equalTo(2)); } } } @@ -206,8 +205,8 @@ public void testPrefixQueryInLoop() { checkLoop(this::randomPrefixQuery); } - private Query randomPrefixQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.prefixQuery(randomAlphaOfLengthBetween(1, 1000), null, ctx); + private Query randomPrefixQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.prefixQuery(randomAlphaOfLengthBetween(1, 1000), null, ctx); } @Override @@ -219,19 +218,19 @@ public void testRangeQuery() throws IOException { try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( - searcher.count(simpleMappedFieldType().rangeQuery("cat", "d", false, false, null, null, null, mockContext())), + searcher.count(simpleMappedField().rangeQuery("cat", "d", false, false, null, null, null, mockContext())), equalTo(1) ); assertThat( - searcher.count(simpleMappedFieldType().rangeQuery(null, "d", true, false, null, null, null, mockContext())), + searcher.count(simpleMappedField().rangeQuery(null, "d", true, false, null, null, null, mockContext())), equalTo(2) ); assertThat( - searcher.count(simpleMappedFieldType().rangeQuery("cat", null, false, true, null, null, null, mockContext())), + searcher.count(simpleMappedField().rangeQuery("cat", null, false, true, null, null, null, mockContext())), equalTo(2) ); assertThat( - searcher.count(simpleMappedFieldType().rangeQuery(null, null, true, true, null, null, null, mockContext())), + searcher.count(simpleMappedField().rangeQuery(null, null, true, true, null, null, null, mockContext())), equalTo(3) ); } @@ -239,10 +238,10 @@ public void testRangeQuery() throws IOException { } @Override - protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) { + protected Query 
randomRangeQuery(MappedField mappedField, SearchExecutionContext ctx) { boolean lowerNull = randomBoolean(); boolean upperNull = randomBoolean(); - return ft.rangeQuery( + return mappedField.rangeQuery( lowerNull ? null : randomAlphaOfLengthBetween(0, 1000), upperNull ? null : randomAlphaOfLengthBetween(0, 1000), lowerNull || randomBoolean(), @@ -263,7 +262,7 @@ public void testRegexpQuery() throws IOException { IndexSearcher searcher = newSearcher(reader); assertThat( searcher.count( - simpleMappedFieldType().regexpQuery("ca.+", 0, 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, null, mockContext()) + simpleMappedField().regexpQuery("ca.+", 0, 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, null, mockContext()) ), equalTo(2) ); @@ -275,8 +274,8 @@ public void testRegexpQueryInLoop() throws IOException { checkLoop(this::randomRegexpQuery); } - private Query randomRegexpQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.regexpQuery(randomAlphaOfLengthBetween(1, 1000), randomInt(0xFF), 0, Integer.MAX_VALUE, null, ctx); + private Query randomRegexpQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.regexpQuery(randomAlphaOfLengthBetween(1, 1000), randomInt(0xFF), 0, Integer.MAX_VALUE, null, ctx); } @Override @@ -286,15 +285,15 @@ public void testTermQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-suffix")); - assertThat(searcher.count(fieldType.termQuery("1-suffix", mockContext())), equalTo(1)); + MappedField mappedField = build("append_param", Map.of("param", "-suffix")); + assertThat(searcher.count(mappedField.termQuery("1-suffix", mockContext())), equalTo(1)); } } } @Override - protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return 
ft.termQuery(randomAlphaOfLengthBetween(1, 1000), ctx); + protected Query randomTermQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termQuery(randomAlphaOfLengthBetween(1, 1000), ctx); } @Override @@ -306,14 +305,14 @@ public void testTermsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1", "2"), mockContext())), equalTo(2)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of("1", "2"), mockContext())), equalTo(2)); } } } @Override - protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termsQuery(randomList(100, () -> randomAlphaOfLengthBetween(1, 1000)), ctx); + protected Query randomTermsQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termsQuery(randomList(100, () -> randomAlphaOfLengthBetween(1, 1000)), ctx); } public void testWildcardQuery() throws IOException { @@ -322,7 +321,7 @@ public void testWildcardQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().wildcardQuery("a*b", null, mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().wildcardQuery("a*b", null, mockContext())), equalTo(1)); } } } @@ -334,7 +333,7 @@ public void testNormalizedWildcardQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().normalizedWildcardQuery("a*b", null, mockContext())), equalTo(1)); + 
assertThat(searcher.count(simpleMappedField().normalizedWildcardQuery("a*b", null, mockContext())), equalTo(1)); } } } @@ -347,8 +346,8 @@ public void testWildcardQueryInLoop() { checkLoop(this::randomWildcardQuery); } - private Query randomWildcardQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.wildcardQuery(randomAlphaOfLengthBetween(1, 1000), null, ctx); + private Query randomWildcardQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.wildcardQuery(randomAlphaOfLengthBetween(1, 1000), null, ctx); } public void testMatchQuery() throws IOException { @@ -357,8 +356,8 @@ public void testMatchQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-Suffix")); - SearchExecutionContext searchExecutionContext = mockContext(true, fieldType); + MappedField mappedField = build("append_param", Map.of("param", "-Suffix")); + SearchExecutionContext searchExecutionContext = mockContext(true, mappedField); Query query = new MatchQueryBuilder("test", "1-Suffix").toQuery(searchExecutionContext); assertThat(searcher.count(query), equalTo(1)); } @@ -366,12 +365,12 @@ public void testMatchQuery() throws IOException { } @Override - protected KeywordScriptFieldType simpleMappedFieldType() { + protected MappedField simpleMappedField() { return build("read_foo", Map.of()); } @Override - protected KeywordScriptFieldType loopFieldType() { + protected MappedField loopField() { return build("loop", Map.of()); } @@ -380,7 +379,7 @@ protected String typeName() { return "keyword"; } - private static KeywordScriptFieldType build(String code, Map params) { + private static MappedField build(String code, Map params) { return build(new Script(ScriptType.INLINE, "test", code, params)); } @@ -411,7 +410,7 @@ public void 
execute() { }; } - private static KeywordScriptFieldType build(Script script) { - return new KeywordScriptFieldType("test", factory(script), script, emptyMap()); + private static MappedField build(Script script) { + return new MappedField("test", new KeywordScriptFieldType(factory(script), script, emptyMap())); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index 539fed677a4e5..7bfa130411730 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -51,18 +51,18 @@ public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase { public void testFormat() throws IOException { - assertThat(simpleMappedFieldType().docValueFormat("#.0", null).format(1), equalTo("1.0")); - assertThat(simpleMappedFieldType().docValueFormat("#,##0.##", null).format(11), equalTo("11")); - assertThat(simpleMappedFieldType().docValueFormat("#,##0.##", null).format(1123), equalTo("1,123")); + assertThat(simpleMappedField().docValueFormat("#.0", null).format(1), equalTo("1.0")); + assertThat(simpleMappedField().docValueFormat("#,##0.##", null).format(11), equalTo("11")); + assertThat(simpleMappedField().docValueFormat("#,##0.##", null).format(1123), equalTo("1,123")); } public void testLongFromSource() throws IOException { MapperService mapperService = createMapperService(runtimeFieldMapping(b -> b.field("type", "long"))); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", "9223372036854775806.00"))); withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> { - MappedFieldType ft = mapperService.fieldType("field"); + MappedField mappedField = mapperService.mappedField("field"); SearchExecutionContext sec = createSearchExecutionContext(mapperService); - Query 
rangeQuery = ft.rangeQuery(0, 9223372036854775807L, false, false, ShapeRelation.CONTAINS, null, null, sec); + Query rangeQuery = mappedField.rangeQuery(0, 9223372036854775807L, false, false, ShapeRelation.CONTAINS, null, null, sec); IndexSearcher searcher = new IndexSearcher(ir); assertEquals(1, searcher.count(rangeQuery)); }); @@ -76,8 +76,9 @@ public void testDocValues() throws IOException { List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - LongScriptFieldType ft = build("add_param", Map.of("param", 1)); - LongScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null); + MappedField mappedField = build("add_param", Map.of("param", 1)); + LongScriptFieldData ifd = (LongScriptFieldData) mappedField.fielddataBuilder("test", mockContext()::lookup) + .build(null, null); searcher.search(new MatchAllDocsQuery(), new Collector() { @Override public ScoreMode scoreMode() { @@ -115,7 +116,8 @@ public void testSort() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null); + LongScriptFieldData ifd = (LongScriptFieldData) simpleMappedField().fielddataBuilder("test", mockContext()::lookup) + .build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1]}")); @@ -132,7 +134,10 @@ public void testNow() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = 
newSearcher(reader); - LongScriptFieldData ifd = build("millis_ago", Map.of()).fielddataBuilder("test", mockContext()::lookup).build(null, null); + LongScriptFieldData ifd = (LongScriptFieldData) build("millis_ago", Map.of()).fielddataBuilder( + "test", + mockContext()::lookup + ).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); assertThat(readSource(reader, docs.scoreDocs[0].doc), equalTo("{\"timestamp\": [1595432181356]}")); @@ -156,7 +161,7 @@ public void testUsedInScript() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); + SearchExecutionContext searchContext = mockContext(true, simpleMappedField()); assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() { @Override public boolean needs_score() { @@ -185,7 +190,7 @@ public void testExistsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().existsQuery(mockContext())), equalTo(1)); } } } @@ -197,19 +202,19 @@ public void testRangeQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - MappedFieldType ft = simpleMappedFieldType(); - assertThat(searcher.count(ft.rangeQuery("2", "3", true, true, null, null, null, mockContext())), equalTo(1)); - 
assertThat(searcher.count(ft.rangeQuery(2, 3, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(1.1, 3, true, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(1.1, 3, false, true, null, null, null, mockContext())), equalTo(1)); - assertThat(searcher.count(ft.rangeQuery(2, 3, false, true, null, null, null, mockContext())), equalTo(0)); + MappedField mappedField = simpleMappedField(); + assertThat(searcher.count(mappedField.rangeQuery("2", "3", true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.rangeQuery(2, 3, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.rangeQuery(1.1, 3, true, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.rangeQuery(1.1, 3, false, true, null, null, null, mockContext())), equalTo(1)); + assertThat(searcher.count(mappedField.rangeQuery(2, 3, false, true, null, null, null, mockContext())), equalTo(0)); } } } @Override - protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.rangeQuery(randomLong(), randomLong(), randomBoolean(), randomBoolean(), null, null, null, ctx); + protected Query randomRangeQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.rangeQuery(randomLong(), randomLong(), randomBoolean(), randomBoolean(), null, null, null, ctx); } @Override @@ -219,17 +224,17 @@ public void testTermQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termQuery("1", mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termQuery(1, mockContext())), equalTo(1)); - 
assertThat(searcher.count(simpleMappedFieldType().termQuery(1.1, mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termQuery("1", mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(1, mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termQuery(1.1, mockContext())), equalTo(0)); assertThat(searcher.count(build("add_param", Map.of("param", 1)).termQuery(2, mockContext())), equalTo(1)); } } } @Override - protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termQuery(randomLong(), ctx); + protected Query randomTermQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termQuery(randomLong(), ctx); } @Override @@ -239,27 +244,27 @@ public void testTermsQuery() throws IOException { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1"), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(1), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(1.1), mockContext())), equalTo(0)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(1.1, 2), mockContext())), equalTo(1)); - assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(2, 1), mockContext())), equalTo(2)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of("1"), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(1), mockContext())), equalTo(1)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(1.1), mockContext())), equalTo(0)); + assertThat(searcher.count(simpleMappedField().termsQuery(List.of(1.1, 2), mockContext())), equalTo(1)); + 
assertThat(searcher.count(simpleMappedField().termsQuery(List.of(2, 1), mockContext())), equalTo(2)); } } } @Override - protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { - return ft.termsQuery(List.of(randomLong()), ctx); + protected Query randomTermsQuery(MappedField mappedField, SearchExecutionContext ctx) { + return mappedField.termsQuery(List.of(randomLong()), ctx); } @Override - protected LongScriptFieldType simpleMappedFieldType() { + protected MappedField simpleMappedField() { return build("read_foo", Map.of()); } @Override - protected LongScriptFieldType loopFieldType() { + protected MappedField loopField() { return build("loop", Map.of()); } @@ -268,7 +273,7 @@ protected String typeName() { return "long"; } - private static LongScriptFieldType build(String code, Map params) { + private static MappedField build(String code, Map params) { return build(new Script(ScriptType.INLINE, "test", code, params)); } @@ -314,7 +319,7 @@ public void execute() { } } - private static LongScriptFieldType build(Script script) { - return new LongScriptFieldType("test", factory(script), script, emptyMap()); + private static MappedField build(Script script) { + return new MappedField("test", new LongScriptFieldType(factory(script), script, emptyMap())); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LookupRuntimeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LookupRuntimeFieldTypeTests.java index 186d8ee543a03..af9f6bc73dae3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LookupRuntimeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LookupRuntimeFieldTypeTests.java @@ -57,8 +57,8 @@ public void testFetchValues() throws IOException { XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("foo", List.of("f1", "f2")).endObject(); SourceLookup sourceLookup = new SourceLookup(); sourceLookup.setSource(BytesReference.bytes(source)); - 
MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field"); - ValueFetcher valueFetcher = fieldType.valueFetcher(createSearchExecutionContext(mapperService), null); + MappedField mappedField = mapperService.mappedField("foo_lookup_field"); + ValueFetcher valueFetcher = mappedField.valueFetcher(createSearchExecutionContext(mapperService), null); DocumentField doc = valueFetcher.fetchDocumentField("foo_lookup_field", sourceLookup); assertNotNull(doc); assertThat(doc.getName(), equalTo("foo_lookup_field")); @@ -113,8 +113,8 @@ public void testEmptyInputField() throws IOException { source.endObject(); SourceLookup sourceLookup = new SourceLookup(); sourceLookup.setSource(BytesReference.bytes(source)); - MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field"); - ValueFetcher valueFetcher = fieldType.valueFetcher(createSearchExecutionContext(mapperService), null); + MappedField mappedField = mapperService.mappedField("foo_lookup_field"); + ValueFetcher valueFetcher = mappedField.valueFetcher(createSearchExecutionContext(mapperService), null); DocumentField doc = valueFetcher.fetchDocumentField("foo_lookup_field", sourceLookup); assertNull(doc); } @@ -136,12 +136,12 @@ public void testInputFieldDoesNotExist() throws IOException { } """; var mapperService = createMapperService(mapping); - MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field"); + MappedField mappedField = mapperService.mappedField("foo_lookup_field"); // fails if unmapped_fields is not QueryShardException error = expectThrows(QueryShardException.class, () -> { SearchExecutionContext context = createSearchExecutionContext(mapperService); context.setAllowUnmappedFields(randomBoolean()); - fieldType.valueFetcher(context, null); + mappedField.valueFetcher(context, null); }); assertThat(error.getMessage(), containsString("No field mapping can be found for the field with name [barbaz]")); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappedFieldsLookupTests.java similarity index 66% rename from server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java rename to server/src/test/java/org/elasticsearch/index/mapper/MappedFieldsLookupTests.java index d031b4ef53dc9..7ce55ad65b7bd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappedFieldsLookupTests.java @@ -27,10 +27,10 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.hasSize; -public class FieldTypeLookupTests extends ESTestCase { +public class MappedFieldsLookupTests extends ESTestCase { public void testEmpty() { - FieldTypeLookup lookup = new FieldTypeLookup(Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); + MappedFieldsLookup lookup = new MappedFieldsLookup(Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertNull(lookup.get("foo")); Collection names = lookup.getMatchingFieldNames("foo"); assertNotNull(names); @@ -39,23 +39,23 @@ public void testEmpty() { public void testAddNewField() { MockFieldMapper f = new MockFieldMapper("foo"); - FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(f), emptyList(), Collections.emptyList()); + MappedFieldsLookup lookup = new MappedFieldsLookup(Collections.singletonList(f), emptyList(), Collections.emptyList()); assertNull(lookup.get("bar")); - assertEquals(f.fieldType(), lookup.get("foo")); + assertEquals(f.field(), lookup.get("foo")); } public void testAddFieldAlias() { MockFieldMapper field = new MockFieldMapper("foo"); FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "foo"); - FieldTypeLookup lookup = new FieldTypeLookup( + MappedFieldsLookup lookup = new MappedFieldsLookup( Collections.singletonList(field), 
Collections.singletonList(alias), Collections.emptyList() ); - MappedFieldType aliasType = lookup.get("alias"); - assertEquals(field.fieldType(), aliasType); + MappedField aliasField = lookup.get("alias"); + assertEquals(field.fieldType(), aliasField.type()); } public void testGetMatchingFieldNames() { @@ -71,12 +71,12 @@ public void testGetMatchingFieldNames() { TestRuntimeField multi = new TestRuntimeField( "flat", List.of( - new TestRuntimeField.TestRuntimeFieldType("flat.first", "first"), - new TestRuntimeField.TestRuntimeFieldType("flat.second", "second") + new MappedField("flat.first", new TestRuntimeField.TestRuntimeFieldType("first")), + new MappedField("flat.second", new TestRuntimeField.TestRuntimeFieldType("second")) ) ); - FieldTypeLookup lookup = new FieldTypeLookup( + MappedFieldsLookup lookup = new MappedFieldsLookup( List.of(field1, field2, field3, flattened), List.of(alias1, alias2), List.of(runtimeField, multi) @@ -121,7 +121,7 @@ public void testSourcePathWithMultiFields() { .addMultiField(new MockFieldMapper.Builder("field.subfield2")) .build(MapperBuilderContext.ROOT); - FieldTypeLookup lookup = new FieldTypeLookup(singletonList(field), emptyList(), emptyList()); + MappedFieldsLookup lookup = new MappedFieldsLookup(singletonList(field), emptyList(), emptyList()); assertEquals(Set.of("field"), lookup.sourcePaths("field")); assertEquals(Set.of("field"), lookup.sourcePaths("field.subfield1")); @@ -134,7 +134,7 @@ public void testSourcePathsWithCopyTo() { MockFieldMapper otherField = new MockFieldMapper.Builder("other_field").copyTo("field").build(MapperBuilderContext.ROOT); - FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(field, otherField), emptyList(), emptyList()); + MappedFieldsLookup lookup = new MappedFieldsLookup(Arrays.asList(field, otherField), emptyList(), emptyList()); assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field")); assertEquals(Set.of("other_field", "field"), 
lookup.sourcePaths("field.subfield1")); @@ -147,23 +147,27 @@ public void testRuntimeFieldsLookup() { TestRuntimeField multi = new TestRuntimeField( "multi", List.of( - new TestRuntimeField.TestRuntimeFieldType("multi.string", "string"), - new TestRuntimeField.TestRuntimeFieldType("multi.long", "long") + new MappedField("multi.string", new TestRuntimeField.TestRuntimeFieldType("string")), + new MappedField("multi.long", new TestRuntimeField.TestRuntimeFieldType("long")) ) ); - FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(concrete), emptyList(), List.of(runtime, runtimeLong, multi)); - assertThat(fieldTypeLookup.get("concrete"), instanceOf(MockFieldMapper.FakeFieldType.class)); - assertThat(fieldTypeLookup.get("string"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); - assertThat(fieldTypeLookup.get("string").typeName(), equalTo("type")); - assertThat(fieldTypeLookup.get("multi"), nullValue()); - assertThat(fieldTypeLookup.get("multi.string"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); - assertThat(fieldTypeLookup.get("multi.string").typeName(), equalTo("string")); - assertThat(fieldTypeLookup.get("multi.long"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); - assertThat(fieldTypeLookup.get("multi.long").typeName(), equalTo("long")); - assertThat(fieldTypeLookup.get("multi.outside"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); - assertThat(fieldTypeLookup.get("multi.outside").typeName(), equalTo("date")); - assertThat(fieldTypeLookup.get("multi.anything"), nullValue()); + MappedFieldsLookup mappedFieldsLookup = new MappedFieldsLookup( + List.of(concrete), + emptyList(), + List.of(runtime, runtimeLong, multi) + ); + assertThat(mappedFieldsLookup.get("concrete").type(), instanceOf(MockFieldMapper.FakeFieldType.class)); + assertThat(mappedFieldsLookup.get("string").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappedFieldsLookup.get("string").typeName(), 
equalTo("type")); + assertThat(mappedFieldsLookup.get("multi"), nullValue()); + assertThat(mappedFieldsLookup.get("multi.string").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappedFieldsLookup.get("multi.string").typeName(), equalTo("string")); + assertThat(mappedFieldsLookup.get("multi.long").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappedFieldsLookup.get("multi.long").typeName(), equalTo("long")); + assertThat(mappedFieldsLookup.get("multi.outside").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappedFieldsLookup.get("multi.outside").typeName(), equalTo("date")); + assertThat(mappedFieldsLookup.get("multi.anything"), nullValue()); } public void testRuntimeFieldsOverrideConcreteFields() { @@ -174,25 +178,25 @@ public void testRuntimeFieldsOverrideConcreteFields() { TestRuntimeField fieldOverride = new TestRuntimeField("field", "string"); TestRuntimeField subfieldOverride = new TestRuntimeField( "object", - Collections.singleton(new TestRuntimeField.TestRuntimeFieldType("object.subfield", "leaf")) + Collections.singleton(new MappedField("object.subfield", new TestRuntimeField.TestRuntimeFieldType("leaf"))) ); TestRuntimeField runtime = new TestRuntimeField("runtime", "type"); TestRuntimeField flattenedRuntime = new TestRuntimeField("flattened.runtime", "type"); - FieldTypeLookup fieldTypeLookup = new FieldTypeLookup( + MappedFieldsLookup mappedFieldsLookup = new MappedFieldsLookup( List.of(field, concrete, subfield, flattened), emptyList(), List.of(fieldOverride, runtime, subfieldOverride, flattenedRuntime) ); - assertThat(fieldTypeLookup.get("field"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); - assertThat(fieldTypeLookup.get("field").typeName(), equalTo("string")); - assertThat(fieldTypeLookup.get("object.subfield"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); - assertThat(fieldTypeLookup.get("object.subfield").typeName(), 
equalTo("leaf")); - assertThat(fieldTypeLookup.get("concrete"), instanceOf(MockFieldMapper.FakeFieldType.class)); - assertThat(fieldTypeLookup.get("runtime"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); - assertThat(fieldTypeLookup.get("runtime").typeName(), equalTo("type")); - assertThat(fieldTypeLookup.get("flattened.anything"), instanceOf(FlattenedFieldMapper.KeyedFlattenedFieldType.class)); - assertThat(fieldTypeLookup.get("flattened.runtime"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappedFieldsLookup.get("field").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappedFieldsLookup.get("field").typeName(), equalTo("string")); + assertThat(mappedFieldsLookup.get("object.subfield").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappedFieldsLookup.get("object.subfield").typeName(), equalTo("leaf")); + assertThat(mappedFieldsLookup.get("concrete").type(), instanceOf(MockFieldMapper.FakeFieldType.class)); + assertThat(mappedFieldsLookup.get("runtime").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappedFieldsLookup.get("runtime").typeName(), equalTo("type")); + assertThat(mappedFieldsLookup.get("flattened.anything").type(), instanceOf(FlattenedFieldMapper.KeyedFlattenedFieldType.class)); + assertThat(mappedFieldsLookup.get("flattened.runtime").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); } public void testRuntimeFieldsSourcePaths() { @@ -203,19 +207,19 @@ public void testRuntimeFieldsSourcePaths() { TestRuntimeField field2 = new TestRuntimeField("field2", "type"); TestRuntimeField subfield = new TestRuntimeField("object.subfield", "type"); - FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(field1, concrete), emptyList(), List.of(field2, subfield)); + MappedFieldsLookup mappedFieldsLookup = new MappedFieldsLookup(List.of(field1, concrete), emptyList(), List.of(field2, subfield)); { - Set 
sourcePaths = fieldTypeLookup.sourcePaths("field1"); + Set sourcePaths = mappedFieldsLookup.sourcePaths("field1"); assertEquals(1, sourcePaths.size()); assertTrue(sourcePaths.contains("field1")); } { - Set sourcePaths = fieldTypeLookup.sourcePaths("field2"); + Set sourcePaths = mappedFieldsLookup.sourcePaths("field2"); assertEquals(1, sourcePaths.size()); assertTrue(sourcePaths.contains("field2")); } { - Set sourcePaths = fieldTypeLookup.sourcePaths("object.subfield"); + Set sourcePaths = mappedFieldsLookup.sourcePaths("object.subfield"); assertEquals(1, sourcePaths.size()); assertTrue(sourcePaths.contains("object.subfield")); } @@ -225,16 +229,16 @@ public void testFlattenedLookup() { String fieldName = "object1.object2.field"; FlattenedFieldMapper mapper = createFlattenedMapper(fieldName); - FieldTypeLookup lookup = new FieldTypeLookup(singletonList(mapper), emptyList(), emptyList()); - assertEquals(mapper.fieldType(), lookup.get(fieldName)); + MappedFieldsLookup lookup = new MappedFieldsLookup(singletonList(mapper), emptyList(), emptyList()); + assertEquals(mapper.field(), lookup.get(fieldName)); String objectKey = "key1.key2"; String searchFieldName = fieldName + "." 
+ objectKey; - MappedFieldType searchFieldType = lookup.get(searchFieldName); - assertNotNull(searchFieldType); - assertThat(searchFieldType, Matchers.instanceOf(FlattenedFieldMapper.KeyedFlattenedFieldType.class)); - FlattenedFieldMapper.KeyedFlattenedFieldType keyedFieldType = (FlattenedFieldMapper.KeyedFlattenedFieldType) searchFieldType; + MappedField searchField = lookup.get(searchFieldName); + assertNotNull(searchField); + assertThat(searchField.type(), Matchers.instanceOf(FlattenedFieldMapper.KeyedFlattenedFieldType.class)); + FlattenedFieldMapper.KeyedFlattenedFieldType keyedFieldType = (FlattenedFieldMapper.KeyedFlattenedFieldType) searchField.type(); assertEquals(objectKey, keyedFieldType.key()); assertThat(lookup.getMatchingFieldNames("object1.*"), contains("object1.object2.field")); @@ -251,16 +255,16 @@ public void testFlattenedLookupWithAlias() { String aliasName = "alias"; FieldAliasMapper alias = new FieldAliasMapper(aliasName, aliasName, fieldName); - FieldTypeLookup lookup = new FieldTypeLookup(singletonList(mapper), singletonList(alias), emptyList()); - assertEquals(mapper.fieldType(), lookup.get(aliasName)); + MappedFieldsLookup lookup = new MappedFieldsLookup(singletonList(mapper), singletonList(alias), emptyList()); + assertEquals(mapper.field(), lookup.get(aliasName)); String objectKey = "key1.key2"; String searchFieldName = aliasName + "." 
+ objectKey; - MappedFieldType searchFieldType = lookup.get(searchFieldName); - assertNotNull(searchFieldType); - assertThat(searchFieldType, Matchers.instanceOf(FlattenedFieldMapper.KeyedFlattenedFieldType.class)); - FlattenedFieldMapper.KeyedFlattenedFieldType keyedFieldType = (FlattenedFieldMapper.KeyedFlattenedFieldType) searchFieldType; + MappedField searchField = lookup.get(searchFieldName); + assertNotNull(searchField); + assertThat(searchField.type(), Matchers.instanceOf(FlattenedFieldMapper.KeyedFlattenedFieldType.class)); + FlattenedFieldMapper.KeyedFlattenedFieldType keyedFieldType = (FlattenedFieldMapper.KeyedFlattenedFieldType) searchField.type(); assertEquals(objectKey, keyedFieldType.key()); } @@ -273,31 +277,31 @@ public void testFlattenedLookupWithMultipleFields() { FlattenedFieldMapper mapper2 = createFlattenedMapper(field2); FlattenedFieldMapper mapper3 = createFlattenedMapper(field3); - FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(mapper1, mapper2), emptyList(), emptyList()); + MappedFieldsLookup lookup = new MappedFieldsLookup(Arrays.asList(mapper1, mapper2), emptyList(), emptyList()); assertNotNull(lookup.get(field1 + ".some.key")); assertNotNull(lookup.get(field2 + ".some.key")); - lookup = new FieldTypeLookup(Arrays.asList(mapper1, mapper2, mapper3), emptyList(), emptyList()); + lookup = new MappedFieldsLookup(Arrays.asList(mapper1, mapper2, mapper3), emptyList(), emptyList()); assertNotNull(lookup.get(field1 + ".some.key")); assertNotNull(lookup.get(field2 + ".some.key")); assertNotNull(lookup.get(field3 + ".some.key")); } public void testUnmappedLookupWithDots() { - FieldTypeLookup lookup = new FieldTypeLookup(emptyList(), emptyList(), emptyList()); + MappedFieldsLookup lookup = new MappedFieldsLookup(emptyList(), emptyList(), emptyList()); assertNull(lookup.get("object.child")); } public void testMaxDynamicKeyDepth() { { - FieldTypeLookup lookup = new FieldTypeLookup(Collections.emptyList(), Collections.emptyList(), 
Collections.emptyList()); + MappedFieldsLookup lookup = new MappedFieldsLookup(Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertEquals(0, lookup.getMaxParentPathDots()); } // Add a flattened object field. { String name = "object1.object2.field"; - FieldTypeLookup lookup = new FieldTypeLookup( + MappedFieldsLookup lookup = new MappedFieldsLookup( Collections.singletonList(createFlattenedMapper(name)), Collections.emptyList(), Collections.emptyList() @@ -308,7 +312,7 @@ public void testMaxDynamicKeyDepth() { // Add a short alias to that field. { String name = "object1.object2.field"; - FieldTypeLookup lookup = new FieldTypeLookup( + MappedFieldsLookup lookup = new MappedFieldsLookup( Collections.singletonList(createFlattenedMapper(name)), Collections.singletonList(new FieldAliasMapper("alias", "alias", "object1.object2.field")), Collections.emptyList() @@ -319,7 +323,7 @@ public void testMaxDynamicKeyDepth() { // Add a longer alias to that field. { String name = "object1.object2.field"; - FieldTypeLookup lookup = new FieldTypeLookup( + MappedFieldsLookup lookup = new MappedFieldsLookup( Collections.singletonList(createFlattenedMapper(name)), Collections.singletonList(new FieldAliasMapper("alias", "object1.object2.object3.alias", "object1.object2.field")), Collections.emptyList() @@ -332,7 +336,7 @@ public void testRuntimeFieldNameClashes() { { IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, - () -> new FieldTypeLookup( + () -> new MappedFieldsLookup( Collections.emptySet(), Collections.emptySet(), List.of(new TestRuntimeField("field", "type"), new TestRuntimeField("field", "long")) @@ -343,12 +347,12 @@ public void testRuntimeFieldNameClashes() { { TestRuntimeField multi = new TestRuntimeField( "multi", - Collections.singleton(new TestRuntimeField.TestRuntimeFieldType("multi.first", "leaf")) + Collections.singleton(new MappedField("multi.first", new TestRuntimeField.TestRuntimeFieldType("leaf"))) ); 
TestRuntimeField runtime = new TestRuntimeField("multi.first", "runtime"); IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, - () -> new FieldTypeLookup(Collections.emptySet(), Collections.emptySet(), List.of(multi, runtime)) + () -> new MappedFieldsLookup(Collections.emptySet(), Collections.emptySet(), List.of(multi, runtime)) ); assertEquals(iae.getMessage(), "Found two runtime fields with same name [multi.first]"); } @@ -356,14 +360,14 @@ public void testRuntimeFieldNameClashes() { TestRuntimeField multi = new TestRuntimeField( "multi", List.of( - new TestRuntimeField.TestRuntimeFieldType("multi", "leaf"), - new TestRuntimeField.TestRuntimeFieldType("multi", "leaf") + new MappedField("multi", new TestRuntimeField.TestRuntimeFieldType("leaf")), + new MappedField("multi", new TestRuntimeField.TestRuntimeFieldType("leaf")) ) ); IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, - () -> new FieldTypeLookup(Collections.emptySet(), Collections.emptySet(), List.of(multi)) + () -> new MappedFieldsLookup(Collections.emptySet(), Collections.emptySet(), List.of(multi)) ); assertEquals(iae.getMessage(), "Found two runtime fields with same name [multi]"); } @@ -374,14 +378,14 @@ public void testRuntimeFieldNameOutsideContext() { TestRuntimeField multi = new TestRuntimeField( "multi", List.of( - new TestRuntimeField.TestRuntimeFieldType("first", "leaf"), - new TestRuntimeField.TestRuntimeFieldType("second", "leaf"), - new TestRuntimeField.TestRuntimeFieldType("multi.third", "leaf") + new MappedField("first", new TestRuntimeField.TestRuntimeFieldType("leaf")), + new MappedField("second", new TestRuntimeField.TestRuntimeFieldType("leaf")), + new MappedField("multi.third", new TestRuntimeField.TestRuntimeFieldType("leaf")) ) ); IllegalStateException ise = expectThrows( IllegalStateException.class, - () -> new FieldTypeLookup(Collections.emptySet(), Collections.emptySet(), Collections.singletonList(multi)) + () -> new 
MappedFieldsLookup(Collections.emptySet(), Collections.emptySet(), Collections.singletonList(multi)) ); assertEquals("Found sub-fields with name not belonging to the parent field they are part of [first, second]", ise.getMessage()); } @@ -389,13 +393,13 @@ public void testRuntimeFieldNameOutsideContext() { TestRuntimeField multi = new TestRuntimeField( "multi", List.of( - new TestRuntimeField.TestRuntimeFieldType("multi.", "leaf"), - new TestRuntimeField.TestRuntimeFieldType("multi.f", "leaf") + new MappedField("multi.", new TestRuntimeField.TestRuntimeFieldType("leaf")), + new MappedField("multi.f", new TestRuntimeField.TestRuntimeFieldType("leaf")) ) ); IllegalStateException ise = expectThrows( IllegalStateException.class, - () -> new FieldTypeLookup(Collections.emptySet(), Collections.emptySet(), Collections.singletonList(multi)) + () -> new MappedFieldsLookup(Collections.emptySet(), Collections.emptySet(), Collections.singletonList(multi)) ); assertEquals("Found sub-fields with name not belonging to the parent field they are part of [multi.]", ise.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index f3771510d8da9..ba6f95ee0125e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -34,20 +34,20 @@ public class MapperServiceTests extends MapperServiceTestCase { public void testPreflightUpdateDoesNotChangeMapping() throws Throwable { final MapperService mapperService = createMapperService(mapping(b -> {})); merge(mapperService, MergeReason.MAPPING_UPDATE_PREFLIGHT, mapping(b -> createMappingSpecifyingNumberOfFields(b, 1))); - assertThat("field was not created by preflight check", mapperService.fieldType("field0"), nullValue()); + assertThat("field was not created by preflight check", 
mapperService.mappedField("field0"), nullValue()); merge(mapperService, MergeReason.MAPPING_UPDATE, mapping(b -> createMappingSpecifyingNumberOfFields(b, 1))); - assertThat("field was not created by mapping update", mapperService.fieldType("field0"), notNullValue()); + assertThat("field was not created by mapping update", mapperService.mappedField("field0"), notNullValue()); } public void testMappingLookup() throws IOException { MapperService service = createMapperService(mapping(b -> {})); MappingLookup oldLookup = service.mappingLookup(); - assertThat(oldLookup.fieldTypesLookup().get("cat"), nullValue()); + assertThat(oldLookup.mappedFieldsLookup().get("cat"), nullValue()); merge(service, mapping(b -> b.startObject("cat").field("type", "keyword").endObject())); MappingLookup newLookup = service.mappingLookup(); - assertThat(newLookup.fieldTypesLookup().get("cat"), not(nullValue())); - assertThat(oldLookup.fieldTypesLookup().get("cat"), nullValue()); + assertThat(newLookup.mappedFieldsLookup().get("cat"), not(nullValue())); + assertThat(oldLookup.mappedFieldsLookup().get("cat"), nullValue()); } /** @@ -315,7 +315,7 @@ public void testEagerGlobalOrdinals() throws IOException { })); List eagerFieldNames = StreamSupport.stream(mapperService.getEagerGlobalOrdinalsFields().spliterator(), false) - .map(MappedFieldType::name) + .map(MappedField::name) .toList(); assertThat(eagerFieldNames, containsInAnyOrder("eager1", "eager2")); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java index 287663d2c3a12..51e74090fac57 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java @@ -55,7 +55,7 @@ public void testOnlyRuntimeField() { assertEquals(0, size(mappingLookup.fieldMappers())); assertEquals(0, mappingLookup.objectMappers().size()); 
assertNull(mappingLookup.getMapper("test")); - assertThat(mappingLookup.fieldTypesLookup().get("test"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappingLookup.mappedFieldsLookup().get("test").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); } public void testRuntimeFieldLeafOverride() { @@ -68,7 +68,7 @@ public void testRuntimeFieldLeafOverride() { assertThat(mappingLookup.getMapper("test"), instanceOf(MockFieldMapper.class)); assertEquals(1, size(mappingLookup.fieldMappers())); assertEquals(0, mappingLookup.objectMappers().size()); - assertThat(mappingLookup.fieldTypesLookup().get("test"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat(mappingLookup.mappedFieldsLookup().get("test").type(), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); } public void testSubfieldOverride() { @@ -89,15 +89,18 @@ public void testSubfieldOverride() { assertThat(mappingLookup.getMapper("object.subfield"), instanceOf(MockFieldMapper.class)); assertEquals(1, size(mappingLookup.fieldMappers())); assertEquals(1, mappingLookup.objectMappers().size()); - assertThat(mappingLookup.fieldTypesLookup().get("object.subfield"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class)); + assertThat( + mappingLookup.mappedFieldsLookup().get("object.subfield").type(), + instanceOf(TestRuntimeField.TestRuntimeFieldType.class) + ); } public void testAnalyzers() throws IOException { - FakeFieldType fieldType1 = new FakeFieldType("field1"); - FieldMapper fieldMapper1 = new FakeFieldMapper(fieldType1, "index1"); + FakeField field1 = new FakeField("field1"); + FieldMapper fieldMapper1 = new FakeFieldMapper(field1, "index1"); - FakeFieldType fieldType2 = new FakeFieldType("field2"); - FieldMapper fieldMapper2 = new FakeFieldMapper(fieldType2, "index2"); + FakeField field2 = new FakeField("field2"); + FieldMapper fieldMapper2 = new FakeFieldMapper(field2, "index2"); MappingLookup mappingLookup = 
createMappingLookup(Arrays.asList(fieldMapper1, fieldMapper2), emptyList(), emptyList()); @@ -120,7 +123,7 @@ public void testEmptyMappingLookup() { } public void testValidateDoesNotShadow() { - FakeFieldType dim = new FakeFieldType("dim") { + FakeField dim = new FakeField("dim") { @Override public boolean isDimension() { return true; @@ -129,7 +132,7 @@ public boolean isDimension() { FieldMapper dimMapper = new FakeFieldMapper(dim, "index1"); MetricType metricType = randomFrom(MetricType.values()); - FakeFieldType metric = new FakeFieldType("metric") { + FakeField metric = new FakeField("metric") { @Override public MetricType getMetricType() { return metricType; @@ -137,7 +140,7 @@ public MetricType getMetricType() { }; FieldMapper metricMapper = new FakeFieldMapper(metric, "index1"); - FakeFieldType plain = new FakeFieldType("plain"); + FakeField plain = new FakeField("plain"); FieldMapper plainMapper = new FakeFieldMapper(plain, "index1"); MappingLookup mappingLookup = createMappingLookup(List.of(dimMapper, metricMapper, plainMapper), emptyList(), emptyList()); @@ -150,7 +153,7 @@ public MetricType getMetricType() { } public void testShadowingOnConstruction() { - FakeFieldType dim = new FakeFieldType("dim") { + FakeField dim = new FakeField("dim") { @Override public boolean isDimension() { return true; @@ -159,7 +162,7 @@ public boolean isDimension() { FieldMapper dimMapper = new FakeFieldMapper(dim, "index1"); MetricType metricType = randomFrom(MetricType.values()); - FakeFieldType metric = new FakeFieldType("metric") { + FakeField metric = new FakeField("metric") { @Override public MetricType getMetricType() { return metricType; @@ -229,10 +232,21 @@ public boolean incrementToken() { } - static class FakeFieldType extends TermBasedFieldType { + static class FakeField extends MappedField { + + private FakeField(String name) { + super(name, new TermBasedFieldType(true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()) { + + @Override + 
public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + throw new UnsupportedOperationException(); + } - private FakeFieldType(String name) { - super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + @Override + public String typeName() { + return "fake"; + } + }); } @Override @@ -250,14 +264,14 @@ static class FakeFieldMapper extends FieldMapper { final String indexedValue; - FakeFieldMapper(FakeFieldType fieldType, String indexedValue) { - super(fieldType.name(), fieldType, MultiFields.empty(), CopyTo.empty()); + FakeFieldMapper(FakeField fakeField, String indexedValue) { + super(fakeField.name(), fakeField, MultiFields.empty(), CopyTo.empty()); this.indexedValue = indexedValue; } @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), new NamedAnalyzer("fake", AnalyzerScope.INDEX, new FakeAnalyzer(indexedValue))); + return Map.of(mappedField.name(), new NamedAnalyzer("fake", AnalyzerScope.INDEX, new FakeAnalyzer(indexedValue))); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index d8fbe18a4ac78..ede29675a4430 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -66,43 +66,43 @@ public void testMultiFieldMultiFields() throws Exception { assertThat(f.name(), equalTo("object1.multi1.string")); assertThat(f.binaryValue(), equalTo(new BytesRef("2010-01-01"))); - assertThat(mapperService.fieldType("name"), notNullValue()); - assertThat(mapperService.fieldType("name"), instanceOf(TextFieldType.class)); - assertTrue(mapperService.fieldType("name").isIndexed()); - assertTrue(mapperService.fieldType("name").isSearchable()); - assertTrue(mapperService.fieldType("name").isStored()); - 
assertTrue(mapperService.fieldType("name").getTextSearchInfo().isTokenized()); - - assertThat(mapperService.fieldType("name.indexed"), notNullValue()); - assertThat(mapperService.fieldType("name"), instanceOf(TextFieldType.class)); - assertTrue(mapperService.fieldType("name.indexed").isIndexed()); - assertTrue(mapperService.fieldType("name.indexed").isSearchable()); - assertFalse(mapperService.fieldType("name.indexed").isStored()); - assertTrue(mapperService.fieldType("name.indexed").getTextSearchInfo().isTokenized()); - - assertThat(mapperService.fieldType("name.not_indexed"), notNullValue()); - assertThat(mapperService.fieldType("name"), instanceOf(TextFieldType.class)); - assertFalse(mapperService.fieldType("name.not_indexed").isIndexed()); - assertFalse(mapperService.fieldType("name.not_indexed").isSearchable()); - assertTrue(mapperService.fieldType("name.not_indexed").isStored()); - assertTrue(mapperService.fieldType("name.not_indexed").getTextSearchInfo().isTokenized()); - - assertThat(mapperService.fieldType("name.test1"), notNullValue()); - assertThat(mapperService.fieldType("name"), instanceOf(TextFieldType.class)); - assertTrue(mapperService.fieldType("name.test1").isIndexed()); - assertTrue(mapperService.fieldType("name.test1").isSearchable()); - assertTrue(mapperService.fieldType("name.test1").isStored()); - assertTrue(mapperService.fieldType("name.test1").getTextSearchInfo().isTokenized()); - assertThat(mapperService.fieldType("name.test1").eagerGlobalOrdinals(), equalTo(true)); - - assertThat(mapperService.fieldType("object1.multi1"), notNullValue()); - assertThat(mapperService.fieldType("object1.multi1"), instanceOf(DateFieldMapper.DateFieldType.class)); - assertThat(mapperService.fieldType("object1.multi1.string"), notNullValue()); - assertThat(mapperService.fieldType("object1.multi1.string"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - assertTrue(mapperService.fieldType("object1.multi1.string").isIndexed()); - 
assertTrue(mapperService.fieldType("object1.multi1.string").isSearchable()); - assertNotNull(mapperService.fieldType("object1.multi1.string").getTextSearchInfo()); - assertFalse(mapperService.fieldType("object1.multi1.string").getTextSearchInfo().isTokenized()); + assertThat(mapperService.mappedField("name"), notNullValue()); + assertThat(mapperService.mappedField("name").type(), instanceOf(TextFieldType.class)); + assertTrue(mapperService.mappedField("name").isIndexed()); + assertTrue(mapperService.mappedField("name").isSearchable()); + assertTrue(mapperService.mappedField("name").isStored()); + assertTrue(mapperService.mappedField("name").getTextSearchInfo().isTokenized()); + + assertThat(mapperService.mappedField("name.indexed"), notNullValue()); + assertThat(mapperService.mappedField("name").type(), instanceOf(TextFieldType.class)); + assertTrue(mapperService.mappedField("name.indexed").isIndexed()); + assertTrue(mapperService.mappedField("name.indexed").isSearchable()); + assertFalse(mapperService.mappedField("name.indexed").isStored()); + assertTrue(mapperService.mappedField("name.indexed").getTextSearchInfo().isTokenized()); + + assertThat(mapperService.mappedField("name.not_indexed"), notNullValue()); + assertThat(mapperService.mappedField("name").type(), instanceOf(TextFieldType.class)); + assertFalse(mapperService.mappedField("name.not_indexed").isIndexed()); + assertFalse(mapperService.mappedField("name.not_indexed").isSearchable()); + assertTrue(mapperService.mappedField("name.not_indexed").isStored()); + assertTrue(mapperService.mappedField("name.not_indexed").getTextSearchInfo().isTokenized()); + + assertThat(mapperService.mappedField("name.test1"), notNullValue()); + assertThat(mapperService.mappedField("name").type(), instanceOf(TextFieldType.class)); + assertTrue(mapperService.mappedField("name.test1").isIndexed()); + assertTrue(mapperService.mappedField("name.test1").isSearchable()); + 
assertTrue(mapperService.mappedField("name.test1").isStored()); + assertTrue(mapperService.mappedField("name.test1").getTextSearchInfo().isTokenized()); + assertThat(mapperService.mappedField("name.test1").eagerGlobalOrdinals(), equalTo(true)); + + assertThat(mapperService.mappedField("object1.multi1"), notNullValue()); + assertThat(mapperService.mappedField("object1.multi1").type(), instanceOf(DateFieldMapper.DateFieldType.class)); + assertThat(mapperService.mappedField("object1.multi1.string"), notNullValue()); + assertThat(mapperService.mappedField("object1.multi1.string").type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertTrue(mapperService.mappedField("object1.multi1.string").isIndexed()); + assertTrue(mapperService.mappedField("object1.multi1.string").isSearchable()); + assertNotNull(mapperService.mappedField("object1.multi1.string").getTextSearchInfo()); + assertFalse(mapperService.mappedField("object1.multi1.string").getTextSearchInfo().isTokenized()); } public void testBuildThenParse() throws Exception { @@ -219,7 +219,7 @@ public void testUnknownLegacyFieldsUnderKnownRootField() throws Exception { b.endObject(); b.endObject(); })); - assertThat(service.fieldType("name.subfield"), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); + assertThat(service.mappedField("name.subfield").type(), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); } public void testUnmappedLegacyFieldsUnderKnownRootField() throws Exception { @@ -231,7 +231,7 @@ public void testUnmappedLegacyFieldsUnderKnownRootField() throws Exception { b.endObject(); b.endObject(); })); - assertThat(service.fieldType("name.subfield"), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); + assertThat(service.mappedField("name.subfield").type(), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); } public void testFieldsUnderUnknownRootField() throws Exception { @@ -243,8 +243,8 @@ public void testFieldsUnderUnknownRootField() 
throws Exception { b.endObject(); b.endObject(); })); - assertThat(service.fieldType("name"), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); - assertThat(service.fieldType("name.subfield"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertThat(service.mappedField("name").type(), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); + assertThat(service.mappedField("name.subfield").type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); } public void testFieldsUnderUnmappedRootField() throws Exception { @@ -256,7 +256,7 @@ public void testFieldsUnderUnmappedRootField() throws Exception { b.endObject(); b.endObject(); })); - assertThat(service.fieldType("name"), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); - assertThat(service.fieldType("name.subfield"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertThat(service.mappedField("name").type(), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); + assertThat(service.mappedField("name.subfield").type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index 6f8cb86f5201b..0a713ce4fc9ce 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -245,8 +245,8 @@ public void testOutOfRangeValues() throws IOException { public void testDimension() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - NumberFieldMapper.NumberFieldType ft = (NumberFieldMapper.NumberFieldType) mapperService.fieldType("field"); - assertFalse(ft.isDimension()); + MappedField mappedField = mapperService.mappedField("field"); + assertFalse(mappedField.isDimension()); // 
dimension = false is allowed assertDimension(false, NumberFieldMapper.NumberFieldType::isDimension); @@ -262,8 +262,8 @@ public void testDimension() throws IOException { public void testMetricType() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - NumberFieldMapper.NumberFieldType ft = (NumberFieldMapper.NumberFieldType) mapperService.fieldType("field"); - assertNull(ft.getMetricType()); + MappedField mappedField = mapperService.mappedField("field"); + assertNull(mappedField.getMetricType()); assertMetricType("gauge", NumberFieldMapper.NumberFieldType::getMetricType); assertMetricType("counter", NumberFieldMapper.NumberFieldType::getMetricType); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index eb5dfcfeb8ac8..19b2a4fb651d9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -74,74 +74,80 @@ public void pickType() { public void testEqualsWithDifferentNumberTypes() { NumberType type = randomFrom(NumberType.values()); - NumberFieldType fieldType = new NumberFieldType("foo", type); + NumberFieldType fieldType = new NumberFieldType(type); NumberType otherType = randomValueOtherThan(type, () -> randomFrom(NumberType.values())); - NumberFieldType otherFieldType = new NumberFieldType("foo", otherType); + NumberFieldType otherFieldType = new NumberFieldType(otherType); assertNotEquals(fieldType, otherFieldType); } public void testIsFieldWithinQuery() throws IOException { - MappedFieldType ft = new NumberFieldType("field", NumberType.INTEGER); + MappedFieldType ft = new NumberFieldType(NumberType.INTEGER); // current impl ignores args and should always return INTERSECTS assertEquals( Relation.INTERSECTS, - ft.isFieldWithinQuery(null, 
randomDouble(), randomDouble(), randomBoolean(), randomBoolean(), null, null, null) + ft.isFieldWithinQuery("field", null, randomDouble(), randomDouble(), randomBoolean(), randomBoolean(), null, null, null) ); } public void testIntegerTermsQueryWithDecimalPart() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.INTEGER); - assertEquals(IntPoint.newSetQuery("field", 1), ft.termsQuery(Arrays.asList(1, 2.1), MOCK_CONTEXT)); - assertEquals(IntPoint.newSetQuery("field", 1), ft.termsQuery(Arrays.asList(1.0, 2.1), MOCK_CONTEXT)); - assertTrue(ft.termsQuery(Arrays.asList(1.1, 2.1), MOCK_CONTEXT) instanceof MatchNoDocsQuery); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberType.INTEGER); + assertEquals(IntPoint.newSetQuery("field", 1), ft.termsQuery("field", Arrays.asList(1, 2.1), MOCK_CONTEXT)); + assertEquals(IntPoint.newSetQuery("field", 1), ft.termsQuery("field", Arrays.asList(1.0, 2.1), MOCK_CONTEXT)); + assertTrue(ft.termsQuery("field", Arrays.asList(1.1, 2.1), MOCK_CONTEXT) instanceof MatchNoDocsQuery); } public void testLongTermsQueryWithDecimalPart() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); - assertEquals(LongPoint.newSetQuery("field", 1), ft.termsQuery(Arrays.asList(1, 2.1), MOCK_CONTEXT)); - assertEquals(LongPoint.newSetQuery("field", 1), ft.termsQuery(Arrays.asList(1.0, 2.1), MOCK_CONTEXT)); - assertTrue(ft.termsQuery(Arrays.asList(1.1, 2.1), MOCK_CONTEXT) instanceof MatchNoDocsQuery); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberType.LONG); + assertEquals(LongPoint.newSetQuery("field", 1), ft.termsQuery("field", Arrays.asList(1, 2.1), MOCK_CONTEXT)); + assertEquals(LongPoint.newSetQuery("field", 1), ft.termsQuery("field", Arrays.asList(1.0, 2.1), MOCK_CONTEXT)); + assertTrue(ft.termsQuery("field", Arrays.asList(1.1, 2.1), MOCK_CONTEXT) instanceof MatchNoDocsQuery); } public void testByteTermQueryWithDecimalPart() { - MappedFieldType ft = 
new NumberFieldMapper.NumberFieldType("field", NumberType.BYTE, randomBoolean()); - assertTrue(ft.termQuery(42.1, MOCK_CONTEXT) instanceof MatchNoDocsQuery); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberType.BYTE, randomBoolean()); + assertTrue(ft.termQuery("field", 42.1, MOCK_CONTEXT) instanceof MatchNoDocsQuery); } public void testShortTermQueryWithDecimalPart() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.SHORT, randomBoolean()); - assertTrue(ft.termQuery(42.1, MOCK_CONTEXT) instanceof MatchNoDocsQuery); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberType.SHORT, randomBoolean()); + assertTrue(ft.termQuery("field", 42.1, MOCK_CONTEXT) instanceof MatchNoDocsQuery); } public void testIntegerTermQueryWithDecimalPart() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.INTEGER, randomBoolean()); - assertTrue(ft.termQuery(42.1, MOCK_CONTEXT) instanceof MatchNoDocsQuery); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberType.INTEGER, randomBoolean()); + assertTrue(ft.termQuery("field", 42.1, MOCK_CONTEXT) instanceof MatchNoDocsQuery); } public void testLongTermQueryWithDecimalPart() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG, randomBoolean()); - assertTrue(ft.termQuery(42.1, MOCK_CONTEXT) instanceof MatchNoDocsQuery); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG, randomBoolean()); + assertTrue(ft.termQuery("field", 42.1, MOCK_CONTEXT) instanceof MatchNoDocsQuery); } private static MappedFieldType unsearchable() { - return new NumberFieldType("field", NumberType.LONG, false, false, false, true, null, Collections.emptyMap(), null, false, null); + return new NumberFieldType(NumberType.LONG, false, false, false, true, null, Collections.emptyMap(), null, false, null); } public void testTermQuery() { - MappedFieldType ft = new 
NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); - assertEquals(LongPoint.newExactQuery("field", 42), ft.termQuery("42", MOCK_CONTEXT)); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + assertEquals(LongPoint.newExactQuery("field", 42), ft.termQuery("field", "42", MOCK_CONTEXT)); - ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG, false); - assertEquals(SortedNumericDocValuesField.newSlowExactQuery("field", 42), ft.termQuery("42", MOCK_CONTEXT)); + ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG, false); + assertEquals(SortedNumericDocValuesField.newSlowExactQuery("field", 42), ft.termQuery("field", "42", MOCK_CONTEXT)); MappedFieldType unsearchable = unsearchable(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("42", MOCK_CONTEXT)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> unsearchable.termQuery("field", "42", MOCK_CONTEXT) + ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); - MappedFieldType ft2 = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG, false); - ElasticsearchException e2 = expectThrows(ElasticsearchException.class, () -> ft2.termQuery("42", MOCK_CONTEXT_DISALLOW_EXPENSIVE)); + MappedFieldType ft2 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG, false); + ElasticsearchException e2 = expectThrows( + ElasticsearchException.class, + () -> ft2.termQuery("field", "42", MOCK_CONTEXT_DISALLOW_EXPENSIVE) + ); assertEquals( "Cannot search on field [field] since it is not indexed and 'search.allow_expensive_queries' is set to false.", e2.getMessage() @@ -149,212 +155,212 @@ public void testTermQuery() { } public void testRangeQueryWithNegativeBounds() { - MappedFieldType ftInt = new 
NumberFieldMapper.NumberFieldType("field", NumberType.INTEGER, randomBoolean()); + MappedFieldType ftInt = new NumberFieldMapper.NumberFieldType(NumberType.INTEGER, randomBoolean()); assertEquals( - ftInt.rangeQuery(-3, -3, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-3.5, -2.5, true, true, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", -3, -3, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -3.5, -2.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(-3, -3, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-3.5, -2.5, false, false, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", -3, -3, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -3.5, -2.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(0, 0, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-0.5, 0.5, true, true, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", 0, 0, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -0.5, 0.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(0, 0, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-0.5, 0.5, false, false, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", 0, 0, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -0.5, 0.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(1, 2, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(0.5, 2.5, true, true, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", 1, 2, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", 0.5, 2.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(1, 2, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(0.5, 2.5, false, false, null, null, null, MOCK_CONTEXT) + 
ftInt.rangeQuery("field", 1, 2, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", 0.5, 2.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(0, 2, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-0.5, 2.5, true, true, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", 0, 2, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -0.5, 2.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(0, 2, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-0.5, 2.5, false, false, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", 0, 2, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -0.5, 2.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(-2, 0, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-2.5, 0.5, true, true, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", -2, 0, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -2.5, 0.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(-2, 0, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-2.5, 0.5, false, false, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", -2, 0, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -2.5, 0.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(-2, -1, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-2.5, -0.5, true, true, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", -2, -1, true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -2.5, -0.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftInt.rangeQuery(-2, -1, true, true, null, null, null, MOCK_CONTEXT), - ftInt.rangeQuery(-2.5, -0.5, false, false, null, null, null, MOCK_CONTEXT) + ftInt.rangeQuery("field", -2, -1, 
true, true, null, null, null, MOCK_CONTEXT), + ftInt.rangeQuery("field", -2.5, -0.5, false, false, null, null, null, MOCK_CONTEXT) ); - MappedFieldType ftLong = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG, randomBoolean()); + MappedFieldType ftLong = new NumberFieldMapper.NumberFieldType(NumberType.LONG, randomBoolean()); assertEquals( - ftLong.rangeQuery(-3, -3, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-3.5, -2.5, true, true, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", -3, -3, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -3.5, -2.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(-3, -3, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-3.5, -2.5, false, false, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", -3, -3, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -3.5, -2.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(0, 0, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-0.5, 0.5, true, true, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", 0, 0, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -0.5, 0.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(0, 0, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-0.5, 0.5, false, false, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", 0, 0, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -0.5, 0.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(1, 2, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(0.5, 2.5, true, true, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", 1, 2, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", 0.5, 2.5, true, true, null, null, null, MOCK_CONTEXT) ); 
assertEquals( - ftLong.rangeQuery(1, 2, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(0.5, 2.5, false, false, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", 1, 2, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", 0.5, 2.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(0, 2, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-0.5, 2.5, true, true, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", 0, 2, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -0.5, 2.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(0, 2, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-0.5, 2.5, false, false, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", 0, 2, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -0.5, 2.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(-2, 0, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-2.5, 0.5, true, true, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", -2, 0, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -2.5, 0.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(-2, 0, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-2.5, 0.5, false, false, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", -2, 0, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -2.5, 0.5, false, false, null, null, null, MOCK_CONTEXT) ); assertEquals( - ftLong.rangeQuery(-2, -1, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-2.5, -0.5, true, true, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", -2, -1, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -2.5, -0.5, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - 
ftLong.rangeQuery(-2, -1, true, true, null, null, null, MOCK_CONTEXT), - ftLong.rangeQuery(-2.5, -0.5, false, false, null, null, null, MOCK_CONTEXT) + ftLong.rangeQuery("field", -2, -1, true, true, null, null, null, MOCK_CONTEXT), + ftLong.rangeQuery("field", -2.5, -0.5, false, false, null, null, null, MOCK_CONTEXT) ); } public void testByteRangeQueryWithDecimalParts() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.BYTE, randomBoolean()); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberType.BYTE, randomBoolean()); assertEquals( - ft.rangeQuery(2, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1.1, 10, true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 2, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1.1, 10, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(2, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1.1, 10, false, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 2, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1.1, 10, false, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(1, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1, 10.1, true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 1, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1, 10.1, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(1, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1, 10.1, true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 1, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1, 10.1, true, false, null, null, null, MOCK_CONTEXT) ); } public void testShortRangeQueryWithDecimalParts() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.SHORT, randomBoolean()); + MappedFieldType ft = 
new NumberFieldMapper.NumberFieldType(NumberType.SHORT, randomBoolean()); assertEquals( - ft.rangeQuery(2, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1.1, 10, true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 2, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1.1, 10, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(2, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1.1, 10, false, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 2, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1.1, 10, false, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(1, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1, 10.1, true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 1, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1, 10.1, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(1, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1, 10.1, true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 1, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1, 10.1, true, false, null, null, null, MOCK_CONTEXT) ); } public void testIntegerRangeQueryWithDecimalParts() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.INTEGER, randomBoolean()); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberType.INTEGER, randomBoolean()); assertEquals( - ft.rangeQuery(2, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1.1, 10, true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 2, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1.1, 10, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(2, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1.1, 10, false, 
true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 2, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1.1, 10, false, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(1, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1, 10.1, true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 1, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1, 10.1, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(1, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1, 10.1, true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 1, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1, 10.1, true, false, null, null, null, MOCK_CONTEXT) ); } public void testLongRangeQueryWithDecimalParts() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG, randomBoolean()); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberType.LONG, randomBoolean()); assertEquals( - ft.rangeQuery(2, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1.1, 10, true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 2, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1.1, 10, true, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(2, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1.1, 10, false, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 2, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1.1, 10, false, true, null, null, null, MOCK_CONTEXT) ); assertEquals( - ft.rangeQuery(1, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1, 10.1, true, true, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 1, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1, 10.1, true, true, null, null, null, MOCK_CONTEXT) ); 
assertEquals( - ft.rangeQuery(1, 10, true, true, null, null, null, MOCK_CONTEXT), - ft.rangeQuery(1, 10.1, true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", 1, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery("field", 1, 10.1, true, false, null, null, null, MOCK_CONTEXT) ); } public void testRangeQuery() { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); + MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); Query expected = new IndexOrDocValuesQuery( LongPoint.newRangeQuery("field", 1, 3), SortedNumericDocValuesField.newSlowRangeQuery("field", 1, 3) ); - assertEquals(expected, ft.rangeQuery("1", "3", true, true, null, null, null, MOCK_CONTEXT)); + assertEquals(expected, ft.rangeQuery("field", "1", "3", true, true, null, null, null, MOCK_CONTEXT)); - ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG, false); + ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG, false); expected = SortedNumericDocValuesField.newSlowRangeQuery("field", 1, 3); - assertEquals(expected, ft.rangeQuery("1", "3", true, true, null, null, null, MOCK_CONTEXT)); + assertEquals(expected, ft.rangeQuery("field", "1", "3", true, true, null, null, null, MOCK_CONTEXT)); MappedFieldType unsearchable = unsearchable(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.rangeQuery("1", "3", true, true, null, null, null, MOCK_CONTEXT) + () -> unsearchable.rangeQuery("field", "1", "3", true, true, null, null, null, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage()); - MappedFieldType ft2 = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG, false); + MappedFieldType ft2 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG, false); 
ElasticsearchException e2 = expectThrows( ElasticsearchException.class, - () -> ft2.rangeQuery("1", "3", true, true, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft2.rangeQuery("field", "1", "3", true, true, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "Cannot search on field [field] since it is not indexed and 'search.allow_expensive_queries' is set to false.", @@ -581,8 +587,9 @@ public void doTestIndexSortRangeQueries(NumberType type, Supplier valueS IndexSettings indexSettings = new IndexSettings(indexMetadata, settings); // Create an index writer configured with the same index sort. - NumberFieldType fieldType = new NumberFieldType("field", type); + NumberFieldType fieldType = new NumberFieldType(type); IndexNumericFieldData fielddata = (IndexNumericFieldData) fieldType.fielddataBuilder( + "field", "index", () -> { throw new UnsupportedOperationException(); } ).build(null, null); @@ -740,37 +747,22 @@ public void write(XContentBuilder b) throws IOException { public void testDisplayValue() { for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) { - NumberFieldMapper.NumberFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", type); + NumberFieldMapper.NumberFieldType fieldType = new NumberFieldMapper.NumberFieldType(type); assertNull(fieldType.valueForDisplay(null)); } - assertEquals( - Byte.valueOf((byte) 3), - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.BYTE).valueForDisplay(3) - ); + assertEquals(Byte.valueOf((byte) 3), new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.BYTE).valueForDisplay(3)); assertEquals( Short.valueOf((short) 3), - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.SHORT).valueForDisplay(3) - ); - assertEquals( - Integer.valueOf(3), - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.INTEGER).valueForDisplay(3) - ); - assertEquals( - Long.valueOf(3), - new 
NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG).valueForDisplay(3L) - ); - assertEquals( - Double.valueOf(1.2), - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.HALF_FLOAT).valueForDisplay(1.2) - ); - assertEquals( - Double.valueOf(1.2), - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.FLOAT).valueForDisplay(1.2) + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.SHORT).valueForDisplay(3) ); + assertEquals(Integer.valueOf(3), new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER).valueForDisplay(3)); + assertEquals(Long.valueOf(3), new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG).valueForDisplay(3L)); assertEquals( Double.valueOf(1.2), - new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE).valueForDisplay(1.2) + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.HALF_FLOAT).valueForDisplay(1.2) ); + assertEquals(Double.valueOf(1.2), new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.FLOAT).valueForDisplay(1.2)); + assertEquals(Double.valueOf(1.2), new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE).valueForDisplay(1.2)); } public void testParsePoint() { @@ -819,39 +811,34 @@ public void testParsePoint() { } public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new NumberFieldMapper.Builder( - "field", - NumberType.INTEGER, - ScriptCompiler.NONE, - false, - true, - Version.CURRENT - ).build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new NumberFieldMapper.Builder("field", NumberType.INTEGER, ScriptCompiler.NONE, false, true, Version.CURRENT) + .build(MapperBuilderContext.ROOT) + .field(); assertEquals(List.of(3), fetchSourceValue(mapper, 3.14)); assertEquals(List.of(42), fetchSourceValue(mapper, "42.9")); assertEquals(List.of(3, 42), fetchSourceValues(mapper, 3.14, "foo", "42.9")); - 
MappedFieldType nullValueMapper = new NumberFieldMapper.Builder( + MappedField nullValueMapper = new NumberFieldMapper.Builder( "field", NumberType.FLOAT, ScriptCompiler.NONE, false, true, Version.CURRENT - ).nullValue(2.71f).build(MapperBuilderContext.ROOT).fieldType(); + ).nullValue(2.71f).build(MapperBuilderContext.ROOT).field(); assertEquals(List.of(2.71f), fetchSourceValue(nullValueMapper, "")); assertEquals(List.of(2.71f), fetchSourceValue(nullValueMapper, null)); } public void testFetchHalfFloatFromSource() throws IOException { - MappedFieldType mapper = new NumberFieldMapper.Builder( + MappedField mapper = new NumberFieldMapper.Builder( "field", NumberType.HALF_FLOAT, ScriptCompiler.NONE, false, true, Version.CURRENT - ).build(MapperBuilderContext.ROOT).fieldType(); + ).build(MapperBuilderContext.ROOT).field(); /* * Half float loses a fair bit of precision compared to float but * we still do floating point comparisons. The "funny" trailing diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 4a4bea2c02c3a..7ecb11b78072c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -337,7 +337,7 @@ public void testUnknownLegacyFields() throws Exception { b.field("unknown_setting", 5); b.endObject(); })); - assertThat(service.fieldType("name"), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); + assertThat(service.mappedField("name").type(), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); } public void testUnmappedLegacyFields() throws Exception { @@ -347,7 +347,7 @@ public void testUnmappedLegacyFields() throws Exception { b.field("unknown_setting", 5); b.endObject(); })); - assertThat(service.fieldType("name"), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); + 
assertThat(service.mappedField("name").type(), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); } public void testSubobjectsFalse() throws Exception { @@ -368,8 +368,8 @@ public void testSubobjectsFalse() throws Exception { } b.endObject(); })); - assertNotNull(mapperService.fieldType("metrics.service.time")); - assertNotNull(mapperService.fieldType("metrics.service.time.max")); + assertNotNull(mapperService.mappedField("metrics.service.time")); + assertNotNull(mapperService.mappedField("metrics.service.time.max")); } public void testSubobjectsFalseWithInnerObject() { @@ -436,8 +436,8 @@ public void testSubobjectsFalseRoot() throws Exception { } b.endObject(); })); - assertNotNull(mapperService.fieldType("metrics.service.time")); - assertNotNull(mapperService.fieldType("metrics.service.time.max")); + assertNotNull(mapperService.mappedField("metrics.service.time")); + assertNotNull(mapperService.mappedField("metrics.service.time.max")); } public void testExplicitDefaultSubobjects() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index 5abf460266c8e..965a0ebb69b55 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -202,7 +202,7 @@ protected TestMapper( CopyTo copyTo, ParametrizedMapperTests.Builder builder ) { - super(simpleName, new KeywordFieldMapper.KeywordFieldType(fullName), multiFields, copyTo); + super(simpleName, new MappedField(fullName, new KeywordFieldMapper.KeywordFieldType()), multiFields, copyTo); this.fixed = builder.fixed.getValue(); this.fixed2 = builder.fixed2.getValue(); this.variable = builder.variable.getValue(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java index dd8843228f0e1..74c80287263fb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java @@ -32,21 +32,21 @@ public void testSimple() throws Exception { assertThat(f.stringValue(), equalTo("top_level")); assertThat(f.fieldType().stored(), equalTo(false)); - assertThat(mapperService.fieldType("name").isStored(), equalTo(false)); + assertThat(mapperService.mappedField("name").isStored(), equalTo(false)); f = doc.getField("obj1.name"); assertThat(f.name(), equalTo("obj1.name")); assertThat(f.fieldType().stored(), equalTo(true)); - assertThat(mapperService.fieldType("obj1.name").isStored(), equalTo(true)); + assertThat(mapperService.mappedField("obj1.name").isStored(), equalTo(true)); f = doc.getField("obj1.obj2.name"); assertThat(f.name(), equalTo("obj1.obj2.name")); assertThat(f.fieldType().stored(), equalTo(false)); - assertThat(mapperService.fieldType("obj1.obj2.name").isStored(), equalTo(false)); + assertThat(mapperService.mappedField("obj1.obj2.name").isStored(), equalTo(false)); // verify more complex path_match expressions - assertNotNull(mapperService.fieldType("obj3.obj4.prop1").getTextSearchInfo()); + assertNotNull(mapperService.mappedField("obj3.obj4.prop1").getTextSearchInfo()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapperTests.java index 915268b1896a9..5760c706707db 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/PlaceHolderFieldMapperTests.java @@ -37,7 +37,7 @@ public void testPreserveParams() throws Exception { b.endObject(); }); MapperService service = 
createMapperService(Version.fromString("5.0.0"), Settings.EMPTY, () -> false, mapping); - assertThat(service.fieldType("myfield"), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); + assertThat(service.mappedField("myfield").type(), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); assertEquals(Strings.toString(mapping), Strings.toString(service.documentMapper().mapping())); // check that field can be updated @@ -48,7 +48,7 @@ public void testPreserveParams() throws Exception { b.endObject(); }); merge(service, mapping); - assertThat(service.fieldType("myfield"), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); + assertThat(service.mappedField("myfield").type(), instanceOf(PlaceHolderFieldMapper.PlaceHolderFieldType.class)); assertEquals(Strings.toString(mapping), Strings.toString(service.documentMapper().mapping())); } @@ -61,7 +61,7 @@ public void testFetchValue() throws Exception { .rootDoc() ); }, iw -> { - SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup()); + SearchLookup lookup = new SearchLookup(mapperService::mappedField, fieldDataLookup()); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); FieldFetcher fieldFetcher = FieldFetcher.create( searchExecutionContext, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapperTests.java index 8181e5c8f23a0..e627c96ee26c1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapperTests.java @@ -52,19 +52,19 @@ public void testEnableFieldData() throws IOException { boolean[] enabled = new boolean[1]; MapperService mapperService = createMapperService(() -> enabled[0], mapping(b -> {})); - ProvidedIdFieldMapper.IdFieldType ft = (ProvidedIdFieldMapper.IdFieldType) 
mapperService.fieldType("_id"); + MappedField mappedField = mapperService.mappedField("_id"); IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, - () -> ft.fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }).build(null, null) + () -> mappedField.fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }).build(null, null) ); assertThat(exc.getMessage(), containsString(IndicesService.INDICES_ID_FIELD_DATA_ENABLED_SETTING.getKey())); - assertFalse(ft.isAggregatable()); + assertFalse(mappedField.isAggregatable()); enabled[0] = true; - ft.fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }).build(null, null); + mappedField.fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }).build(null, null); assertWarnings(ProvidedIdFieldMapper.ID_FIELD_DATA_DEPRECATION_MESSAGE); - assertTrue(ft.isAggregatable()); + assertTrue(mappedField.isAggregatable()); } public void testFetchIdFieldValue() throws IOException { @@ -74,11 +74,11 @@ public void testFetchIdFieldValue() throws IOException { mapperService, iw -> { iw.addDocument(mapperService.documentMapper().parse(source(id, b -> b.field("field", "value"), null)).rootDoc()); }, iw -> { - SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup()); + SearchLookup lookup = new SearchLookup(mapperService::mappedField, fieldDataLookup()); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); when(searchExecutionContext.lookup()).thenReturn(lookup); - ProvidedIdFieldMapper.IdFieldType ft = (ProvidedIdFieldMapper.IdFieldType) mapperService.fieldType("_id"); - ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null); + MappedField mappedField = mapperService.mappedField("_id"); + ValueFetcher valueFetcher = mappedField.valueFetcher(searchExecutionContext, null); IndexSearcher searcher = newSearcher(iw); LeafReaderContext context = 
searcher.getIndexReader().leaves().get(0); lookup.source().setSegmentAndDocument(context, 0); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index c63727b0dfa9b..ab1eb7fa2790d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -128,8 +128,8 @@ public void testDoubleRangeQuery() throws Exception { public void testDateRangeQuery() throws Exception { SearchExecutionContext context = createSearchExecutionContext(); - RangeFieldMapper.RangeFieldType type = (RangeFieldMapper.RangeFieldType) context.getFieldType(DATE_RANGE_FIELD_NAME); - DateMathParser parser = type.dateMathParser; + MappedField mappedField = context.getMappedField(DATE_RANGE_FIELD_NAME); + DateMathParser parser = ((RangeFieldMapper.RangeFieldType) mappedField.type()).dateMathParser; Query query = new QueryStringQueryBuilder(DATE_RANGE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery( createSearchExecutionContext() ); @@ -151,8 +151,8 @@ public void testDateRangeQuery() throws Exception { assertEquals(new IndexOrDocValuesQuery(range, dv), query); // also make sure the produced bounds are the same as on a regular `date` field - DateFieldMapper.DateFieldType dateType = (DateFieldMapper.DateFieldType) context.getFieldType(DATE_FIELD_NAME); - parser = dateType.dateMathParser; + mappedField = context.getMappedField(DATE_FIELD_NAME); + parser = ((DateFieldMapper.DateFieldType) mappedField.type()).dateMathParser; Query queryOnDateField = new QueryStringQueryBuilder(DATE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery( createSearchExecutionContext() ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java
b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 0f79f2e01c1b6..f1724ab94c2d3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -60,9 +60,9 @@ public void setupProperties() { private RangeFieldType createDefaultFieldType() { if (type == RangeType.DATE) { - return new RangeFieldType("field", RangeFieldMapper.Defaults.DATE_FORMATTER); + return new RangeFieldType(RangeFieldMapper.Defaults.DATE_FORMATTER); } - return new RangeFieldType("field", type); + return new RangeFieldType(type); } public void testRangeQuery() throws Exception { @@ -81,7 +81,7 @@ public void testRangeQuery() throws Exception { assertEquals( getExpectedRangeQuery(relation, from, to, includeLower, includeUpper), - ft.rangeQuery(from, to, includeLower, includeUpper, relation, null, null, context) + ft.rangeQuery("field", from, to, includeLower, includeUpper, relation, null, null, context) ); } @@ -133,7 +133,7 @@ public void testRangeQueryIntersectsAdjacentValues() throws Exception { to = nextTo(from); } } - Query rangeQuery = ft.rangeQuery(from, to, false, false, relation, null, null, context); + Query rangeQuery = ft.rangeQuery("field", from, to, false, false, relation, null, null, context); assertThat(rangeQuery, instanceOf(IndexOrDocValuesQuery.class)); assertThat(((IndexOrDocValuesQuery) rangeQuery).getIndexQuery(), instanceOf(MatchNoDocsQuery.class)); } @@ -193,7 +193,7 @@ public void testFromLargerToErrors() throws Exception { ShapeRelation relation = randomFrom(ShapeRelation.values()); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> ft.rangeQuery(from, to, true, true, relation, null, null, context) + () -> ft.rangeQuery("field", from, to, true, true, relation, null, null, context) ); assertTrue(ex.getMessage().contains("Range query `from` value")); assertTrue(ex.getMessage().contains("is greater 
than `to` value")); @@ -227,7 +227,7 @@ private SearchExecutionContext createContext() { public void testDateRangeQueryUsingMappingFormat() { SearchExecutionContext context = createContext(); - RangeFieldType strict = new RangeFieldType("field", RangeFieldMapper.Defaults.DATE_FORMATTER); + RangeFieldType strict = new RangeFieldType(RangeFieldMapper.Defaults.DATE_FORMATTER); // don't use DISJOINT here because it doesn't work on date fields which we want to compare bounds with ShapeRelation relation = randomValueOtherThan(ShapeRelation.DISJOINT, () -> randomFrom(ShapeRelation.values())); @@ -237,7 +237,7 @@ public void testDateRangeQueryUsingMappingFormat() { ElasticsearchParseException ex = expectThrows( ElasticsearchParseException.class, - () -> strict.rangeQuery(from, to, true, true, relation, null, null, context) + () -> strict.rangeQuery("field", from, to, true, true, relation, null, null, context) ); assertThat( ex.getMessage(), @@ -249,13 +249,23 @@ public void testDateRangeQueryUsingMappingFormat() { assertEquals(1465975790000L, formatter.parseMillis(from)); assertEquals(1466062190000L, formatter.parseMillis(to)); - RangeFieldType fieldType = new RangeFieldType("field", formatter); - final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); + RangeFieldType fieldType = new RangeFieldType(formatter); + final Query query = fieldType.rangeQuery("field", from, to, true, true, relation, null, fieldType.dateMathParser(), context); assertEquals("field:<ranges:[1465975790000 : 1466062190999]>", query.toString()); // compare lower and upper bounds with what we would get on a `date` field - DateFieldType dateFieldType = new DateFieldType("field", DateFieldMapper.Resolution.MILLISECONDS, formatter); - final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); + DateFieldType dateFieldType = new DateFieldType(DateFieldMapper.Resolution.MILLISECONDS, formatter); + final Query 
queryOnDateField = dateFieldType.rangeQuery( + "field", + from, + to, + true, + true, + relation, + null, + fieldType.dateMathParser(), + context + ); assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString()); } @@ -271,7 +281,7 @@ public void testDateVsDateRangeBounds() { long lower = randomLongBetween(formatter.parseMillis("2000-01-01T00:00"), formatter.parseMillis("2010-01-01T00:00")); long upper = randomLongBetween(formatter.parseMillis("2011-01-01T00:00"), formatter.parseMillis("2020-01-01T00:00")); - RangeFieldType fieldType = new RangeFieldType("field", true, false, false, formatter, false, Collections.emptyMap()); + RangeFieldType fieldType = new RangeFieldType(true, false, false, formatter, false, Collections.emptyMap()); String lowerAsString = formatter.formatMillis(lower); String upperAsString = formatter.formatMillis(upper); // also add date math rounding to days occasionally @@ -284,6 +294,7 @@ public void testDateVsDateRangeBounds() { boolean includeLower = randomBoolean(); boolean includeUpper = randomBoolean(); final Query query = fieldType.rangeQuery( + "field", lowerAsString, upperAsString, includeLower, @@ -295,7 +306,7 @@ public void testDateVsDateRangeBounds() { ); // get exact lower and upper bounds similar to what we would parse for `date` fields for same input strings - DateFieldType dateFieldType = new DateFieldType("field"); + DateFieldType dateFieldType = new DateFieldType(); long lowerBoundLong = dateFieldType.parseToLong(lowerAsString, includeLower == false, null, formatter.toDateMathParser(), () -> 0); if (includeLower == false) { ++lowerBoundLong; @@ -491,7 +502,7 @@ public void testTermQuery() throws Exception { ShapeRelation relation = ShapeRelation.INTERSECTS; boolean includeLower = true; boolean includeUpper = true; - assertEquals(getExpectedRangeQuery(relation, value, value, includeLower, includeUpper), ft.termQuery(value, context)); + assertEquals(getExpectedRangeQuery(relation, value, value, 
includeLower, includeUpper), ft.termQuery("field", value, context)); } public void testCaseInsensitiveQuery() throws Exception { @@ -499,32 +510,30 @@ public void testCaseInsensitiveQuery() throws Exception { RangeFieldType ft = createDefaultFieldType(); Object value = nextFrom(); - QueryShardException ex = expectThrows(QueryShardException.class, () -> ft.termQueryCaseInsensitive(value, context)); + QueryShardException ex = expectThrows(QueryShardException.class, () -> ft.termQueryCaseInsensitive("field", value, context)); assertTrue(ex.getMessage().contains("does not support case insensitive term queries")); } public void testFetchSourceValue() throws IOException { - MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(MapperBuilderContext.ROOT) - .fieldType(); + MappedField longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(MapperBuilderContext.ROOT).field(); Map longRange = Map.of("gte", 3.14, "lt", "42.9"); assertEquals(List.of(Map.of("gte", 3L, "lt", 42L)), fetchSourceValue(longMapper, longRange)); - MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true).format("yyyy/MM/dd||epoch_millis") + MappedField dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true).format("yyyy/MM/dd||epoch_millis") .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); Map dateRange = Map.of("lt", "1990/12/29", "gte", 597429487111L); assertEquals(List.of(Map.of("lt", "1990/12/29", "gte", "1988/12/06")), fetchSourceValue(dateMapper, dateRange)); } public void testParseSourceValueWithFormat() throws IOException { - MappedFieldType longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(MapperBuilderContext.ROOT) - .fieldType(); + MappedField longMapper = new RangeFieldMapper.Builder("field", RangeType.LONG, true).build(MapperBuilderContext.ROOT).field(); Map longRange = Map.of("gte", 3.14, "lt", "42.9"); 
assertEquals(List.of(Map.of("gte", 3L, "lt", 42L)), fetchSourceValue(longMapper, longRange)); - MappedFieldType dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true).format("strict_date_time") + MappedField dateMapper = new RangeFieldMapper.Builder("field", RangeType.DATE, true).format("strict_date_time") .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); Map dateRange = Map.of("lt", "1990-12-29T00:00:00.000Z"); assertEquals(List.of(Map.of("lt", "1990/12/29")), fetchSourceValue(dateMapper, dateRange, "yyy/MM/dd")); assertEquals(List.of(Map.of("lt", "662428800000")), fetchSourceValue(dateMapper, dateRange, "epoch_millis")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ReloadableAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ReloadableAnalyzerTests.java index 85ba074a7b96f..fef66f465c154 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ReloadableAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ReloadableAnalyzerTests.java @@ -90,12 +90,12 @@ public void testReloadSearchAnalyzers() throws IOException { assertFalse(assertSameContainedFilters(originalTokenFilters, current.get("reloadableAnalyzer"))); assertFalse( - assertSameContainedFilters(originalTokenFilters, mapperService.fieldType("field").getTextSearchInfo().searchAnalyzer()) + assertSameContainedFilters(originalTokenFilters, mapperService.mappedField("field").getTextSearchInfo().searchAnalyzer()) ); assertFalse( assertSameContainedFilters( originalTokenFilters, - mapperService.fieldType("otherField").getTextSearchInfo().searchQuoteAnalyzer() + mapperService.mappedField("otherField").getTextSearchInfo().searchQuoteAnalyzer() ) ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index ffee28e772b0f..3ae6621bb94cc 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -174,10 +174,10 @@ public void testRuntimeSectionRejectedUpdate() throws IOException { builder.endObject().endObject(); mapperService = createMapperService(builder); assertEquals(Strings.toString(builder), mapperService.documentMapper().mappingSource().toString()); - MappedFieldType concrete = mapperService.fieldType("concrete"); - assertThat(concrete, instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - MappedFieldType field = mapperService.fieldType("field"); - assertThat(field, instanceOf(LongScriptFieldType.class)); + MappedField concrete = mapperService.mappedField("concrete"); + assertThat(concrete.type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + MappedField field = mapperService.mappedField("field"); + assertThat(field.type(), instanceOf(LongScriptFieldType.class)); } { XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc"); @@ -193,11 +193,11 @@ public void testRuntimeSectionRejectedUpdate() throws IOException { expectThrows(IllegalArgumentException.class, () -> merge(mapperService, builder)); // make sure that the whole rejected update, including changes to runtime fields, has not been applied - MappedFieldType concrete = mapperService.fieldType("concrete"); - assertThat(concrete, instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - MappedFieldType field = mapperService.fieldType("field"); - assertThat(field, instanceOf(LongScriptFieldType.class)); - assertNull(mapperService.fieldType("another_field")); + MappedField concrete = mapperService.mappedField("concrete"); + assertThat(concrete.type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + MappedField field = mapperService.mappedField("field"); + assertThat(field.type(), instanceOf(LongScriptFieldType.class)); + assertNull(mapperService.mappedField("another_field")); 
assertEquals(""" {"_doc":{"runtime":{"field":{"type":"long"}},\ "properties":{"concrete":{"type":"keyword"}}}}""", Strings.toString(mapperService.documentMapper().mapping().getRoot())); @@ -210,10 +210,10 @@ public void testRuntimeSectionMerge() throws IOException { String mapping = Strings.toString(fieldMapping(b -> b.field("type", "keyword"))); mapperService = createMapperService(mapping); assertEquals(mapping, mapperService.documentMapper().mappingSource().toString()); - MappedFieldType field = mapperService.fieldType("field"); - assertThat(field, instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + MappedField field = mapperService.mappedField("field"); + assertThat(field.type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); } - LongScriptFieldType field2; + MappedField field2; { String mapping = Strings.toString(runtimeMapping(builder -> { builder.startObject("field").field("type", "keyword").endObject(); @@ -221,9 +221,9 @@ public void testRuntimeSectionMerge() throws IOException { })); merge(mapperService, mapping); // field overrides now the concrete field already defined - KeywordScriptFieldType field = (KeywordScriptFieldType) mapperService.fieldType("field"); + MappedField field = mapperService.mappedField("field"); assertEquals(KeywordFieldMapper.CONTENT_TYPE, field.typeName()); - field2 = (LongScriptFieldType) mapperService.fieldType("field2"); + field2 = mapperService.mappedField("field2"); assertEquals(NumberFieldMapper.NumberType.LONG.typeName(), field2.typeName()); } { @@ -234,20 +234,20 @@ public void testRuntimeSectionMerge() throws IOException { ) ); merge(mapperService, mapping); - DoubleScriptFieldType field = (DoubleScriptFieldType) mapperService.fieldType("field"); + MappedField field = mapperService.mappedField("field"); assertEquals(NumberFieldMapper.NumberType.DOUBLE.typeName(), field.typeName()); - LongScriptFieldType field2Updated = (LongScriptFieldType) mapperService.fieldType("field2"); + MappedField field2Updated = 
mapperService.mappedField("field2"); assertSame(field2, field2Updated); } { String mapping = Strings.toString(mapping(builder -> builder.startObject("concrete").field("type", "keyword").endObject())); merge(mapperService, mapping); - DoubleScriptFieldType field = (DoubleScriptFieldType) mapperService.fieldType("field"); + MappedField field = mapperService.mappedField("field"); assertEquals(NumberFieldMapper.NumberType.DOUBLE.typeName(), field.typeName()); - LongScriptFieldType field2Updated = (LongScriptFieldType) mapperService.fieldType("field2"); + MappedField field2Updated = mapperService.mappedField("field2"); assertSame(field2, field2Updated); - MappedFieldType concrete = mapperService.fieldType("concrete"); - assertThat(concrete, instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + MappedField concrete = mapperService.mappedField("concrete"); + assertThat(concrete.type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); } { String mapping = Strings.toString(runtimeMapping(builder -> builder.startObject("field3").field("type", "date").endObject())); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java index 3e6747102305a..d289d11e6cdc6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java @@ -68,11 +68,11 @@ public void testFetchRoutingFieldValue() throws IOException { mapperService, iw -> { iw.addDocument(mapperService.documentMapper().parse(source("1", b -> {}, "abcd")).rootDoc()); }, iw -> { - SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup()); + SearchLookup lookup = new SearchLookup(mapperService::mappedField, fieldDataLookup()); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); 
when(searchExecutionContext.lookup()).thenReturn(lookup); - RoutingFieldMapper.RoutingFieldType ft = (RoutingFieldMapper.RoutingFieldType) mapperService.fieldType("_routing"); - ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null); + MappedField mappedField = mapperService.mappedField("_routing"); + ValueFetcher valueFetcher = mappedField.valueFetcher(searchExecutionContext, null); IndexSearcher searcher = newSearcher(iw); LeafReaderContext context = searcher.getIndexReader().leaves().get(0); lookup.source().setSegmentAndDocument(context, 0); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java index 0d7659418d38e..0662ec0cf39a1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldTypeTests.java @@ -18,14 +18,14 @@ public class RoutingFieldTypeTests extends FieldTypeTestCase { public void testPrefixQuery() { - MappedFieldType ft = RoutingFieldMapper.RoutingFieldType.INSTANCE; + MappedField mappedField = new MappedField(RoutingFieldMapper.NAME, RoutingFieldMapper.RoutingFieldType.INSTANCE); Query expected = new PrefixQuery(new Term("_routing", new BytesRef("foo*"))); - assertEquals(expected, ft.prefixQuery("foo*", null, MOCK_CONTEXT)); + assertEquals(expected, mappedField.prefixQuery("foo*", null, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.prefixQuery("foo*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> mappedField.prefixQuery("foo*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. 
" @@ -35,27 +35,27 @@ public void testPrefixQuery() { } public void testRegexpQuery() { - MappedFieldType ft = RoutingFieldMapper.RoutingFieldType.INSTANCE; + MappedField mappedField = new MappedField(RoutingFieldMapper.NAME, RoutingFieldMapper.RoutingFieldType.INSTANCE); Query expected = new RegexpQuery(new Term("_routing", new BytesRef("foo?"))); - assertEquals(expected, ft.regexpQuery("foo?", 0, 0, 10, null, MOCK_CONTEXT)); + assertEquals(expected, mappedField.regexpQuery("foo?", 0, 0, 10, null, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.regexpQuery("foo?", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> mappedField.regexpQuery("foo?", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testWildcardQuery() { - MappedFieldType ft = RoutingFieldMapper.RoutingFieldType.INSTANCE; + MappedField mappedField = new MappedField(RoutingFieldMapper.NAME, RoutingFieldMapper.RoutingFieldType.INSTANCE); Query expected = new WildcardQuery(new Term("_routing", new BytesRef("foo*"))); - assertEquals(expected, ft.wildcardQuery("foo*", null, MOCK_CONTEXT)); + assertEquals(expected, mappedField.wildcardQuery("foo*", null, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> mappedField.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 4ef37ee623981..1fd3a3a4fa411 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -81,7 +81,7 @@ public void testBytesAndNumericRepresentation() throws Exception { CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); searcher.doc(0, fieldsVisitor); - fieldsVisitor.postProcess(mapperService::fieldType); + fieldsVisitor.postProcess(mapperService::mappedField); assertThat(fieldsVisitor.fields().size(), equalTo(10)); assertThat(fieldsVisitor.fields().get("field1").size(), equalTo(1)); assertThat(fieldsVisitor.fields().get("field1").get(0), equalTo((byte) 1)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TestRuntimeField.java b/server/src/test/java/org/elasticsearch/index/mapper/TestRuntimeField.java index 40e9f4490903e..785bc780dacdc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TestRuntimeField.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TestRuntimeField.java @@ -22,13 +22,13 @@ public final class TestRuntimeField implements RuntimeField { public static final String CONTENT_TYPE = "test-composite"; private final String name; - private final Collection subfields; + private final Collection subfields; public TestRuntimeField(String name, String type) { - this(name, Collections.singleton(new TestRuntimeFieldType(name, type))); + this(name, Collections.singleton(new MappedField(name, new TestRuntimeFieldType(type)))); } - public TestRuntimeField(String name, Collection subfields) { + public TestRuntimeField(String name, Collection subfields) { this.name = name; this.subfields = subfields; } @@ -39,7 +39,7 @@ public String name() { } @Override - public Stream asMappedFieldTypes() { + public Stream asMappedFields() { return subfields.stream(); } @@ -54,13 +54,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public static class TestRuntimeFieldType extends 
MappedFieldType { private final String type; - public TestRuntimeFieldType(String name, String type) { - super(name, false, false, false, TextSearchInfo.NONE, Collections.emptyMap()); + public TestRuntimeFieldType(String type) { + super(false, false, false, TextSearchInfo.NONE, Collections.emptyMap()); this.type = type; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(); } @@ -70,7 +70,7 @@ public String typeName() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { return null; } @@ -80,7 +80,7 @@ public boolean isSearchable() { } @Override - public boolean isAggregatable() { + public boolean isAggregatable(String name) { return true; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 96de3d4ce1a9c..c26d7e2a4d78b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -529,13 +529,13 @@ public void testFielddata() throws IOException { MapperService disabledMapper = createMapperService(fieldMapping(this::minimalMapping)); Exception e = expectThrows( IllegalArgumentException.class, - () -> disabledMapper.fieldType("field").fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }) + () -> disabledMapper.mappedField("field").fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }) ); assertThat(e.getMessage(), containsString("Text fields are not optimised for operations that require per-document field data")); MapperService enabledMapper = createMapperService(fieldMapping(b -> 
b.field("type", "text").field("fielddata", true))); - enabledMapper.fieldType("field").fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }); // no exception - // this time + enabledMapper.mappedField("field").fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }); // no exception + // this time e = expectThrows( MapperParsingException.class, @@ -555,7 +555,7 @@ public void testFrequencyFilter() throws IOException { .endObject() ) ); - TextFieldType fieldType = (TextFieldType) mapperService.fieldType("field"); + TextFieldType fieldType = (TextFieldType) mapperService.mappedField("field").type(); assertThat(fieldType.fielddataMinFrequency(), equalTo(2d)); assertThat(fieldType.fielddataMaxFrequency(), equalTo((double) Integer.MAX_VALUE)); @@ -687,11 +687,10 @@ public void testNestedIndexPrefixes() throws IOException { .endObject() ) ); - MappedFieldType textField = mapperService.fieldType("object.field"); + MappedField textField = mapperService.mappedField("object.field"); assertNotNull(textField); - assertThat(textField, instanceOf(TextFieldType.class)); - MappedFieldType prefix = ((TextFieldType) textField).getPrefixFieldType(); - assertEquals(prefix.name(), "object.field._index_prefix"); + assertThat(textField.type(), instanceOf(TextFieldType.class)); + assertEquals(((TextFieldType) textField.type()).prefixName("object.field"), "object.field._index_prefix"); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("object.field", "some text"))); IndexableField field = doc.rootDoc().getField("object.field._index_prefix"); assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, field.fieldType().indexOptions()); @@ -713,11 +712,10 @@ public void testNestedIndexPrefixes() throws IOException { .endObject() ) ); - MappedFieldType textField = mapperService.fieldType("body.with_prefix"); + MappedField textField = mapperService.mappedField("body.with_prefix"); assertNotNull(textField); - 
assertThat(textField, instanceOf(TextFieldType.class)); - MappedFieldType prefix = ((TextFieldType) textField).getPrefixFieldType(); - assertEquals(prefix.name(), "body.with_prefix._index_prefix"); + assertThat(textField.type(), instanceOf(TextFieldType.class)); + assertEquals(((TextFieldType) textField.type()).prefixName("body.with_prefix"), "body.with_prefix._index_prefix"); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("body", "some text"))); IndexableField field = doc.rootDoc().getField("body.with_prefix._index_prefix"); @@ -1198,7 +1196,7 @@ public void testIgnoreFieldDataOnLegacyIndex() throws IOException { expectThrows( IllegalArgumentException.class, () -> ((TextFieldMapper) finalMapperService.documentMapper().mappers().getMapper("field")).fieldType() - .fielddataBuilder("test", null) + .fielddataBuilder("field", "test", null) ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java index 755dd3f668bde..70269e255d923 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java @@ -45,23 +45,26 @@ public class TextFieldTypeTests extends FieldTypeTestCase { private static TextFieldType createFieldType() { - return new TextFieldType("field"); + return new TextFieldType(); } public void testIsAggregatableDependsOnFieldData() { TextFieldType ft = createFieldType(); - assertFalse(ft.isAggregatable()); + assertFalse(ft.isAggregatable("field")); ft.setFielddata(true); - assertTrue(ft.isAggregatable()); + assertTrue(ft.isAggregatable("field")); } public void testTermQuery() { MappedFieldType ft = createFieldType(); - assertEquals(new TermQuery(new Term("field", "foo")), ft.termQuery("foo", null)); - assertEquals(AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "fOo")), ft.termQueryCaseInsensitive("fOo", 
null)); + assertEquals(new TermQuery(new Term("field", "foo")), ft.termQuery("field", "foo", null)); + assertEquals( + AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "fOo")), + ft.termQueryCaseInsensitive("field", "fOo", null) + ); - MappedFieldType unsearchable = new TextFieldType("field", false, false, Collections.emptyMap()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("bar", null)); + MappedFieldType unsearchable = new TextFieldType(false, false, Collections.emptyMap()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("field", "bar", null)); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } @@ -70,12 +73,12 @@ public void testTermsQuery() { List terms = new ArrayList<>(); terms.add(new BytesRef("foo")); terms.add(new BytesRef("bar")); - assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), null)); + assertEquals(new TermInSetQuery("field", terms), ft.termsQuery("field", Arrays.asList("foo", "bar"), null)); - MappedFieldType unsearchable = new TextFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new TextFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.termsQuery(Arrays.asList("foo", "bar"), null) + () -> unsearchable.termsQuery("field", Arrays.asList("foo", "bar"), null) ); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } @@ -84,12 +87,12 @@ public void testRangeQuery() { MappedFieldType ft = createFieldType(); assertEquals( new TermRangeQuery("field", BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"), true, false), - ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT) + ft.rangeQuery("field", "foo", "bar", true, false, null, null, null, MOCK_CONTEXT) 
); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.rangeQuery("field", "foo", "bar", true, false, null, null, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[range] queries on [text] or [keyword] fields cannot be executed when " + "'search.allow_expensive_queries' is set to false.", @@ -99,18 +102,18 @@ public void testRangeQuery() { public void testRegexpQuery() { MappedFieldType ft = createFieldType(); - assertEquals(new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("foo.*", 0, 0, 10, null, MOCK_CONTEXT)); + assertEquals(new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("field", "foo.*", 0, 0, 10, null, MOCK_CONTEXT)); - MappedFieldType unsearchable = new TextFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new TextFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.regexpQuery("foo.*", 0, 0, 10, null, MOCK_CONTEXT) + () -> unsearchable.regexpQuery("field", "foo.*", 0, 0, 10, null, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.regexpQuery("foo.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.regexpQuery("field", "foo.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } @@ -119,19 +122,20 @@ public void testFuzzyQuery() { MappedFieldType ft = createFieldType(); assertEquals( new FuzzyQuery(new Term("field", "foo"), 2, 1, 50, true), - ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) + ft.fuzzyQuery("field", 
"foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) ); - MappedFieldType unsearchable = new TextFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = new TextFieldType(false, false, Collections.emptyMap()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unsearchable.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) + () -> unsearchable.fuzzyQuery("field", "foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT) ); assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); ElasticsearchException ee = expectThrows( ElasticsearchException.class, () -> ft.fuzzyQuery( + "field", "foo", Fuzziness.AUTO, randomInt(10) + 1, @@ -147,18 +151,18 @@ public void testIndexPrefixes() { TextFieldType ft = createFieldType(); ft.setIndexPrefixes(2, 10); - Query q = ft.prefixQuery("goin", CONSTANT_SCORE_REWRITE, false, randomMockContext()); + Query q = ft.prefixQuery("field", "goin", CONSTANT_SCORE_REWRITE, false, randomMockContext()); assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q); - q = ft.prefixQuery("internationalisatio", CONSTANT_SCORE_REWRITE, false, MOCK_CONTEXT); + q = ft.prefixQuery("field", "internationalisatio", CONSTANT_SCORE_REWRITE, false, MOCK_CONTEXT); assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q); - q = ft.prefixQuery("Internationalisatio", CONSTANT_SCORE_REWRITE, true, MOCK_CONTEXT); + q = ft.prefixQuery("field", "Internationalisatio", CONSTANT_SCORE_REWRITE, true, MOCK_CONTEXT); assertEquals(AutomatonQueries.caseInsensitivePrefixQuery(new Term("field", "Internationalisatio")), q); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.prefixQuery("internationalisatio", null, false, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.prefixQuery("field", "internationalisatio", null, false, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( 
"[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. " @@ -166,7 +170,7 @@ public void testIndexPrefixes() { ee.getMessage() ); - q = ft.prefixQuery("g", CONSTANT_SCORE_REWRITE, false, randomMockContext()); + q = ft.prefixQuery("field", "g", CONSTANT_SCORE_REWRITE, false, randomMockContext()); Automaton automaton = Operations.concatenate(Arrays.asList(Automata.makeChar('g'), Automata.makeAnyChar())); Query expected = new ConstantScoreQuery( @@ -179,24 +183,24 @@ public void testIndexPrefixes() { } public void testFetchSourceValue() throws IOException { - TextFieldType fieldType = createFieldType(); + MappedField mappedField = new MappedField("field", createFieldType()); - assertEquals(List.of("value"), fetchSourceValue(fieldType, "value")); - assertEquals(List.of("42"), fetchSourceValue(fieldType, 42L)); - assertEquals(List.of("true"), fetchSourceValue(fieldType, true)); + assertEquals(List.of("value"), fetchSourceValue(mappedField, "value")); + assertEquals(List.of("42"), fetchSourceValue(mappedField, 42L)); + assertEquals(List.of("true"), fetchSourceValue(mappedField, true)); } public void testWildcardQuery() { TextFieldType ft = createFieldType(); // case sensitive - AutomatonQuery actual = (AutomatonQuery) ft.wildcardQuery("*Butterflies*", null, false, MOCK_CONTEXT); + AutomatonQuery actual = (AutomatonQuery) ft.wildcardQuery("field", "*Butterflies*", null, false, MOCK_CONTEXT); AutomatonQuery expected = new WildcardQuery(new Term("field", new BytesRef("*Butterflies*"))); assertEquals(expected, actual); assertFalse(new CharacterRunAutomaton(actual.getAutomaton()).run("some butterflies somewhere")); // case insensitive - actual = (AutomatonQuery) ft.wildcardQuery("*Butterflies*", null, true, MOCK_CONTEXT); + actual = (AutomatonQuery) ft.wildcardQuery("field", "*Butterflies*", null, true, MOCK_CONTEXT); expected = AutomatonQueries.caseInsensitiveWildcardQuery(new Term("field", new BytesRef("*Butterflies*"))); 
assertEquals(expected, actual); assertTrue(new CharacterRunAutomaton(actual.getAutomaton()).run("some butterflies somewhere")); @@ -204,7 +208,7 @@ public void testWildcardQuery() { ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.wildcardQuery("field", "valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } @@ -215,7 +219,7 @@ public void testWildcardQuery() { public void testNormalizedWildcardQuery() { TextFieldType ft = createFieldType(); - AutomatonQuery actual = (AutomatonQuery) ft.normalizedWildcardQuery("*Butterflies*", null, MOCK_CONTEXT); + AutomatonQuery actual = (AutomatonQuery) ft.normalizedWildcardQuery("field", "*Butterflies*", null, MOCK_CONTEXT); AutomatonQuery expected = new WildcardQuery(new Term("field", new BytesRef("*butterflies*"))); assertEquals(expected, actual); assertTrue(new CharacterRunAutomaton(actual.getAutomaton()).run("some butterflies somewhere")); @@ -223,32 +227,32 @@ public void testNormalizedWildcardQuery() { ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.wildcardQuery("field", "valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testTermIntervals() throws IOException { MappedFieldType ft = createFieldType(); - IntervalsSource termIntervals = ft.termIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource termIntervals = ft.termIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.term(new BytesRef("foo")), termIntervals); } public void testPrefixIntervals() throws IOException { MappedFieldType ft = createFieldType(); - 
IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource prefixIntervals = ft.prefixIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.prefix(new BytesRef("foo")), prefixIntervals); } public void testWildcardIntervals() throws IOException { MappedFieldType ft = createFieldType(); - IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource wildcardIntervals = ft.wildcardIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.wildcard(new BytesRef("foo")), wildcardIntervals); } public void testFuzzyIntervals() throws IOException { MappedFieldType ft = createFieldType(); - IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("foo", 1, 2, true, MOCK_CONTEXT); + IntervalsSource fuzzyIntervals = ft.fuzzyIntervals("field", "foo", 1, 2, true, MOCK_CONTEXT); FuzzyQuery fq = new FuzzyQuery(new Term("field", "foo"), 1, 2, 128, true); IntervalsSource expectedIntervals = Intervals.multiterm(fq.getAutomata(), "foo"); assertEquals(expectedIntervals, fuzzyIntervals); @@ -257,14 +261,14 @@ public void testFuzzyIntervals() throws IOException { public void testPrefixIntervalsWithIndexedPrefixes() { TextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); - IntervalsSource prefixIntervals = ft.prefixIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource prefixIntervals = ft.prefixIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.fixField("field._index_prefix", Intervals.term(new BytesRef("foo"))), prefixIntervals); } public void testWildcardIntervalsWithIndexedPrefixes() { TextFieldType ft = createFieldType(); ft.setIndexPrefixes(1, 4); - IntervalsSource wildcardIntervals = ft.wildcardIntervals(new BytesRef("foo"), MOCK_CONTEXT); + IntervalsSource wildcardIntervals = ft.wildcardIntervals("field", new BytesRef("foo"), MOCK_CONTEXT); assertEquals(Intervals.wildcard(new BytesRef("foo")), 
wildcardIntervals); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/VersionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/VersionFieldMapperTests.java index b292a57375e59..d19479791818e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/VersionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/VersionFieldMapperTests.java @@ -49,10 +49,10 @@ public void testFetchFieldValue() throws IOException { parsedDoc.version().setLongValue(version); iw.addDocument(parsedDoc.rootDoc()); }, iw -> { - VersionFieldMapper.VersionFieldType ft = (VersionFieldMapper.VersionFieldType) mapperService.fieldType("_version"); - SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup()); + MappedField mappedField = mapperService.mappedField("_version"); + SearchLookup lookup = new SearchLookup(mapperService::mappedField, fieldDataLookup()); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); - ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null); + ValueFetcher valueFetcher = mappedField.valueFetcher(searchExecutionContext, null); IndexSearcher searcher = newSearcher(iw); LeafReaderContext context = searcher.getIndexReader().leaves().get(0); lookup.source().setSegmentAndDocument(context, 0); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/WholeNumberFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/WholeNumberFieldMapperTests.java index 7b201f59001e5..2d80ec4c3d046 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/WholeNumberFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/WholeNumberFieldMapperTests.java @@ -28,8 +28,8 @@ protected void testDecimalCoerce() throws IOException { public void testDimension() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> 
minimalMapping(b))); - NumberFieldMapper.NumberFieldType ft = (NumberFieldMapper.NumberFieldType) mapperService.fieldType("field"); - assertFalse(ft.isDimension()); + MappedField mappedField = mapperService.mappedField("field"); + assertFalse(mappedField.isDimension()); assertDimension(true, NumberFieldMapper.NumberFieldType::isDimension); assertDimension(false, NumberFieldMapper.NumberFieldType::isDimension); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java index d0b3617934b8e..8ca60a67b1d43 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper.KeyedFlattenedFieldType; -import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper.RootFlattenedFieldType; +import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper.RootFlattenedMappedFieldType; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.junit.AssumptionViolatedException; @@ -380,14 +380,14 @@ public void testSplitQueriesOnWhitespace() throws IOException { b.field("split_queries_on_whitespace", true); })); - RootFlattenedFieldType rootFieldType = (RootFlattenedFieldType) mapperService.fieldType("field"); + RootFlattenedMappedFieldType rootFieldType = (RootFlattenedMappedFieldType) mapperService.mappedField("field").type(); assertThat(rootFieldType.getTextSearchInfo().searchAnalyzer().name(), equalTo("_whitespace")); assertTokenStreamContents( rootFieldType.getTextSearchInfo().searchAnalyzer().analyzer().tokenStream("", "Hello World"), new String[] 
{ "Hello", "World" } ); - KeyedFlattenedFieldType keyedFieldType = (KeyedFlattenedFieldType) mapperService.fieldType("field.key"); + KeyedFlattenedFieldType keyedFieldType = (KeyedFlattenedFieldType) mapperService.mappedField("field.key").type(); assertThat(keyedFieldType.getTextSearchInfo().searchAnalyzer().name(), equalTo("_whitespace")); assertTokenStreamContents( keyedFieldType.getTextSearchInfo().searchAnalyzer().analyzer().tokenStream("", "Hello World"), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldParserTests.java index 82b89e0b68bc6..c2533fbb16759 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldParserTests.java @@ -31,7 +31,7 @@ public class FlattenedFieldParserTests extends ESTestCase { @Before public void setUp() throws Exception { super.setUp(); - parser = new FlattenedFieldParser("field", "field._keyed", new FakeFieldType("field"), Integer.MAX_VALUE, Integer.MAX_VALUE, null); + parser = new FlattenedFieldParser("field", "field._keyed", new FakeFieldType(), Integer.MAX_VALUE, Integer.MAX_VALUE, null); } public void testTextValues() throws Exception { @@ -273,7 +273,7 @@ public void testDepthLimit() throws Exception { FlattenedFieldParser configuredParser = new FlattenedFieldParser( "field", "field._keyed", - new FakeFieldType("field"), + new FakeFieldType(), 2, Integer.MAX_VALUE, null @@ -295,7 +295,7 @@ public void testDepthLimitBoundary() throws Exception { FlattenedFieldParser configuredParser = new FlattenedFieldParser( "field", "field._keyed", - new FakeFieldType("field"), + new FakeFieldType(), 3, Integer.MAX_VALUE, null @@ -311,7 +311,7 @@ public void testIgnoreAbove() throws Exception { FlattenedFieldParser configuredParser = new FlattenedFieldParser( "field", 
"field._keyed", - new FakeFieldType("field"), + new FakeFieldType(), Integer.MAX_VALUE, 10, null @@ -330,7 +330,7 @@ public void testNullValues() throws Exception { xContentParser = createXContentParser(input); - MappedFieldType fieldType = new FakeFieldType("field"); + MappedFieldType fieldType = new FakeFieldType(); FlattenedFieldParser configuredParser = new FlattenedFieldParser( "field", "field._keyed", diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedIndexFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedIndexFieldDataTests.java index 209d9fab3eb37..6f6bd2c9766ed 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedIndexFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedIndexFieldDataTests.java @@ -23,7 +23,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.DynamicMappedField; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper.KeyedFlattenedFieldData; import org.elasticsearch.index.shard.ShardId; @@ -46,13 +47,13 @@ public void testGlobalFieldDataCaching() throws IOException { ); FlattenedFieldMapper fieldMapper = new FlattenedFieldMapper.Builder("flattened").build(MapperBuilderContext.ROOT); - MappedFieldType fieldType1 = fieldMapper.fieldType().getChildFieldType("key"); + MappedField childField = ((DynamicMappedField) fieldMapper.field()).getChildField("key"); AtomicInteger onCacheCalled = new AtomicInteger(); ifdService.setListener(new IndexFieldDataCache.Listener() { @Override public void onCache(ShardId shardId, String fieldName, Accountable 
ramUsage) { - assertEquals(fieldType1.name(), fieldName); + assertEquals(childField.name(), fieldName); onCacheCalled.incrementAndGet(); } }); @@ -71,7 +72,7 @@ public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) { // Load global field data for subfield 'key'. IndexFieldData ifd1 = ifdService.getForField( - fieldType1, + childField, "test", () -> { throw new UnsupportedOperationException("search lookup not available"); } ); @@ -83,9 +84,9 @@ public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) { assertEquals(1, onCacheCalled.get()); // Load global field data for the subfield 'other_key'. - MappedFieldType fieldType2 = fieldMapper.fieldType().getChildFieldType("other_key"); + MappedField childField2 = ((DynamicMappedField) fieldMapper.field()).getChildField("other_key"); IndexFieldData ifd2 = ifdService.getForField( - fieldType2, + childField2, "test", () -> { throw new UnsupportedOperationException("search lookup not available"); } ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java index 3738152037b3c..b22dc3cae2ce5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper.KeyedFlattenedFieldType; import org.elasticsearch.index.query.SearchExecutionContext; @@ -44,39 +45,39 @@ private static KeyedFlattenedFieldType createFieldType() { public void 
testIndexedValueForSearch() { KeyedFlattenedFieldType ft = createFieldType(); - BytesRef keywordValue = ft.indexedValueForSearch("value"); + BytesRef keywordValue = ft.indexedValueForSearch("field", "value"); assertEquals(new BytesRef("key\0value"), keywordValue); - BytesRef doubleValue = ft.indexedValueForSearch(2.718); + BytesRef doubleValue = ft.indexedValueForSearch("field", 2.718); assertEquals(new BytesRef("key\0" + "2.718"), doubleValue); - BytesRef booleanValue = ft.indexedValueForSearch(true); + BytesRef booleanValue = ft.indexedValueForSearch("field", true); assertEquals(new BytesRef("key\0true"), booleanValue); } public void testTermQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new TermQuery(new Term(ft.name(), "key\0value")); - assertEquals(expected, ft.termQuery("value", null)); + Query expected = new TermQuery(new Term("field", "key\0value")); + assertEquals(expected, ft.termQuery("field", "value", null)); - expected = AutomatonQueries.caseInsensitiveTermQuery(new Term(ft.name(), "key\0value")); - assertEquals(expected, ft.termQueryCaseInsensitive("value", null)); + expected = AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "key\0value")); + assertEquals(expected, ft.termQueryCaseInsensitive("field", "value", null)); KeyedFlattenedFieldType unsearchable = new KeyedFlattenedFieldType("field", false, true, "key", false, Collections.emptyMap()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("field", null)); - assertEquals("Cannot search on field [" + ft.name() + "] since it is not indexed.", e.getMessage()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("field", "field", null)); + assertEquals("Cannot search on field [" + "field" + "] since it is not indexed.", e.getMessage()); } public void testTermsQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new 
TermInSetQuery(ft.name(), new BytesRef("key\0value1"), new BytesRef("key\0value2")); + Query expected = new TermInSetQuery("field", new BytesRef("key\0value1"), new BytesRef("key\0value2")); List terms = new ArrayList<>(); terms.add("value1"); terms.add("value2"); - Query actual = ft.termsQuery(terms, null); + Query actual = ft.termsQuery("field", terms, null); assertEquals(expected, actual); } @@ -84,22 +85,22 @@ public void testTermsQuery() { public void testExistsQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new PrefixQuery(new Term(ft.name(), "key\0")); - assertEquals(expected, ft.existsQuery(null)); + Query expected = new PrefixQuery(new Term("field", "key\0")); + assertEquals(expected, ft.existsQuery("field", null)); } public void testPrefixQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new PrefixQuery(new Term(ft.name(), "key\0val")); - assertEquals(expected, ft.prefixQuery("val", MultiTermQuery.CONSTANT_SCORE_REWRITE, false, MOCK_CONTEXT)); + Query expected = new PrefixQuery(new Term("field", "key\0val")); + assertEquals(expected, ft.prefixQuery("field", "val", MultiTermQuery.CONSTANT_SCORE_REWRITE, false, MOCK_CONTEXT)); - expected = AutomatonQueries.caseInsensitivePrefixQuery(new Term(ft.name(), "key\0vAl")); - assertEquals(expected, ft.prefixQuery("vAl", MultiTermQuery.CONSTANT_SCORE_REWRITE, true, MOCK_CONTEXT)); + expected = AutomatonQueries.caseInsensitivePrefixQuery(new Term("field", "key\0vAl")); + assertEquals(expected, ft.prefixQuery("field", "vAl", MultiTermQuery.CONSTANT_SCORE_REWRITE, true, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.prefixQuery("val", MultiTermQuery.CONSTANT_SCORE_REWRITE, false, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.prefixQuery("field", "val", MultiTermQuery.CONSTANT_SCORE_REWRITE, false, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[prefix] queries cannot be executed when 
'search.allow_expensive_queries' is set to false. " @@ -113,7 +114,7 @@ public void testFuzzyQuery() { UnsupportedOperationException e = expectThrows( UnsupportedOperationException.class, - () -> ft.fuzzyQuery("value", Fuzziness.fromEdits(2), 1, 50, true, randomMockContext()) + () -> ft.fuzzyQuery("field", "value", Fuzziness.fromEdits(2), 1, 50, true, randomMockContext()) ); assertEquals("[fuzzy] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); } @@ -121,21 +122,24 @@ public void testFuzzyQuery() { public void testRangeQuery() { KeyedFlattenedFieldType ft = createFieldType(); - TermRangeQuery expected = new TermRangeQuery(ft.name(), new BytesRef("key\0lower"), new BytesRef("key\0upper"), false, false); - assertEquals(expected, ft.rangeQuery("lower", "upper", false, false, MOCK_CONTEXT)); + TermRangeQuery expected = new TermRangeQuery("field", new BytesRef("key\0lower"), new BytesRef("key\0upper"), false, false); + assertEquals(expected, ft.rangeQuery("field", "lower", "upper", false, false, MOCK_CONTEXT)); - expected = new TermRangeQuery(ft.name(), new BytesRef("key\0lower"), new BytesRef("key\0upper"), true, true); - assertEquals(expected, ft.rangeQuery("lower", "upper", true, true, MOCK_CONTEXT)); + expected = new TermRangeQuery("field", new BytesRef("key\0lower"), new BytesRef("key\0upper"), true, true); + assertEquals(expected, ft.rangeQuery("field", "lower", "upper", true, true, MOCK_CONTEXT)); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ft.rangeQuery("lower", null, false, false, null)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> ft.rangeQuery("field", "lower", null, false, false, null) + ); assertEquals("[range] queries on keyed [flattened] fields must include both an upper and a lower bound.", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> ft.rangeQuery(null, "upper", false, false, MOCK_CONTEXT)); + e = 
expectThrows(IllegalArgumentException.class, () -> ft.rangeQuery("field", null, "upper", false, false, MOCK_CONTEXT)); assertEquals("[range] queries on keyed [flattened] fields must include both an upper and a lower bound.", e.getMessage()); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.rangeQuery("lower", "upper", false, false, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.rangeQuery("field", "lower", "upper", false, false, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[range] queries on [text] or [keyword] fields cannot be executed when " + "'search.allow_expensive_queries' is set to false.", @@ -148,7 +152,7 @@ public void testRegexpQuery() { UnsupportedOperationException e = expectThrows( UnsupportedOperationException.class, - () -> ft.regexpQuery("valu*", 0, 0, 10, null, randomMockContext()) + () -> ft.regexpQuery("field", "valu*", 0, 0, 10, null, randomMockContext()) ); assertEquals("[regexp] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); } @@ -158,28 +162,28 @@ public void testWildcardQuery() { UnsupportedOperationException e = expectThrows( UnsupportedOperationException.class, - () -> ft.wildcardQuery("valu*", null, false, randomMockContext()) + () -> ft.wildcardQuery("field", "valu*", null, false, randomMockContext()) ); assertEquals("[wildcard] queries are not currently supported on keyed [flattened] fields.", e.getMessage()); } public void testFetchIsEmpty() throws IOException { Map sourceValue = Map.of("key", "value"); - KeyedFlattenedFieldType ft = createFieldType(); + MappedField mappedField = new MappedField("field", createFieldType()); - assertEquals(List.of(), fetchSourceValue(ft, sourceValue)); - assertEquals(List.of(), fetchSourceValue(ft, null)); + assertEquals(List.of(), fetchSourceValue(mappedField, sourceValue)); + assertEquals(List.of(), fetchSourceValue(mappedField, null)); } public void testFetchSourceValue() throws IOException { - KeyedFlattenedFieldType ft = 
createFieldType(); + MappedField mappedField = new MappedField("field", createFieldType()); Map sourceValue = Map.of("key", "value"); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); when(searchExecutionContext.isSourceEnabled()).thenReturn(true); when(searchExecutionContext.sourcePath("field.key")).thenReturn(Set.of("field.key")); - ValueFetcher fetcher = ft.valueFetcher(searchExecutionContext, null); + ValueFetcher fetcher = mappedField.valueFetcher(searchExecutionContext, null); SourceLookup lookup = new SourceLookup(); lookup.setSource(Collections.singletonMap("field", sourceValue)); @@ -187,7 +191,10 @@ public void testFetchSourceValue() throws IOException { lookup.setSource(Collections.singletonMap("field", null)); assertEquals(List.of(), fetcher.fetchValues(lookup, new ArrayList())); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ft.valueFetcher(searchExecutionContext, "format")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> mappedField.valueFetcher(searchExecutionContext, "format") + ); assertEquals("Field [field.key] of type [flattened] doesn't support formats.", e.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/RootFlattenedFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/RootFlattenedFieldTypeTests.java index 7bcfdb30e8d09..da1af889c98e2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/RootFlattenedFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/RootFlattenedFieldTypeTests.java @@ -22,7 +22,8 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper.RootFlattenedFieldType; +import 
org.elasticsearch.index.mapper.MappedField; +import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper.RootFlattenedMappedFieldType; import java.io.IOException; import java.util.Collections; @@ -31,12 +32,12 @@ public class RootFlattenedFieldTypeTests extends FieldTypeTestCase { - private static RootFlattenedFieldType createDefaultFieldType() { - return new RootFlattenedFieldType("field", true, true, Collections.emptyMap(), false, false); + private static RootFlattenedMappedFieldType createDefaultFieldType() { + return new RootFlattenedMappedFieldType(true, true, Collections.emptyMap(), false, false); } public void testValueForDisplay() { - RootFlattenedFieldType ft = createDefaultFieldType(); + RootFlattenedMappedFieldType ft = createDefaultFieldType(); String fieldValue = "{ \"key\": \"value\" }"; BytesRef storedValue = new BytesRef(fieldValue); @@ -44,37 +45,38 @@ public void testValueForDisplay() { } public void testTermQuery() { - RootFlattenedFieldType ft = createDefaultFieldType(); + RootFlattenedMappedFieldType ft = createDefaultFieldType(); Query expected = new TermQuery(new Term("field", "value")); - assertEquals(expected, ft.termQuery("value", null)); + assertEquals(expected, ft.termQuery("field", "value", null)); expected = AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "Value")); - assertEquals(expected, ft.termQueryCaseInsensitive("Value", null)); + assertEquals(expected, ft.termQueryCaseInsensitive("field", "Value", null)); - RootFlattenedFieldType unsearchable = new RootFlattenedFieldType("field", false, true, Collections.emptyMap(), false, false); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("field", null)); + RootFlattenedMappedFieldType unsearchable = new RootFlattenedMappedFieldType(false, true, Collections.emptyMap(), false, false); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("field", "field", null)); 
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); } public void testExistsQuery() { - RootFlattenedFieldType ft = new RootFlattenedFieldType("field", true, false, Collections.emptyMap(), false, false); - assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.NAME, new BytesRef("field"))), ft.existsQuery(null)); + RootFlattenedMappedFieldType ft = new RootFlattenedMappedFieldType(true, false, Collections.emptyMap(), false, false); + assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.NAME, new BytesRef("field"))), ft.existsQuery("field", null)); - RootFlattenedFieldType withDv = new RootFlattenedFieldType("field", true, true, Collections.emptyMap(), false, false); - assertEquals(new FieldExistsQuery("field"), withDv.existsQuery(null)); + RootFlattenedMappedFieldType withDv = new RootFlattenedMappedFieldType(true, true, Collections.emptyMap(), false, false); + assertEquals(new FieldExistsQuery("field"), withDv.existsQuery("field", null)); } public void testFuzzyQuery() { - RootFlattenedFieldType ft = createDefaultFieldType(); + RootFlattenedMappedFieldType ft = createDefaultFieldType(); Query expected = new FuzzyQuery(new Term("field", "value"), 2, 1, 50, true); - Query actual = ft.fuzzyQuery("value", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT); + Query actual = ft.fuzzyQuery("field", "value", Fuzziness.fromEdits(2), 1, 50, true, MOCK_CONTEXT); assertEquals(expected, actual); ElasticsearchException ee = expectThrows( ElasticsearchException.class, () -> ft.fuzzyQuery( + "field", "value", Fuzziness.AUTO, randomInt(10) + 1, @@ -87,17 +89,17 @@ public void testFuzzyQuery() { } public void testRangeQuery() { - RootFlattenedFieldType ft = createDefaultFieldType(); + RootFlattenedMappedFieldType ft = createDefaultFieldType(); TermRangeQuery expected = new TermRangeQuery("field", new BytesRef("lower"), new BytesRef("upper"), false, false); - assertEquals(expected, ft.rangeQuery("lower", "upper", false, false, 
MOCK_CONTEXT)); + assertEquals(expected, ft.rangeQuery("field", "lower", "upper", false, false, MOCK_CONTEXT)); expected = new TermRangeQuery("field", new BytesRef("lower"), new BytesRef("upper"), true, true); - assertEquals(expected, ft.rangeQuery("lower", "upper", true, true, MOCK_CONTEXT)); + assertEquals(expected, ft.rangeQuery("field", "lower", "upper", true, true, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.rangeQuery("lower", "upper", true, true, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.rangeQuery("field", "lower", "upper", true, true, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals( "[range] queries on [text] or [keyword] fields cannot be executed when " + "'search.allow_expensive_queries' is set to false.", @@ -106,37 +108,37 @@ public void testRangeQuery() { } public void testRegexpQuery() { - RootFlattenedFieldType ft = createDefaultFieldType(); + RootFlattenedMappedFieldType ft = createDefaultFieldType(); Query expected = new RegexpQuery(new Term("field", "val.*")); - Query actual = ft.regexpQuery("val.*", 0, 0, 10, null, MOCK_CONTEXT); + Query actual = ft.regexpQuery("field", "val.*", 0, 0, 10, null, MOCK_CONTEXT); assertEquals(expected, actual); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.regexpQuery("val.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.regexpQuery("field", "val.*", randomInt(10), 0, randomInt(10) + 1, null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[regexp] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testWildcardQuery() { - RootFlattenedFieldType ft = createDefaultFieldType(); + RootFlattenedMappedFieldType ft = createDefaultFieldType(); Query expected = new WildcardQuery(new Term("field", new BytesRef("valu*"))); - assertEquals(expected, ft.wildcardQuery("valu*", null, MOCK_CONTEXT)); + assertEquals(expected, 
ft.wildcardQuery("field", "valu*", null, MOCK_CONTEXT)); ElasticsearchException ee = expectThrows( ElasticsearchException.class, - () -> ft.wildcardQuery("valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) + () -> ft.wildcardQuery("field", "valu*", null, MOCK_CONTEXT_DISALLOW_EXPENSIVE) ); assertEquals("[wildcard] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", ee.getMessage()); } public void testFetchSourceValue() throws IOException { Map sourceValue = Map.of("key", "value"); - RootFlattenedFieldType ft = createDefaultFieldType(); + MappedField mappedField = new MappedField("field", createDefaultFieldType()); - assertEquals(List.of(sourceValue), fetchSourceValue(ft, sourceValue)); - assertEquals(List.of(), fetchSourceValue(ft, null)); + assertEquals(List.of(sourceValue), fetchSourceValue(mappedField, sourceValue)); + assertEquals(List.of(), fetchSourceValue(mappedField, null)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 069474cca314c..26694cc0d8c98 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.codec.PerFieldMapperCodec; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; @@ -114,13 +115,14 @@ protected boolean supportsStoredFields() { } @Override - protected void assertSearchable(MappedFieldType fieldType) { - assertThat(fieldType, instanceOf(DenseVectorFieldType.class)); - 
assertEquals(fieldType.isIndexed(), indexed); - assertEquals(fieldType.isSearchable(), indexed); + protected void assertSearchable(MappedField mappedField) { + assertThat(mappedField.type(), instanceOf(DenseVectorFieldType.class)); + assertEquals(mappedField.isIndexed(), indexed); + assertEquals(mappedField.isSearchable(), indexed); } - protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { + @Override + protected void assertExistsQuery(MappedField field, Query query, LuceneDocument fields) { assertThat(query, instanceOf(FieldExistsQuery.class)); FieldExistsQuery existsQuery = (FieldExistsQuery) query; assertEquals("field", existsQuery.getField()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java index b155f0b51bfc7..f6cafc698e9e1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.Version; import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.DenseVectorFieldType; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.VectorSimilarity; @@ -27,7 +28,7 @@ public DenseVectorFieldTypeTests() { } private DenseVectorFieldType createFieldType() { - return new DenseVectorFieldType("f", Version.CURRENT, 5, indexed, VectorSimilarity.cosine, Collections.emptyMap()); + return new DenseVectorFieldType(Version.CURRENT, 5, indexed, VectorSimilarity.cosine, Collections.emptyMap()); } public void testHasDocValues() { @@ -47,28 +48,27 @@ public void testIsSearchable() { public void testIsAggregatable() { DenseVectorFieldType ft = createFieldType(); - 
assertFalse(ft.isAggregatable()); + assertFalse(ft.isAggregatable("field")); } public void testFielddataBuilder() { DenseVectorFieldType ft = createFieldType(); - assertNotNull(ft.fielddataBuilder("index", () -> { throw new UnsupportedOperationException(); })); + assertNotNull(ft.fielddataBuilder("field", "index", () -> { throw new UnsupportedOperationException(); })); } public void testDocValueFormat() { DenseVectorFieldType ft = createFieldType(); - expectThrows(IllegalArgumentException.class, () -> ft.docValueFormat(null, null)); + expectThrows(IllegalArgumentException.class, () -> ft.docValueFormat("field", null, null)); } public void testFetchSourceValue() throws IOException { DenseVectorFieldType ft = createFieldType(); List vector = List.of(0.0, 1.0, 2.0, 3.0, 4.0); - assertEquals(vector, fetchSourceValue(ft, vector)); + assertEquals(vector, fetchSourceValue(new MappedField("field", ft), vector)); } public void testCreateKnnQuery() { DenseVectorFieldType unindexedField = new DenseVectorFieldType( - "f", Version.CURRENT, 3, false, @@ -77,30 +77,31 @@ public void testCreateKnnQuery() { ); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> unindexedField.createKnnQuery(new float[] { 0.3f, 0.1f, 1.0f }, 10, null) + () -> unindexedField.createKnnQuery("f", new float[] { 0.3f, 0.1f, 1.0f }, 10, null) ); assertThat(e.getMessage(), containsString("to perform knn search on field [f], its mapping must have [index] set to [true]")); DenseVectorFieldType dotProductField = new DenseVectorFieldType( - "f", Version.CURRENT, 3, true, VectorSimilarity.dot_product, Collections.emptyMap() ); - e = expectThrows(IllegalArgumentException.class, () -> dotProductField.createKnnQuery(new float[] { 0.3f, 0.1f, 1.0f }, 10, null)); + e = expectThrows( + IllegalArgumentException.class, + () -> dotProductField.createKnnQuery("f", new float[] { 0.3f, 0.1f, 1.0f }, 10, null) + ); assertThat(e.getMessage(), containsString("The [dot_product] similarity can 
only be used with unit-length vectors.")); DenseVectorFieldType cosineField = new DenseVectorFieldType( - "f", Version.CURRENT, 3, true, VectorSimilarity.cosine, Collections.emptyMap() ); - e = expectThrows(IllegalArgumentException.class, () -> cosineField.createKnnQuery(new float[] { 0.0f, 0.0f, 0.0f }, 10, null)); + e = expectThrows(IllegalArgumentException.class, () -> cosineField.createKnnQuery("f", new float[] { 0.0f, 0.0f, 0.0f }, 10, null)); assertThat(e.getMessage(), containsString("The [cosine] similarity does not support vectors with zero magnitude.")); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java index f3769216eb03b..1b9fd68b88d36 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java @@ -38,7 +38,10 @@ protected boolean forbidPrivateIndexSettings() { public void testValueFetcherIsNotSupported() { SparseVectorFieldMapper.Builder builder = new SparseVectorFieldMapper.Builder("field"); MappedFieldType fieldMapper = builder.build(MapperBuilderContext.ROOT).fieldType(); - UnsupportedOperationException exc = expectThrows(UnsupportedOperationException.class, () -> fieldMapper.valueFetcher(null, null)); + UnsupportedOperationException exc = expectThrows( + UnsupportedOperationException.class, + () -> fieldMapper.valueFetcher("field", null, null) + ); assertEquals(SparseVectorFieldMapper.ERROR_MESSAGE_7X, exc.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldTypeTests.java index 05187bc3d3b0d..7a54f026047bd 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldTypeTests.java @@ -16,31 +16,37 @@ public class SparseVectorFieldTypeTests extends FieldTypeTestCase { public void testDocValuesDisabled() { - MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType("field", Collections.emptyMap()); + MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType(Collections.emptyMap()); assertFalse(fieldType.hasDocValues()); - expectThrows(IllegalArgumentException.class, () -> fieldType.fielddataBuilder("index", null)); + expectThrows(IllegalArgumentException.class, () -> fieldType.fielddataBuilder("field", "index", null)); } public void testIsNotAggregatable() { - MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType("field", Collections.emptyMap()); - assertFalse(fieldType.isAggregatable()); + MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType(Collections.emptyMap()); + assertFalse(fieldType.isAggregatable("field")); } public void testDocValueFormatIsNotSupported() { - MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType("field", Collections.emptyMap()); - UnsupportedOperationException exc = expectThrows(UnsupportedOperationException.class, () -> fieldType.docValueFormat(null, null)); + MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType(Collections.emptyMap()); + UnsupportedOperationException exc = expectThrows( + UnsupportedOperationException.class, + () -> fieldType.docValueFormat("field", null, null) + ); assertEquals(SparseVectorFieldMapper.ERROR_MESSAGE_7X, exc.getMessage()); } public void testExistsQueryIsNotSupported() { - MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType("field", Collections.emptyMap()); - UnsupportedOperationException exc = 
expectThrows(UnsupportedOperationException.class, () -> fieldType.existsQuery(null)); + MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType(Collections.emptyMap()); + UnsupportedOperationException exc = expectThrows(UnsupportedOperationException.class, () -> fieldType.existsQuery("field", null)); assertEquals(SparseVectorFieldMapper.ERROR_MESSAGE_7X, exc.getMessage()); } public void testTermQueryIsNotSupported() { - MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType("field", Collections.emptyMap()); - UnsupportedOperationException exc = expectThrows(UnsupportedOperationException.class, () -> fieldType.termQuery(null, null)); + MappedFieldType fieldType = new SparseVectorFieldMapper.SparseVectorFieldType(Collections.emptyMap()); + UnsupportedOperationException exc = expectThrows( + UnsupportedOperationException.class, + () -> fieldType.termQuery("field", null, null) + ); assertEquals(SparseVectorFieldMapper.ERROR_MESSAGE_7X, exc.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java index dc3b4dabe971d..af82cbc2f51d9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java @@ -70,7 +70,7 @@ protected void doAssertLuceneQuery(DistanceFeatureQueryBuilder queryBuilder, Que double pivotDouble = DistanceUnit.DEFAULT.parse(pivot, DistanceUnit.DEFAULT); expectedQuery = LatLonPoint.newDistanceFeatureQuery(fieldName, 1.0f, originGeoPoint.lat(), originGeoPoint.lon(), pivotDouble); } else { // if (fieldName.equals(DATE_FIELD_NAME)) - DateFieldType fieldType = (DateFieldType) context.getFieldType(fieldName); + DateFieldType fieldType = (DateFieldType) context.getMappedField(fieldName).type(); long originLong = 
fieldType.parseToLong(origin, true, null, null, context::nowInMillis); TimeValue pivotVal = TimeValue.parseTimeValue(pivot, DistanceFeatureQueryBuilder.class.getSimpleName() + ".pivot"); long pivotLong; diff --git a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index efc37610de8cb..5a4b489f42a9a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -59,7 +59,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, assertThat(query, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; String field = expectedFieldName(fields.iterator().next()); - if (context.getFieldType(field) == null) { + if (context.getMappedField(field) == null) { // not a leaf field, so we're doing an object exists query assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) constantScoreQuery.getQuery(); @@ -68,7 +68,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, for (BooleanClause booleanClause : booleanQuery) { assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); } - } else if (context.getFieldType(field).hasDocValues() || context.getFieldType(field).getTextSearchInfo().hasNorms()) { + } else if (context.getMappedField(field).hasDocValues() || context.getMappedField(field).getTextSearchInfo().hasNorms()) { assertThat(constantScoreQuery.getQuery(), instanceOf(FieldExistsQuery.class)); FieldExistsQuery existsQuery = (FieldExistsQuery) constantScoreQuery.getQuery(); assertEquals(field, existsQuery.getField()); diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index c0875b88207da..2f986b91bafc7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.geometry.utils.Geohash; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; @@ -244,10 +244,10 @@ public void testStrictnessDefault() { @Override protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException { - final MappedFieldType fieldType = context.getFieldType(queryBuilder.fieldName()); - if (fieldType == null) { + final MappedField mappedField = context.getMappedField(queryBuilder.fieldName()); + if (mappedField == null) { assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery); - } else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) { + } else if (mappedField.type() instanceof GeoPointFieldMapper.GeoPointFieldType) { assertEquals(IndexOrDocValuesQuery.class, query.getClass()); Query indexQuery = ((IndexOrDocValuesQuery) query).getIndexQuery(); String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); @@ -259,7 +259,7 @@ protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Quer Query dvQuery = ((IndexOrDocValuesQuery) query).getRandomAccessQuery(); assertEquals(LatLonDocValuesField.newSlowBoxQuery(expectedFieldName, qMinLat, qMaxLat, qMinLon, qMaxLon), dvQuery); } else { - assertEquals(GeoShapeFieldMapper.GeoShapeFieldType.class, 
fieldType.getClass()); + assertEquals(GeoShapeFieldMapper.GeoShapeFieldType.class, mappedField.type().getClass()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index a564b04122537..41da2ca4cbf21 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; @@ -143,10 +143,10 @@ public void testToQuery() throws IOException { @Override protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException { - final MappedFieldType fieldType = context.getFieldType(queryBuilder.fieldName()); - if (fieldType == null) { + final MappedField mappedField = context.getMappedField(queryBuilder.fieldName()); + if (mappedField == null) { assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery); - } else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) { + } else if (mappedField.type() instanceof GeoPointFieldMapper.GeoPointFieldType) { Query indexQuery = ((IndexOrDocValuesQuery) query).getIndexQuery(); String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); @@ -156,7 +156,7 @@ protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query q Query dvQuery = ((IndexOrDocValuesQuery) query).getRandomAccessQuery(); 
assertEquals(LatLonDocValuesField.newSlowDistanceQuery(expectedFieldName, qLat, qLon, queryBuilder.distance()), dvQuery); } else { - assertEquals(GeoShapeFieldMapper.GeoShapeFieldType.class, fieldType.getClass()); + assertEquals(GeoShapeFieldMapper.GeoShapeFieldType.class, mappedField.type().getClass()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index a8108175da68b..951ef72238db7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.LinearRing; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -50,8 +50,8 @@ protected GeoPolygonQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(GeoPolygonQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException { - MappedFieldType fieldType = context.getFieldType(queryBuilder.fieldName()); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(queryBuilder.fieldName()); + if (mappedField == null) { assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery); } else { // TODO: Test case when there are no docValues Query indexQuery = ((IndexOrDocValuesQuery) query).getIndexQuery(); diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index e50e0f82b400f..57d9d2fb71ab1 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.search.MatchQueryParser; import org.elasticsearch.index.search.MatchQueryParser.Type; @@ -147,13 +147,13 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, return; } - MappedFieldType fieldType = context.getFieldType(queryBuilder.fieldName()); - if (query instanceof TermQuery && fieldType != null) { + MappedField mappedField = context.getMappedField(queryBuilder.fieldName()); + if (query instanceof TermQuery && mappedField != null) { String queryValue = queryBuilder.value().toString(); if (isTextField(queryBuilder.fieldName()) && (queryBuilder.analyzer() == null || queryBuilder.analyzer().equals("simple"))) { queryValue = queryValue.toLowerCase(Locale.ROOT); } - Query expectedTermQuery = fieldType.termQuery(queryValue, context); + Query expectedTermQuery = mappedField.termQuery(queryValue, context); assertEquals(expectedTermQuery, query); } diff --git a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java index b38a0b59a0c61..06314e2cc880b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java @@ -63,8 +63,9 @@ protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, query, Matchers.anyOf(instanceOf(PrefixQuery.class), 
instanceOf(MatchNoDocsQuery.class), instanceOf(AutomatonQuery.class)) ); - if (context.getFieldType(queryBuilder.fieldName()) != null && queryBuilder.caseInsensitive() == false) { // The field is mapped and - // case sensitive + if (context.getMappedField(queryBuilder.fieldName()) != null && queryBuilder.caseInsensitive() == false) { // The field is mapped + // and + // case sensitive PrefixQuery prefixQuery = (PrefixQuery) query; String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 860cb00675c2c..8c3cc8b770eb4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -1040,7 +1040,7 @@ public void testExistsFieldQuery() throws Exception { SearchExecutionContext context = createSearchExecutionContext(); QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder(TEXT_FIELD_NAME + ":*"); Query query = queryBuilder.toQuery(context); - if (context.getFieldType(TEXT_FIELD_NAME).getTextSearchInfo().hasNorms()) { + if (context.getMappedField(TEXT_FIELD_NAME).getTextSearchInfo().hasNorms()) { assertThat(query, equalTo(new ConstantScoreQuery(new FieldExistsQuery(TEXT_FIELD_NAME)))); } else { assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", TEXT_FIELD_NAME))))); @@ -1050,7 +1050,7 @@ public void testExistsFieldQuery() throws Exception { String value = (quoted ? "\"" : "") + TEXT_FIELD_NAME + (quoted ? 
"\"" : ""); queryBuilder = new QueryStringQueryBuilder("_exists_:" + value); query = queryBuilder.toQuery(context); - if (context.getFieldType(TEXT_FIELD_NAME).getTextSearchInfo().hasNorms()) { + if (context.getMappedField(TEXT_FIELD_NAME).getTextSearchInfo().hasNorms()) { assertThat(query, equalTo(new ConstantScoreQuery(new FieldExistsQuery(TEXT_FIELD_NAME)))); } else { assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", TEXT_FIELD_NAME))))); diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index 67a640c6e694b..fd6ccb6362c9e 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.test.AbstractQueryTestCase; @@ -64,7 +65,7 @@ protected RangeQueryBuilder doCreateTestQueryBuilder() { upper = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(end); // Create timestamp option only then we have a date mapper, // otherwise we could trigger exception. 
- if (createSearchExecutionContext().getFieldType(DATE_FIELD_NAME) != null) { + if (createSearchExecutionContext().getMappedField(DATE_FIELD_NAME) != null) { if (randomBoolean()) { query.timeZone(randomZone().getId()); } @@ -149,8 +150,8 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); if (queryBuilder.from() == null && queryBuilder.to() == null) { final Query expectedQuery; - final MappedFieldType resolvedFieldType = context.getFieldType(queryBuilder.fieldName()); - if (resolvedFieldType.hasDocValues() || context.getFieldType(resolvedFieldType.name()).getTextSearchInfo().hasNorms()) { + final MappedField mappedField = context.getMappedField(queryBuilder.fieldName()); + if (mappedField.hasDocValues() || context.getMappedField(mappedField.name()).getTextSearchInfo().hasNorms()) { expectedQuery = new ConstantScoreQuery(new FieldExistsQuery(expectedFieldName)); } else { expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName))); @@ -171,14 +172,14 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, assertThat(query, instanceOf(IndexOrDocValuesQuery.class)); query = ((IndexOrDocValuesQuery) query).getIndexQuery(); assertThat(query, instanceOf(PointRangeQuery.class)); - MappedFieldType mappedFieldType = context.getFieldType(expectedFieldName); + MappedField mappedField = context.getMappedField(expectedFieldName); final Long fromInMillis; final Long toInMillis; // we have to normalize the incoming value into milliseconds since it could be literally anything - if (mappedFieldType instanceof DateFieldMapper.DateFieldType) { + if (mappedField.type() instanceof DateFieldMapper.DateFieldType) { fromInMillis = queryBuilder.from() == null ? 
null - : ((DateFieldMapper.DateFieldType) mappedFieldType).parseToLong( + : ((DateFieldMapper.DateFieldType) mappedField.type()).parseToLong( queryBuilder.from(), queryBuilder.includeLower(), queryBuilder.getDateTimeZone(), @@ -187,7 +188,7 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, ); toInMillis = queryBuilder.to() == null ? null - : ((DateFieldMapper.DateFieldType) mappedFieldType).parseToLong( + : ((DateFieldMapper.DateFieldType) mappedField.type()).parseToLong( queryBuilder.to(), queryBuilder.includeUpper(), queryBuilder.getDateTimeZone(), @@ -196,7 +197,7 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, ); } else { fromInMillis = toInMillis = null; - fail("unexpected mapped field type: [" + mappedFieldType.getClass() + "] " + mappedFieldType.toString()); + fail("unexpected mapped field type: [" + mappedField.getClass() + "] " + mappedField.toString()); } Long min = fromInMillis; @@ -448,7 +449,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC // Range query with open bounds rewrite to an exists query Query luceneQuery = rewrittenRange.toQuery(searchExecutionContext); final Query expectedQuery; - if (searchExecutionContext.getFieldType(query.fieldName()).hasDocValues()) { + if (searchExecutionContext.getMappedField(query.fieldName()).hasDocValues()) { expectedQuery = new ConstantScoreQuery(new FieldExistsQuery(query.fieldName())); } else { expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, query.fieldName()))); diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 4f87376311452..99e39b06c585e 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ 
-42,7 +42,7 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.KeywordScriptFieldType; import org.elasticsearch.index.mapper.LongScriptFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperRegistry; @@ -104,32 +104,32 @@ public class SearchExecutionContextTests extends ESTestCase { public void testFailIfFieldMappingNotFound() { SearchExecutionContext context = createSearchExecutionContext(IndexMetadata.INDEX_UUID_NA_VALUE, null); context.setAllowUnmappedFields(false); - MappedFieldType fieldType = new TextFieldMapper.TextFieldType("text"); - MappedFieldType result = context.failIfFieldMappingNotFound("name", fieldType); - assertThat(result, sameInstance(fieldType)); + MappedField mappedField = new MappedField("name", new TextFieldMapper.TextFieldType()); + MappedField result = context.failIfFieldMappingNotFound("name", mappedField); + assertThat(result, sameInstance(mappedField)); QueryShardException e = expectThrows(QueryShardException.class, () -> context.failIfFieldMappingNotFound("name", null)); assertEquals("No field mapping can be found for the field with name [name]", e.getMessage()); context.setAllowUnmappedFields(true); - result = context.failIfFieldMappingNotFound("name", fieldType); - assertThat(result, sameInstance(fieldType)); + result = context.failIfFieldMappingNotFound("name", mappedField); + assertThat(result, sameInstance(mappedField)); result = context.failIfFieldMappingNotFound("name", null); assertThat(result, nullValue()); context.setAllowUnmappedFields(false); context.setMapUnmappedFieldAsString(true); - result = context.failIfFieldMappingNotFound("name", fieldType); - assertThat(result, sameInstance(fieldType)); + result = context.failIfFieldMappingNotFound("name", 
mappedField); + assertThat(result, sameInstance(mappedField)); result = context.failIfFieldMappingNotFound("name", null); assertThat(result, notNullValue()); - assertThat(result, instanceOf(TextFieldMapper.TextFieldType.class)); + assertThat(result.type(), instanceOf(TextFieldMapper.TextFieldType.class)); assertThat(result.name(), equalTo("name")); } public void testBuildAnonymousFieldType() { SearchExecutionContext context = createSearchExecutionContext("uuid", null); - assertThat(context.buildAnonymousFieldType("keyword"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); - assertThat(context.buildAnonymousFieldType("long"), instanceOf(NumberFieldMapper.NumberFieldType.class)); + assertThat(context.buildAnonymousField("keyword").type(), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertThat(context.buildAnonymousField("long").type(), instanceOf(NumberFieldMapper.NumberFieldType.class)); } public void testToQueryFails() { @@ -149,7 +149,7 @@ public void testClusterAlias() throws IOException { IndexFieldMapper mapper = new IndexFieldMapper(); - IndexFieldData forField = context.getForField(mapper.fieldType()); + IndexFieldData forField = context.getForField(mapper.field()); String expected = clusterAlias == null ? 
context.getIndexSettings().getIndexMetadata().getIndex().getName() : clusterAlias + ":" + context.getIndexSettings().getIndex().getName(); @@ -299,7 +299,7 @@ public void testFielddataLookupOneFieldManyReferences() throws IOException { ); } - private static MappingLookup createMappingLookup(List concreteFields, List runtimeFields) { + private static MappingLookup createMappingLookup(List concreteFields, List runtimeFields) { List mappers = concreteFields.stream().map(MockFieldMapper::new).toList(); RootObjectMapper.Builder builder = new RootObjectMapper.Builder("_doc", ObjectMapper.Defaults.SUBOBJECTS); Map runtimeFieldTypes = runtimeFields.stream().collect(Collectors.toMap(RuntimeField::name, r -> r)); @@ -322,19 +322,22 @@ public void testSearchRequestRuntimeFields() { "uuid", null, createMappingLookup( - List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")), + List.of( + new MappedField("pig", new MockFieldMapper.FakeFieldType()), + new MappedField("cat", new MockFieldMapper.FakeFieldType()) + ), List.of(new TestRuntimeField("runtime", "long")) ), runtimeMappings ); assertTrue(context.isFieldMapped("cat")); - assertThat(context.getFieldType("cat"), instanceOf(KeywordScriptFieldType.class)); + assertThat(context.getMappedField("cat").type(), instanceOf(KeywordScriptFieldType.class)); assertThat(context.getMatchingFieldNames("cat"), equalTo(Set.of("cat"))); assertTrue(context.isFieldMapped("dog")); - assertThat(context.getFieldType("dog"), instanceOf(LongScriptFieldType.class)); + assertThat(context.getMappedField("dog").type(), instanceOf(LongScriptFieldType.class)); assertThat(context.getMatchingFieldNames("dog"), equalTo(Set.of("dog"))); assertTrue(context.isFieldMapped("pig")); - assertThat(context.getFieldType("pig"), instanceOf(MockFieldMapper.FakeFieldType.class)); + assertThat(context.getMappedField("pig").type(), instanceOf(MockFieldMapper.FakeFieldType.class)); assertThat(context.getMatchingFieldNames("pig"), 
equalTo(Set.of("pig"))); assertThat(context.getMatchingFieldNames("*"), equalTo(Set.of("cat", "dog", "pig", "runtime"))); } @@ -347,7 +350,13 @@ public void testSearchRequestRuntimeFieldsWrongFormat() { () -> createSearchExecutionContext( "uuid", null, - createMappingLookup(List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")), List.of()), + createMappingLookup( + List.of( + new MappedField("pig", new MockFieldMapper.FakeFieldType()), + new MappedField("cat", new MockFieldMapper.FakeFieldType()) + ), + List.of() + ), runtimeMappings ) ); @@ -362,7 +371,13 @@ public void testSearchRequestRuntimeFieldsRemoval() { () -> createSearchExecutionContext( "uuid", null, - createMappingLookup(List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")), List.of()), + createMappingLookup( + List.of( + new MappedField("pig", new MockFieldMapper.FakeFieldType()), + new MappedField("cat", new MockFieldMapper.FakeFieldType()) + ), + List.of() + ), runtimeMappings ) ); @@ -377,10 +392,10 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { ); MappingLookup mappingLookup = createMappingLookup( List.of( - new MockFieldMapper.FakeFieldType("pig"), - new MockFieldMapper.FakeFieldType("pig.subfield"), - new MockFieldMapper.FakeFieldType("cat"), - new MockFieldMapper.FakeFieldType("cat.subfield") + new MappedField("pig", new MockFieldMapper.FakeFieldType()), + new MappedField("pig.subfield", new MockFieldMapper.FakeFieldType()), + new MappedField("cat", new MockFieldMapper.FakeFieldType()), + new MappedField("cat.subfield", new MockFieldMapper.FakeFieldType()) ), List.of(new TestRuntimeField("runtime", "long")) ); @@ -480,9 +495,13 @@ private static RuntimeField runtimeField(String name, Function runtimeDocValues) { - TestRuntimeField.TestRuntimeFieldType fieldType = new TestRuntimeField.TestRuntimeFieldType(name, null) { + TestRuntimeField.TestRuntimeFieldType fieldType = new 
TestRuntimeField.TestRuntimeFieldType(null) { @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder( + String name, + String fullyQualifiedIndexName, + Supplier searchLookup + ) { return (cache, breakerService) -> new IndexFieldData<>() { @Override public String getFieldName() { @@ -581,7 +600,7 @@ public BucketedSort newBucketedSort( }; } }; - return new TestRuntimeField(name, Collections.singleton(fieldType)); + return new TestRuntimeField(name, Collections.singleton(new MappedField(name, fieldType))); } private static List collect(String field, SearchExecutionContext searchExecutionContext) throws IOException { @@ -595,12 +614,12 @@ private static List collect(String field, SearchExecutionContext searchE indexWriter.addDocument(List.of(new StringField("indexed_field", "second", Field.Store.NO))); try (DirectoryReader reader = indexWriter.getReader()) { IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = searchExecutionContext.getFieldType(field); + MappedField mappedField = searchExecutionContext.getMappedField(field); IndexFieldData indexFieldData; if (randomBoolean()) { - indexFieldData = searchExecutionContext.getForField(fieldType); + indexFieldData = searchExecutionContext.getForField(mappedField); } else { - indexFieldData = searchExecutionContext.lookup().getForField(fieldType); + indexFieldData = searchExecutionContext.lookup().getForField(mappedField); } searcher.search(query, new Collector() { @Override diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java index cc341c2be7831..3e03b3a46586a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java @@ -14,7 +14,7 @@ import 
org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.json.JsonStringEncoder; import java.io.IOException; @@ -52,7 +52,7 @@ protected void doAssertLuceneQuery(SpanTermQueryBuilder queryBuilder, Query quer String expectedFieldName = expectedFieldName(queryBuilder.fieldName); assertThat(spanTermQuery.getTerm().field(), equalTo(expectedFieldName)); - MappedFieldType mapper = context.getFieldType(queryBuilder.fieldName()); + MappedField mapper = context.getMappedField(queryBuilder.fieldName()); if (mapper != null) { Term term = ((TermQuery) mapper.termQuery(queryBuilder.value(), null)).getTerm(); assertThat(spanTermQuery.getTerm(), equalTo(term)); diff --git a/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java index ea1f9c6160cb0..72ccd4800be34 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java @@ -16,7 +16,7 @@ import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.xcontent.json.JsonStringEncoder; import java.io.IOException; @@ -88,7 +88,7 @@ protected void doAssertLuceneQuery(TermQueryBuilder queryBuilder, Query query, S .or(instanceOf(MatchNoDocsQuery.class)) .or(instanceOf(AutomatonQuery.class)) ); - MappedFieldType mapper = context.getFieldType(queryBuilder.fieldName()); + MappedField mapper = context.getMappedField(queryBuilder.fieldName()); if (query instanceof TermQuery termQuery) { String 
expectedFieldName = expectedFieldName(queryBuilder.fieldName()); @@ -103,7 +103,7 @@ protected void doAssertLuceneQuery(TermQueryBuilder queryBuilder, Query query, S } } - private Query termQuery(MappedFieldType mapper, Object value, boolean caseInsensitive) { + private Query termQuery(MappedField mapper, Object value, boolean caseInsensitive) { if (caseInsensitive) { return mapper.termQueryCaseInsensitive(value, FieldTypeTestCase.MOCK_CONTEXT); } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 9cc03612ccbe4..d4e43ae9b9a2d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -126,7 +126,7 @@ protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, String fieldName = expectedFieldName(queryBuilder.fieldName()); Query expected; - if (context.getFieldType(fieldName) != null) { + if (context.getMappedField(fieldName) != null) { expected = new TermInSetQuery( fieldName, terms.stream().filter(Objects::nonNull).map(Object::toString).map(BytesRef::new).toList() diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilderTests.java index d57db8e251ba5..fcd4cc83dab2a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilderTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import 
org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.index.query.SearchExecutionContext; @@ -53,7 +53,8 @@ public void testRandomScoreFunctionWithSeedNoField() throws Exception { Mockito.when(context.index()).thenReturn(settings.getIndex()); Mockito.when(context.getShardId()).thenReturn(0); Mockito.when(context.getIndexSettings()).thenReturn(settings); - Mockito.when(context.getFieldType(IdFieldMapper.NAME)).thenReturn(new KeywordFieldMapper.KeywordFieldType(IdFieldMapper.NAME)); + Mockito.when(context.getMappedField(IdFieldMapper.NAME)) + .thenReturn(new MappedField(IdFieldMapper.NAME, new KeywordFieldMapper.KeywordFieldType())); Mockito.when(context.isFieldMapped(IdFieldMapper.NAME)).thenReturn(true); builder.toFunction(context); assertWarnings("As of version 7.0 Elasticsearch will require that a [field] parameter is provided when a [seed] is set"); @@ -73,8 +74,8 @@ public void testRandomScoreFunctionWithSeed() throws Exception { Mockito.when(context.index()).thenReturn(settings.getIndex()); Mockito.when(context.getShardId()).thenReturn(0); Mockito.when(context.getIndexSettings()).thenReturn(settings); - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("foo", NumberType.LONG); - Mockito.when(context.getFieldType("foo")).thenReturn(ft); + MappedField mappedField = new MappedField("foo", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + Mockito.when(context.getMappedField("foo")).thenReturn(mappedField); Mockito.when(context.isFieldMapped("foo")).thenReturn(true); builder.toFunction(context); } diff --git a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryParserTests.java b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryParserTests.java index db418016fb70f..d4e92d23c019d 100644 --- a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryParserTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryParserTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MockFieldMapper.FakeFieldType; import org.elasticsearch.index.query.MultiMatchQueryBuilder; @@ -121,8 +122,8 @@ public void testCrossFieldMultiMatchQuery() throws IOException { } public void testBlendTerms() { - FakeFieldType ft1 = new FakeFieldType("foo"); - FakeFieldType ft2 = new FakeFieldType("bar"); + MappedField f1 = new MappedField("foo", new FakeFieldType()); + MappedField f2 = new MappedField("bar", new FakeFieldType()); Term[] terms = new Term[] { new Term("foo", "baz"), new Term("bar", "baz") }; float[] boosts = new float[] { 2, 3 }; Query expected = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); @@ -138,24 +139,24 @@ public void testBlendTerms() { new BytesRef("baz"), 1f, false, - Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3)) + Arrays.asList(new FieldAndBoost(f1, 2), new FieldAndBoost(f2, 3)) ); assertEquals(expected, actual); } public void testBlendTermsUnsupportedValueWithLenient() { - FakeFieldType ft1 = new FakeFieldType("foo"); - FakeFieldType ft2 = new FakeFieldType("bar") { + MappedField f1 = new MappedField("foo", new FakeFieldType()); + MappedField f2 = new MappedField("bar", new FakeFieldType() { @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new IllegalArgumentException(); } - }; + }); Term[] terms = new Term[] { new Term("foo", "baz") }; float[] boosts = new float[] { 2 }; Query expected = new DisjunctionMaxQuery( Arrays.asList( - Queries.newMatchNoDocsQuery("failed [" + ft2.name() + "] query, caused 
by illegal_argument_exception:[null]"), + Queries.newMatchNoDocsQuery("failed [" + f2.name() + "] query, caused by illegal_argument_exception:[null]"), BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f) ), 1f @@ -172,18 +173,18 @@ public Query termQuery(Object value, SearchExecutionContext context) { new BytesRef("baz"), 1f, true, - Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3)) + Arrays.asList(new FieldAndBoost(f1, 2), new FieldAndBoost(f2, 3)) ); assertEquals(expected, actual); } public void testBlendTermsUnsupportedValueWithoutLenient() { - FakeFieldType ft = new FakeFieldType("bar") { + MappedField mappedField = new MappedField("bar", new FakeFieldType() { @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new IllegalArgumentException(); } - }; + }); expectThrows( IllegalArgumentException.class, () -> MultiMatchQueryParser.blendTerm( @@ -198,19 +199,19 @@ public Query termQuery(Object value, SearchExecutionContext context) { new BytesRef("baz"), 1f, false, - Arrays.asList(new FieldAndBoost(ft, 1)) + Arrays.asList(new FieldAndBoost(mappedField, 1)) ) ); } public void testBlendNoTermQuery() { - FakeFieldType ft1 = new FakeFieldType("foo"); - FakeFieldType ft2 = new FakeFieldType("bar") { + MappedField f1 = new MappedField("foo", new FakeFieldType()); + MappedField f2 = new MappedField("bar", new FakeFieldType() { @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { return new MatchAllDocsQuery(); } - }; + }); Term[] terms = new Term[] { new Term("foo", "baz") }; float[] boosts = new float[] { 2 }; Query expectedDisjunct1 = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); @@ -228,7 +229,7 @@ public Query termQuery(Object value, SearchExecutionContext context) { new BytesRef("baz"), 1f, false, 
- Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3)) + Arrays.asList(new FieldAndBoost(f1, 2), new FieldAndBoost(f2, 3)) ); assertEquals(expected, actual); } diff --git a/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java b/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java index 793e9eadf9f02..469a40f72bdce 100644 --- a/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/NestedHelperTests.java @@ -70,7 +70,7 @@ public void setUp() throws Exception { } private static NestedHelper buildNestedHelper(MapperService mapperService) { - return new NestedHelper(mapperService.mappingLookup().nestedLookup(), field -> mapperService.fieldType(field) != null); + return new NestedHelper(mapperService.mappingLookup().nestedLookup(), field -> mapperService.mappedField(field) != null); } public void testMatchAll() { @@ -90,28 +90,28 @@ public void testMatchNo() { } public void testTermsQuery() { - Query termsQuery = mapperService.fieldType("foo").termsQuery(Collections.singletonList("bar"), null); + Query termsQuery = mapperService.mappedField("foo").termsQuery(Collections.singletonList("bar"), null); assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); - termsQuery = mapperService.fieldType("nested1.foo").termsQuery(Collections.singletonList("bar"), null); + termsQuery = mapperService.mappedField("nested1.foo").termsQuery(Collections.singletonList("bar"), null); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); 
assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); - termsQuery = mapperService.fieldType("nested2.foo").termsQuery(Collections.singletonList("bar"), null); + termsQuery = mapperService.mappedField("nested2.foo").termsQuery(Collections.singletonList("bar"), null); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing")); - termsQuery = mapperService.fieldType("nested3.foo").termsQuery(Collections.singletonList("bar"), null); + termsQuery = mapperService.mappedField("nested3.foo").termsQuery(Collections.singletonList("bar"), null); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termsQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2")); @@ -120,28 +120,28 @@ public void testTermsQuery() { } public void testTermQuery() { - Query termQuery = mapperService.fieldType("foo").termQuery("bar", null); + Query termQuery = mapperService.mappedField("foo").termQuery("bar", null); assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(termQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested1")); 
assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested2")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested_missing")); - termQuery = mapperService.fieldType("nested1.foo").termQuery("bar", null); + termQuery = mapperService.mappedField("nested1.foo").termQuery("bar", null); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termQuery)); assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested2")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested_missing")); - termQuery = mapperService.fieldType("nested2.foo").termQuery("bar", null); + termQuery = mapperService.mappedField("nested2.foo").termQuery("bar", null); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested2")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested_missing")); - termQuery = mapperService.fieldType("nested3.foo").termQuery("bar", null); + termQuery = mapperService.mappedField("nested3.foo").termQuery("bar", null); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(termQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(termQuery, "nested2")); @@ -151,28 +151,28 @@ public void testTermQuery() { public void 
testRangeQuery() { SearchExecutionContext context = mock(SearchExecutionContext.class); - Query rangeQuery = mapperService.fieldType("foo2").rangeQuery(2, 5, true, true, null, null, null, context); + Query rangeQuery = mapperService.mappedField("foo2").rangeQuery(2, 5, true, true, null, null, null, context); assertFalse(buildNestedHelper(mapperService).mightMatchNestedDocs(rangeQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested2")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested_missing")); - rangeQuery = mapperService.fieldType("nested1.foo2").rangeQuery(2, 5, true, true, null, null, null, context); + rangeQuery = mapperService.mappedField("nested1.foo2").rangeQuery(2, 5, true, true, null, null, null, context); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(rangeQuery)); assertFalse(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested2")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested_missing")); - rangeQuery = mapperService.fieldType("nested2.foo2").rangeQuery(2, 5, true, true, null, null, null, context); + rangeQuery = mapperService.mappedField("nested2.foo2").rangeQuery(2, 5, true, true, null, null, null, context); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(rangeQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested2")); 
assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested3")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested_missing")); - rangeQuery = mapperService.fieldType("nested3.foo2").rangeQuery(2, 5, true, true, null, null, null, context); + rangeQuery = mapperService.mappedField("nested3.foo2").rangeQuery(2, 5, true, true, null, null, null, context); assertTrue(buildNestedHelper(mapperService).mightMatchNestedDocs(rangeQuery)); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested1")); assertTrue(buildNestedHelper(mapperService).mightMatchNonNestedDocs(rangeQuery, "nested2")); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index b4748d99fa1b4..952e444cfe14c 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -82,7 +82,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SeqNoFieldMapper; @@ -2654,7 +2654,7 @@ public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException { shard.refresh("created segment 2"); // test global ordinals are evicted - MappedFieldType foo = shard.mapperService().fieldType("foo"); + MappedField foo = shard.mapperService().mappedField("foo"); IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache( shard.indexSettings.getNodeSettings(), new IndexFieldDataCache.Listener() { diff --git 
a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index a52fd7e608d24..f49e49fdf9f29 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; @@ -74,9 +74,9 @@ public void testResolveSimilaritiesFromMapping_bm25() throws IOException { .put("index.similarity.my_similarity.discount_overlaps", false) .build(); MapperService mapperService = createIndex("foo", indexSettings, mapping).mapperService(); - assertThat(mapperService.fieldType("field1").getTextSearchInfo().similarity().get(), instanceOf(LegacyBM25Similarity.class)); + assertThat(mapperService.mappedField("field1").getTextSearchInfo().similarity().get(), instanceOf(LegacyBM25Similarity.class)); - LegacyBM25Similarity similarity = (LegacyBM25Similarity) mapperService.fieldType("field1").getTextSearchInfo().similarity().get(); + LegacyBM25Similarity similarity = (LegacyBM25Similarity) mapperService.mappedField("field1").getTextSearchInfo().similarity().get(); assertThat(similarity.getK1(), equalTo(2.0f)); assertThat(similarity.getB(), equalTo(0.5f)); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); @@ -96,7 +96,7 @@ public void testResolveSimilaritiesFromMapping_boolean() throws IOException { .endObject(); MapperService mapperService = createIndex("foo", Settings.EMPTY, mapping).mapperService(); - 
assertThat(mapperService.fieldType("field1").getTextSearchInfo().similarity().get(), instanceOf(BooleanSimilarity.class)); + assertThat(mapperService.mappedField("field1").getTextSearchInfo().similarity().get(), instanceOf(BooleanSimilarity.class)); } public void testResolveSimilaritiesFromMapping_DFR() throws IOException { @@ -120,9 +120,9 @@ public void testResolveSimilaritiesFromMapping_DFR() throws IOException { .put("index.similarity.my_similarity.normalization.h2.c", 3f) .build(); MapperService mapperService = createIndex("foo", indexSettings, mapping).mapperService(); - assertThat(mapperService.fieldType("field1").getTextSearchInfo().similarity().get(), instanceOf(DFRSimilarity.class)); + assertThat(mapperService.mappedField("field1").getTextSearchInfo().similarity().get(), instanceOf(DFRSimilarity.class)); - DFRSimilarity similarity = (DFRSimilarity) mapperService.fieldType("field1").getTextSearchInfo().similarity().get(); + DFRSimilarity similarity = (DFRSimilarity) mapperService.mappedField("field1").getTextSearchInfo().similarity().get(); assertThat(similarity.getBasicModel(), instanceOf(BasicModelG.class)); assertThat(similarity.getAfterEffect(), instanceOf(AfterEffectL.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -150,9 +150,9 @@ public void testResolveSimilaritiesFromMapping_IB() throws IOException { .put("index.similarity.my_similarity.normalization.h2.c", 3f) .build(); MapperService mapperService = createIndex("foo", indexSettings, mapping).mapperService(); - assertThat(mapperService.fieldType("field1").getTextSearchInfo().similarity().get(), instanceOf(IBSimilarity.class)); + assertThat(mapperService.mappedField("field1").getTextSearchInfo().similarity().get(), instanceOf(IBSimilarity.class)); - IBSimilarity similarity = (IBSimilarity) mapperService.fieldType("field1").getTextSearchInfo().similarity().get(); + IBSimilarity similarity = (IBSimilarity) 
mapperService.mappedField("field1").getTextSearchInfo().similarity().get(); assertThat(similarity.getDistribution(), instanceOf(DistributionSPL.class)); assertThat(similarity.getLambda(), instanceOf(LambdaTTF.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -177,10 +177,10 @@ public void testResolveSimilaritiesFromMapping_DFI() throws IOException { .put("index.similarity.my_similarity.independence_measure", "chisquared") .build(); MapperService mapperService = createIndex("foo", indexSettings, mapping).mapperService(); - MappedFieldType fieldType = mapperService.fieldType("field1"); + MappedField mappedField = mapperService.mappedField("field1"); - assertThat(fieldType.getTextSearchInfo().similarity().get(), instanceOf(DFISimilarity.class)); - DFISimilarity similarity = (DFISimilarity) fieldType.getTextSearchInfo().similarity().get(); + assertThat(mappedField.getTextSearchInfo().similarity().get(), instanceOf(DFISimilarity.class)); + DFISimilarity similarity = (DFISimilarity) mappedField.getTextSearchInfo().similarity().get(); assertThat(similarity.getIndependence(), instanceOf(IndependenceChiSquared.class)); } @@ -203,9 +203,12 @@ public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException .build(); MapperService mapperService = createIndex("foo", indexSettings, mapping).mapperService(); - assertThat(mapperService.fieldType("field1").getTextSearchInfo().similarity().get(), instanceOf(LMDirichletSimilarity.class)); + assertThat(mapperService.mappedField("field1").getTextSearchInfo().similarity().get(), instanceOf(LMDirichletSimilarity.class)); - LMDirichletSimilarity similarity = (LMDirichletSimilarity) mapperService.fieldType("field1").getTextSearchInfo().similarity().get(); + LMDirichletSimilarity similarity = (LMDirichletSimilarity) mapperService.mappedField("field1") + .getTextSearchInfo() + .similarity() + .get(); assertThat(similarity.getMu(), equalTo(3000f)); } @@ -227,9 +230,9 @@ public void 
testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOExcept .put("index.similarity.my_similarity.lambda", 0.7f) .build(); MapperService mapperService = createIndex("foo", indexSettings, mapping).mapperService(); - assertThat(mapperService.fieldType("field1").getTextSearchInfo().similarity().get(), instanceOf(LMJelinekMercerSimilarity.class)); + assertThat(mapperService.mappedField("field1").getTextSearchInfo().similarity().get(), instanceOf(LMJelinekMercerSimilarity.class)); - LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) mapperService.fieldType("field1") + LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) mapperService.mappedField("field1") .getTextSearchInfo() .similarity() .get(); diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java index 8ad4593602a25..bac0fd92c46dc 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java @@ -24,7 +24,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MockFieldMapper; import org.elasticsearch.test.ESTestCase; @@ -89,7 +89,7 @@ private > void assertSearchCollapse( IndexSearcher searcher = newSearcher(reader); SortField sortField = dvProducers.sortField(reverseSort); - MappedFieldType fieldType = new MockFieldMapper.FakeFieldType(sortField.getField()); + MappedField mappedField = new MappedField(sortField.getField(), new MockFieldMapper.FakeFieldType()); Sort sort = new Sort(sortField); Comparator comparator = 
reverseSort ? Collections.reverseOrder() : Comparator.naturalOrder(); @@ -104,8 +104,8 @@ private > void assertSearchCollapse( FieldDoc after = new FieldDoc(Integer.MAX_VALUE, 0, new Object[] { sortedValues.get(randomIndex) }); SinglePassGroupingCollector collapsingCollector = numeric - ? SinglePassGroupingCollector.createNumeric("field", fieldType, sort, expectedNumGroups, after) - : SinglePassGroupingCollector.createKeyword("field", fieldType, sort, expectedNumGroups, after); + ? SinglePassGroupingCollector.createNumeric("field", mappedField, sort, expectedNumGroups, after) + : SinglePassGroupingCollector.createKeyword("field", mappedField, sort, expectedNumGroups, after); TopFieldCollector topFieldCollector = TopFieldCollector.create(sort, totalHits, after, Integer.MAX_VALUE); Query query = new MatchAllDocsQuery(); diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java index 8dd7ed9c21896..7cf234a53ac92 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.tests.search.CheckHits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MockFieldMapper; import org.elasticsearch.test.ESTestCase; @@ -109,7 +109,7 @@ private > void assertSearchCollapse( final SortField sort2 = new SortField("sort2", SortField.Type.LONG); Sort sort = new Sort(sort1, sort2, collapseField); - MappedFieldType fieldType = new MockFieldMapper.FakeFieldType(collapseField.getField()); + MappedField mappedField = new MappedField(collapseField.getField(), new 
MockFieldMapper.FakeFieldType()); int expectedNumGroups = values.size(); @@ -117,7 +117,7 @@ private > void assertSearchCollapse( if (numeric) { collapsingCollector = SinglePassGroupingCollector.createNumeric( collapseField.getField(), - fieldType, + mappedField, sort, expectedNumGroups, null @@ -125,7 +125,7 @@ private > void assertSearchCollapse( } else { collapsingCollector = SinglePassGroupingCollector.createKeyword( collapseField.getField(), - fieldType, + mappedField, sort, expectedNumGroups, null @@ -198,9 +198,9 @@ private > void assertSearchCollapse( final SegmentSearcher subSearcher = subSearchers[shardIDX]; final SinglePassGroupingCollector c; if (numeric) { - c = SinglePassGroupingCollector.createNumeric(collapseField.getField(), fieldType, sort, expectedNumGroups, null); + c = SinglePassGroupingCollector.createNumeric(collapseField.getField(), mappedField, sort, expectedNumGroups, null); } else { - c = SinglePassGroupingCollector.createKeyword(collapseField.getField(), fieldType, sort, expectedNumGroups, null); + c = SinglePassGroupingCollector.createKeyword(collapseField.getField(), mappedField, sort, expectedNumGroups, null); } subSearcher.search(weight, c); shardHits[shardIDX] = c.getTopGroups(0); @@ -378,7 +378,7 @@ public void testEmptyNumericSegment() throws Exception { final IndexReader reader = w.getReader(); final IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new MockFieldMapper.FakeFieldType("group"); + MappedField mappedField = new MappedField("group", new MockFieldMapper.FakeFieldType()); SortField sortField = new SortField("group", SortField.Type.LONG); sortField.setMissingValue(Long.MAX_VALUE); @@ -386,7 +386,7 @@ public void testEmptyNumericSegment() throws Exception { final SinglePassGroupingCollector collapsingCollector = SinglePassGroupingCollector.createNumeric( "group", - fieldType, + mappedField, sort, 10, null @@ -425,13 +425,13 @@ public void testEmptySortedSegment() throws Exception { final 
IndexReader reader = w.getReader(); final IndexSearcher searcher = newSearcher(reader); - MappedFieldType fieldType = new MockFieldMapper.FakeFieldType("group"); + MappedField mappedField = new MappedField("group", new MockFieldMapper.FakeFieldType()); Sort sort = new Sort(new SortField("group", SortField.Type.STRING)); final SinglePassGroupingCollector collapsingCollector = SinglePassGroupingCollector.createKeyword( "group", - fieldType, + mappedField, sort, 10, null diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index 08c6dfbbd1f2b..95aad468cdb1c 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -27,7 +27,7 @@ import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.ParsedQuery; @@ -274,7 +274,7 @@ public ScrollContext scrollContext() { assertEquals(context3.query(), context3.buildFilteredQuery(parsedQuery.query())); when(searchExecutionContext.getIndexSettings()).thenReturn(indexSettings); - when(searchExecutionContext.getFieldType(anyString())).thenReturn(mock(MappedFieldType.class)); + when(searchExecutionContext.getMappedField(anyString())).thenReturn(mock(MappedField.class)); readerContext.close(); readerContext = new ReaderContext( diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java index e1cf3fbc2ed96..6b7480e8bfcfe 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; @@ -69,7 +70,8 @@ private boolean needsScores(AggregationBuilder builder) throws IOException { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); DirectoryReader reader = indexWriter.getReader() ) { - return createAggregator(builder, new IndexSearcher(reader), new KeywordFieldType("f")).scoreMode().needsScores(); + return createAggregator(builder, new IndexSearcher(reader), new MappedField("f", new KeywordFieldType())).scoreMode() + .needsScores(); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java index 5a6d95cf98e69..079296a680ac6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -78,20 +78,11 @@ private ValuesSourceConfig getVSConfig( boolean indexed, 
AggregationContext context ) { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType( + MappedField mappedField = new MappedField( fieldName, - numType, - indexed, - false, - true, - false, - null, - Collections.emptyMap(), - null, - false, - null + new NumberFieldMapper.NumberFieldType(numType, indexed, false, true, false, null, Collections.emptyMap(), null, false, null) ); - return ValuesSourceConfig.resolveFieldOnly(ft, context); + return ValuesSourceConfig.resolveFieldOnly(mappedField, context); } private ValuesSourceConfig getVSConfig( @@ -100,18 +91,20 @@ private ValuesSourceConfig getVSConfig( boolean indexed, AggregationContext context ) { - MappedFieldType ft = new DateFieldMapper.DateFieldType( + MappedField mappedField = new MappedField( fieldName, - indexed, - false, - true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - resolution, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + indexed, + false, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + resolution, + null, + null, + Collections.emptyMap() + ) ); - return ValuesSourceConfig.resolveFieldOnly(ft, context); + return ValuesSourceConfig.resolveFieldOnly(mappedField, context); } public void testShortcutIsApplicable() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java index bd73ce5fe63ba..2377c8fbf5272 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import 
org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -41,7 +42,7 @@ public BucketsAggregator buildMergeAggregator() throws IOException { AggregationContext context = createAggregationContext( indexSearcher, null, - new NumberFieldMapper.NumberFieldType("test", NumberFieldMapper.NumberType.INTEGER) + new MappedField("test", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)) ); return new BucketsAggregator("test", AggregatorFactories.EMPTY, context, null, null, null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DocCountProviderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DocCountProviderTests.java index 666ab670bc124..455f348c52799 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DocCountProviderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DocCountProviderTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.CustomTermFreqField; import org.elasticsearch.index.mapper.DocCountFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -82,8 +82,8 @@ public void testQueryFiltering() throws IOException { private void testAggregation(Query query, CheckedConsumer indexer, Consumer verify) throws IOException { AggregationBuilder builder = new FilterAggregationBuilder("f", new MatchAllQueryBuilder()); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD, NumberFieldMapper.NumberType.LONG); - MappedFieldType docCountFieldType = new DocCountFieldMapper.DocCountFieldType(); - testCase(builder, query, indexer, 
verify, fieldType, docCountFieldType); + MappedField mappedField = new MappedField(NUMBER_FIELD, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField docCountField = new MappedField(DocCountFieldMapper.NAME, new DocCountFieldMapper.DocCountFieldType()); + testCase(builder, query, indexer, verify, mappedField, docCountField); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java index bc011c188652d..4b12da536677c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; @@ -63,11 +63,11 @@ private void testCase( ) throws IOException { GlobalAggregationBuilder aggregationBuilder = new GlobalAggregationBuilder("_name"); aggregationBuilder.subAggregation(new MinAggregationBuilder("in_global").field("number")); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); testCase(aggregationBuilder, topLevelQuery, buildIndex, (InternalGlobal result) -> { Min min = result.getAggregations().get("in_global"); verify.accept(result, min); - }, fieldType); + }, mappedField); } } diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 9dd973bc9eb9d..bcb5d03c56a72 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -120,23 +121,23 @@ import static org.hamcrest.Matchers.nullValue; public class CompositeAggregatorTests extends AggregatorTestCase { - private static MappedFieldType[] FIELD_TYPES; + private static MappedField[] MAPPED_FIELDS; private List objectMappers; @Override @Before public void setUp() throws Exception { super.setUp(); - FIELD_TYPES = new MappedFieldType[9]; - FIELD_TYPES[0] = new KeywordFieldMapper.KeywordFieldType("keyword"); - FIELD_TYPES[1] = new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG); - FIELD_TYPES[2] = new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE); - FIELD_TYPES[3] = new DateFieldMapper.DateFieldType("date", DateFormatter.forPattern("yyyy-MM-dd||epoch_millis")); - FIELD_TYPES[4] = new NumberFieldMapper.NumberFieldType("price", NumberFieldMapper.NumberType.INTEGER); - FIELD_TYPES[5] = new KeywordFieldMapper.KeywordFieldType("terms"); - FIELD_TYPES[6] = new IpFieldMapper.IpFieldType("ip"); - FIELD_TYPES[7] = new GeoPointFieldMapper.GeoPointFieldType("geo_point"); - FIELD_TYPES[8] = 
TimeSeriesIdFieldMapper.FIELD_TYPE; + MAPPED_FIELDS = new MappedField[9]; + MAPPED_FIELDS[0] = new MappedField("keyword", new KeywordFieldMapper.KeywordFieldType()); + MAPPED_FIELDS[1] = new MappedField("long", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MAPPED_FIELDS[2] = new MappedField("double", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); + MAPPED_FIELDS[3] = new MappedField("date", new DateFieldMapper.DateFieldType(DateFormatter.forPattern("yyyy-MM-dd||epoch_millis"))); + MAPPED_FIELDS[4] = new MappedField("price", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MAPPED_FIELDS[5] = new MappedField("terms", new KeywordFieldMapper.KeywordFieldType()); + MAPPED_FIELDS[6] = new MappedField("ip", new IpFieldMapper.IpFieldType()); + MAPPED_FIELDS[7] = new MappedField("geo_point", new GeoPointFieldMapper.GeoPointFieldType()); + MAPPED_FIELDS[8] = new MappedField(TimeSeriesIdFieldMapper.NAME, TimeSeriesIdFieldMapper.FIELD_TYPE); objectMappers = new ArrayList<>(); } @@ -145,7 +146,7 @@ public void setUp() throws Exception { @After public void tearDown() throws Exception { super.tearDown(); - FIELD_TYPES = null; + MAPPED_FIELDS = null; objectMappers = null; } @@ -742,7 +743,7 @@ public void testUsingTestCase() throws Exception { assertEquals(2L, result.getBuckets().get(1).getDocCount()); assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); - }, FIELD_TYPES); + }, MAPPED_FIELDS); } /** @@ -791,8 +792,8 @@ public void testSubAggregationOfNested() throws Exception { assertEquals("{keyword=Stationary}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); }, - new KeywordFieldMapper.KeywordFieldType(nestedPath + "." 
+ leafNameField), - new NumberFieldMapper.NumberFieldType("price", NumberFieldMapper.NumberType.LONG) + new MappedField(nestedPath + "." + leafNameField, new KeywordFieldMapper.KeywordFieldType()), + new MappedField("price", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)) ); } @@ -841,8 +842,8 @@ public void testSubAggregationOfNestedAggregateAfter() throws Exception { assertEquals("{keyword=Stationary}", result.getBuckets().get(0).getKeyAsString()); assertEquals(1L, result.getBuckets().get(0).getDocCount()); }, - new KeywordFieldMapper.KeywordFieldType(nestedPath + "." + leafNameField), - new NumberFieldMapper.NumberFieldType("price", NumberFieldMapper.NumberType.LONG) + new MappedField(nestedPath + "." + leafNameField, new KeywordFieldMapper.KeywordFieldType()), + new MappedField("price", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)) ); } @@ -3249,8 +3250,7 @@ private void execu List> verify ) throws IOException { assert create.size() == verify.size() : "create and verify should be the same size"; - Map types = Arrays.stream(FIELD_TYPES) - .collect(Collectors.toMap(MappedFieldType::name, Function.identity())); + Map types = Arrays.stream(MAPPED_FIELDS).collect(Collectors.toMap(MappedField::name, Function.identity())); Sort indexSort = useIndexSort ? 
buildIndexSort(sources, types) : null; IndexSettings indexSettings = createIndexSettings(indexSort); try (Directory directory = newDirectory()) { @@ -3291,7 +3291,7 @@ private void execu try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = new IndexSearcher(indexReader); for (int i = 0; i < create.size(); i++) { - verify.get(i).accept(searchAndReduce(indexSettings, indexSearcher, query, create.get(i).get(), FIELD_TYPES)); + verify.get(i).accept(searchAndReduce(indexSettings, indexSearcher, query, create.get(i).get(), MAPPED_FIELDS)); } } } @@ -3393,25 +3393,25 @@ private static long asLong(String dateTime) { return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli(); } - private static Sort buildIndexSort(List> sources, Map fieldTypes) { + private static Sort buildIndexSort(List> sources, Map mappedFields) { List sortFields = new ArrayList<>(); - Map remainingFieldTypes = new HashMap<>(fieldTypes); + Map remainingMappedFields = new HashMap<>(mappedFields); List> sourcesToCreateSorts = randomBoolean() ? 
sources : sources.subList(0, 1); for (CompositeValuesSourceBuilder source : sourcesToCreateSorts) { - MappedFieldType type = fieldTypes.remove(source.field()); - remainingFieldTypes.remove(source.field()); - SortField sortField = sortFieldFrom(type); + MappedField mappedField = mappedFields.remove(source.field()); + remainingMappedFields.remove(source.field()); + SortField sortField = sortFieldFrom(mappedField); if (sortField == null) { break; } sortFields.add(sortField); } - while (remainingFieldTypes.size() > 0 && randomBoolean()) { + while (remainingMappedFields.size() > 0 && randomBoolean()) { // Add extra unused sorts - List fields = new ArrayList<>(remainingFieldTypes.keySet()); + List fields = new ArrayList<>(remainingMappedFields.keySet()); Collections.sort(fields); String field = fields.get(between(0, fields.size() - 1)); - SortField sortField = sortFieldFrom(remainingFieldTypes.remove(field)); + SortField sortField = sortFieldFrom(remainingMappedFields.remove(field)); if (sortField != null) { sortFields.add(sortField); } @@ -3419,16 +3419,20 @@ private static Sort buildIndexSort(List> sources return sortFields.size() > 0 ? 
new Sort(sortFields.toArray(SortField[]::new)) : null; } - private static SortField sortFieldFrom(MappedFieldType type) { + private static SortField sortFieldFrom(MappedField mappedField) { + if (mappedField == null) { + return null; + } + MappedFieldType type = mappedField.type(); if (type instanceof KeywordFieldMapper.KeywordFieldType) { - return new SortedSetSortField(type.name(), false); + return new SortedSetSortField(mappedField.name(), false); } else if (type instanceof DateFieldMapper.DateFieldType) { - return new SortedNumericSortField(type.name(), SortField.Type.LONG, false); + return new SortedNumericSortField(mappedField.name(), SortField.Type.LONG, false); } else if (type instanceof NumberFieldMapper.NumberFieldType) { return switch (type.typeName()) { - case "byte", "short", "integer" -> new SortedNumericSortField(type.name(), SortField.Type.INT, false); - case "long" -> new SortedNumericSortField(type.name(), SortField.Type.LONG, false); - case "float", "double" -> new SortedNumericSortField(type.name(), SortField.Type.DOUBLE, false); + case "byte", "short", "integer" -> new SortedNumericSortField(mappedField.name(), SortField.Type.INT, false); + case "long" -> new SortedNumericSortField(mappedField.name(), SortField.Type.LONG, false); + case "float", "double" -> new SortedNumericSortField(mappedField.name(), SortField.Type.DOUBLE, false); default -> null; }; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index c7a396a23965d..4cb4e16da416c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.common.util.BigArrays; 
import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -54,11 +54,11 @@ public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { static class ClassAndName { - final MappedFieldType fieldType; + final MappedField mappedField; final Class> clazz; - ClassAndName(MappedFieldType fieldType, Class> clazz) { - this.fieldType = fieldType; + ClassAndName(MappedField mappedField, Class> clazz) { + this.mappedField = mappedField; this.clazz = clazz; } } @@ -161,21 +161,21 @@ private void testRandomCase(boolean forceMerge, boolean missingBucket, int index values = new Comparable[numValues]; if (type.clazz == Long.class) { if (i < indexSortSourcePrefix) { - indexSortFields[i] = new SortedNumericSortField(type.fieldType.name(), SortField.Type.LONG); + indexSortFields[i] = new SortedNumericSortField(type.mappedField.name(), SortField.Type.LONG); } for (int j = 0; j < numValues; j++) { values[j] = randomLong(); } } else if (type.clazz == Double.class) { if (i < indexSortSourcePrefix) { - indexSortFields[i] = new SortedNumericSortField(type.fieldType.name(), SortField.Type.DOUBLE); + indexSortFields[i] = new SortedNumericSortField(type.mappedField.name(), SortField.Type.DOUBLE); } for (int j = 0; j < numValues; j++) { values[j] = randomDouble(); } } else if (type.clazz == BytesRef.class) { if (i < indexSortSourcePrefix) { - indexSortFields[i] = new SortedSetSortField(type.fieldType.name(), false); + indexSortFields[i] = new SortedSetSortField(type.mappedField.name(), false); } for (int j = 0; j < numValues; j++) { values[j] = new BytesRef(randomAlphaOfLengthBetween(5, 50)); @@ -210,19 +210,19 @@ private void testRandomCase(boolean 
forceMerge, boolean missingBucket, int index values.add(possibleValues.get(j)[randomIntBetween(0, possibleValues.get(j).length - 1)]); if (types[j].clazz == Long.class) { long value = (Long) values.get(k); - document.add(new SortedNumericDocValuesField(types[j].fieldType.name(), value)); - document.add(new LongPoint(types[j].fieldType.name(), value)); + document.add(new SortedNumericDocValuesField(types[j].mappedField.name(), value)); + document.add(new LongPoint(types[j].mappedField.name(), value)); } else if (types[j].clazz == Double.class) { document.add( new SortedNumericDocValuesField( - types[j].fieldType.name(), + types[j].mappedField.name(), NumericUtils.doubleToSortableLong((Double) values.get(k)) ) ); } else if (types[j].clazz == BytesRef.class) { BytesRef value = (BytesRef) values.get(k); - document.add(new SortedSetDocValuesField(types[j].fieldType.name(), (BytesRef) values.get(k))); - document.add(new TextField(types[j].fieldType.name(), value.utf8ToString(), Field.Store.NO)); + document.add(new SortedSetDocValuesField(types[j].mappedField.name(), (BytesRef) values.get(k))); + document.add(new TextField(types[j].mappedField.name(), value.utf8ToString(), Field.Store.NO)); } else { assert (false); } @@ -244,12 +244,12 @@ private void testRandomCase(boolean forceMerge, boolean missingBucket, int index int size = keys.size() > 1 ? 
randomIntBetween(1, keys.size()) : 1; SingleDimensionValuesSource[] sources = new SingleDimensionValuesSource[types.length]; for (int i = 0; i < types.length; i++) { - final MappedFieldType fieldType = types[i].fieldType; + final MappedField mappedField = types[i].mappedField; if (types[i].clazz == Long.class) { sources[i] = new LongValuesSource( bigArrays, - fieldType, - context -> DocValues.getSortedNumeric(context.reader(), fieldType.name()), + mappedField, + context -> DocValues.getSortedNumeric(context.reader(), mappedField.name()), value -> value, DocValueFormat.RAW, missingBucket, @@ -260,8 +260,8 @@ private void testRandomCase(boolean forceMerge, boolean missingBucket, int index } else if (types[i].clazz == Double.class) { sources[i] = new DoubleValuesSource( bigArrays, - fieldType, - context -> FieldData.sortableLongBitsToDoubles(DocValues.getSortedNumeric(context.reader(), fieldType.name())), + mappedField, + context -> FieldData.sortableLongBitsToDoubles(DocValues.getSortedNumeric(context.reader(), mappedField.name())), DocValueFormat.RAW, missingBucket, MissingOrder.DEFAULT, @@ -274,8 +274,8 @@ private void testRandomCase(boolean forceMerge, boolean missingBucket, int index // since ordinals are global in this case. 
sources[i] = new GlobalOrdinalValuesSource( bigArrays, - fieldType, - context -> DocValues.getSortedSet(context.reader(), fieldType.name()), + mappedField, + context -> DocValues.getSortedSet(context.reader(), mappedField.name()), DocValueFormat.RAW, missingBucket, MissingOrder.DEFAULT, @@ -286,8 +286,8 @@ private void testRandomCase(boolean forceMerge, boolean missingBucket, int index sources[i] = new BinaryValuesSource( bigArrays, (b) -> {}, - fieldType, - context -> FieldData.toString(DocValues.getSortedSet(context.reader(), fieldType.name())), + mappedField, + context -> FieldData.toString(DocValues.getSortedSet(context.reader(), mappedField.name())), DocValueFormat.RAW, missingBucket, MissingOrder.DEFAULT, @@ -357,12 +357,12 @@ public void collect(int doc, long bucket) throws IOException { } } - private static MappedFieldType createNumber(String name, NumberFieldMapper.NumberType type) { - return new NumberFieldMapper.NumberFieldType(name, type); + private static MappedField createNumber(String name, NumberFieldMapper.NumberType type) { + return new MappedField(name, new NumberFieldMapper.NumberFieldType(type)); } - private static MappedFieldType createKeyword(String name) { - return new KeywordFieldMapper.KeywordFieldType(name); + private static MappedField createKeyword(String name) { + return new MappedField(name, new KeywordFieldMapper.KeywordFieldType()); } private static int compareKey(CompositeKey key1, CompositeKey key2) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java index 56b430c4ee662..af655c3761e86 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java @@ -20,7 +20,7 
@@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.test.ESTestCase; @@ -30,7 +30,7 @@ public class SingleDimensionValuesSourceTests extends ESTestCase { public void testBinarySorted() { - MappedFieldType keyword = new KeywordFieldMapper.KeywordFieldType("keyword"); + MappedField keyword = new MappedField("keyword", new KeywordFieldMapper.KeywordFieldType()); BinaryValuesSource source = new BinaryValuesSource( BigArrays.NON_RECYCLING_INSTANCE, (b) -> {}, @@ -76,7 +76,7 @@ public void testBinarySorted() { ); assertNull(source.createSortedDocsProducerOrNull(reader, null)); - MappedFieldType ip = new IpFieldMapper.IpFieldType("ip"); + MappedField ip = new MappedField("ip", new IpFieldMapper.IpFieldType()); source = new BinaryValuesSource( BigArrays.NON_RECYCLING_INSTANCE, (b) -> {}, @@ -92,7 +92,7 @@ public void testBinarySorted() { } public void testGlobalOrdinalsSorted() { - final MappedFieldType keyword = new KeywordFieldMapper.KeywordFieldType("keyword"); + MappedField keyword = new MappedField("keyword", new KeywordFieldMapper.KeywordFieldType()); GlobalOrdinalValuesSource source = new GlobalOrdinalValuesSource( BigArrays.NON_RECYCLING_INSTANCE, keyword, @@ -137,7 +137,7 @@ public void testGlobalOrdinalsSorted() { assertNull(source.createSortedDocsProducerOrNull(reader, null)); assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("foo", "bar")))); - final MappedFieldType ip = new IpFieldMapper.IpFieldType("ip"); + final MappedField ip = new MappedField("ip", new IpFieldMapper.IpFieldType()); source = new GlobalOrdinalValuesSource( BigArrays.NON_RECYCLING_INSTANCE, ip, @@ -154,7 +154,7 @@ public 
void testGlobalOrdinalsSorted() { public void testNumericSorted() { for (NumberFieldMapper.NumberType numberType : NumberFieldMapper.NumberType.values()) { - MappedFieldType number = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); + MappedField number = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); final SingleDimensionValuesSource source; if (numberType == NumberFieldMapper.NumberType.BYTE || numberType == NumberFieldMapper.NumberType.SHORT diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java index 86ac87363463b..4bc0bf843b3c5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java @@ -16,7 +16,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -36,12 +36,12 @@ import static org.hamcrest.Matchers.lessThan; public class FilterAggregatorTests extends AggregatorTestCase { - private MappedFieldType fieldType; + private MappedField mappedField; @Before public void setUpTest() throws Exception { super.setUp(); - fieldType = new KeywordFieldMapper.KeywordFieldType("field"); + mappedField = new MappedField("field", new KeywordFieldMapper.KeywordFieldType()); } public void testEmpty() throws Exception { @@ -52,7 +52,7 @@ public void testEmpty() throws Exception { IndexSearcher 
indexSearcher = newSearcher(indexReader, true, true); QueryBuilder filter = QueryBuilders.termQuery("field", randomAlphaOfLength(5)); FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter); - InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); + InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, mappedField); assertEquals(response.getDocCount(), 0); assertFalse(AggregationInspectionHelper.hasValue(response)); indexReader.close(); @@ -87,7 +87,7 @@ public void testRandom() throws Exception { QueryBuilder filter = QueryBuilders.termQuery("field", Integer.toString(value)); FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter); - final InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); + final InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, mappedField); assertEquals(response.getDocCount(), (long) expectedBucketCount[value]); if (expectedBucketCount[value] > 0) { assertTrue(AggregationInspectionHelper.hasValue(response)); @@ -108,7 +108,7 @@ public void testBucketComparator() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); FilterAggregationBuilder builder = new FilterAggregationBuilder("test", new MatchAllQueryBuilder()); - FilterAggregator agg = createAggregator(builder, indexSearcher, fieldType); + FilterAggregator agg = createAggregator(builder, indexSearcher, mappedField); agg.preCollection(); LeafBucketCollector collector = agg.getLeafCollector( new AggregationExecutionContext(indexReader.leaves().get(0), null, null) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java index ad80997a1d588..2511833e70089 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java @@ -45,7 +45,7 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.index.mapper.ObjectMapper; @@ -123,7 +123,7 @@ public void testEmpty() throws Exception { indexSearcher, new MatchAllDocsQuery(), builder, - new KeywordFieldMapper.KeywordFieldType("field") + new MappedField("field", new KeywordFieldMapper.KeywordFieldType()) ); assertEquals(response.getBuckets().size(), numFilters); for (InternalFilters.InternalBucket filter : response.getBuckets()) { @@ -173,7 +173,7 @@ public void testNoFiltersWithSubAggs() throws IOException { new MatchAllDocsQuery(), iw -> { iw.addDocument(List.of(new SortedNumericDocValuesField("i", 1))); }, (InternalFilters result) -> { assertThat(result.getBuckets(), hasSize(0)); }, - new NumberFieldMapper.NumberFieldType("m", NumberFieldMapper.NumberType.INTEGER) + new MappedField("m", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)) ); } @@ -224,7 +224,7 @@ public void testKeyedFilter() throws Exception { indexSearcher, new MatchAllDocsQuery(), builder, - new KeywordFieldMapper.KeywordFieldType("field") + new MappedField("field", new KeywordFieldMapper.KeywordFieldType()) ); assertEquals(filters.getBuckets().size(), 7); 
assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2); @@ -284,7 +284,7 @@ public void testRandom() throws Exception { indexSearcher, new MatchAllDocsQuery(), builder, - new KeywordFieldMapper.KeywordFieldType("field") + new MappedField("field", new KeywordFieldMapper.KeywordFieldType()) ); List buckets = response.getBuckets(); assertEquals(buckets.size(), filters.length + 1); @@ -310,8 +310,17 @@ public void testRandom() throws Exception { * Test that we perform the appropriate unwrapping to merge queries. */ public void testMergingQueries() throws IOException { - DateFieldMapper.DateFieldType ft = new DateFieldMapper.DateFieldType("test"); - Query topLevelQuery = ft.rangeQuery("2020-01-01", "2020-02-01", true, true, null, null, null, mock(SearchExecutionContext.class)); + MappedField mappedField = new MappedField("test", new DateFieldMapper.DateFieldType()); + Query topLevelQuery = mappedField.rangeQuery( + "2020-01-01", + "2020-02-01", + true, + true, + null, + null, + null, + mock(SearchExecutionContext.class) + ); FiltersAggregationBuilder builder = new FiltersAggregationBuilder( "t", // The range query will be wrapped in IndexOrDocValuesQuery by the date field type @@ -343,11 +352,11 @@ public void testMergingQueries() throws IOException { Map debug = new HashMap<>(); filter.collectDebugInfo(debug::put); assertMap(debug, matchesMap().extraOk().entry("query", ((IndexOrDocValuesQuery) topLevelQuery).getIndexQuery().toString())); - }, ft); + }, mappedField); } public void testWithMergedPointRangeQueries() throws IOException { - MappedFieldType ft = new DateFieldMapper.DateFieldType("test", Resolution.MILLISECONDS); + MappedField mappedField = new MappedField("test", new DateFieldMapper.DateFieldType(Resolution.MILLISECONDS)); AggregationBuilder builder = new FiltersAggregationBuilder( "test", new KeyedFilter("q1", new RangeQueryBuilder("test").from("2020-01-01").to("2020-03-01").includeUpper(false)) @@ -364,20 +373,22 @@ public void 
testWithMergedPointRangeQueries() throws IOException { InternalFilters filters = (InternalFilters) result; assertThat(filters.getBuckets(), hasSize(1)); assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(1L)); - }, ft); + }, mappedField); } public void testRangeFilter() throws IOException { - MappedFieldType ft = new DateFieldMapper.DateFieldType( + MappedField mappedField = new MappedField( "test", - true, - false, - false, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + true, + false, + false, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); AggregationBuilder builder = new FiltersAggregationBuilder( "test", @@ -418,7 +429,7 @@ public void testRangeFilter() throws IOException { ) ); }, - ft + mappedField ); }); } @@ -427,7 +438,7 @@ public void testRangeFilter() throws IOException { * Tests a filter that needs the cache to be fast. */ public void testPhraseFilter() throws IOException { - MappedFieldType ft = new TextFieldMapper.TextFieldType("test"); + MappedField mappedField = new MappedField("test", new TextFieldMapper.TextFieldType()); AggregationBuilder builder = new FiltersAggregationBuilder( "test", new KeyedFilter("q1", new MatchPhraseQueryBuilder("test", "will find me").slop(0)) @@ -477,7 +488,7 @@ public void onUse(Query query) {} ) ); }, - ft + mappedField ); } }); @@ -488,7 +499,7 @@ public void onUse(Query query) {} * matches it. 
*/ public void testNested() throws IOException { - KeywordFieldType ft = new KeywordFieldType("author"); + MappedField mappedField = new MappedField("author", new KeywordFieldType()); CheckedConsumer buildIndex = iw -> iw.addDocuments( NestedAggregatorTests.generateBook("test", new String[] { "foo", "bar" }, new int[] { 5, 10, 15, 20 }) ); @@ -501,7 +512,7 @@ public void testNested() throws IOException { assertThat(filters.getBuckets(), hasSize(1)); assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(1L)); }, - ft + mappedField ); testCase( new FiltersAggregationBuilder("test", new KeyedFilter("q1", new MatchAllQueryBuilder())), @@ -512,7 +523,7 @@ public void testNested() throws IOException { assertThat(filters.getBuckets(), hasSize(1)); assertThat(filters.getBucketByKey("q1").getDocCount(), equalTo(1L)); }, - ft + mappedField ); } @@ -629,7 +640,7 @@ public void testTwoTermsWithDocCount() throws IOException { ) ); }, - new KeywordFieldType("a") + new MappedField("a", new KeywordFieldMapper.KeywordFieldType()) ); } @@ -719,7 +730,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} * {@code "segments_counted_in_constant_time", lessThan(searcher.getLeafContexts().size())}. 
*/ public void testTermOnFilteredIndex() throws IOException { - KeywordFieldType ft = new KeywordFieldType("foo"); + MappedField mappedField = new MappedField("foo", new KeywordFieldMapper.KeywordFieldType()); AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", new TermQueryBuilder("foo", "bar"))); try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -743,7 +754,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} LongPoint.newRangeQuery("t", 5, Long.MAX_VALUE) ); IndexSearcher searcher = newIndexSearcher(limitedReader); - AggregationContext context = createAggregationContext(searcher, new MatchAllDocsQuery(), ft); + AggregationContext context = createAggregationContext(searcher, new MatchAllDocsQuery(), mappedField); FilterByFilterAggregator aggregator = createAggregator(builder, context); aggregator.preCollection(); searcher.search(context.query(), aggregator); @@ -785,7 +796,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} * always take the fast path in filter-by-filter. 
*/ public void testTermOnFilterWithMatchAll() throws IOException { - KeywordFieldType ft = new KeywordFieldType("foo"); + MappedField mappedField = new MappedField("foo", new KeywordFieldMapper.KeywordFieldType()); AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", new TermQueryBuilder("foo", "bar"))); try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -809,7 +820,7 @@ public void onCache(ShardId shardId, Accountable accountable) {} LongPoint.newRangeQuery("t", Long.MIN_VALUE, Long.MAX_VALUE) ); IndexSearcher searcher = newIndexSearcher(limitedReader); - AggregationContext context = createAggregationContext(searcher, new MatchAllDocsQuery(), ft); + AggregationContext context = createAggregationContext(searcher, new MatchAllDocsQuery(), mappedField); FilterByFilterAggregator aggregator = createAggregator(builder, context); aggregator.preCollection(); searcher.search(context.query(), aggregator); @@ -845,18 +856,20 @@ public void onCache(ShardId shardId, Accountable accountable) {} } public void testComplexUnionDisabledFilterByFilter() throws IOException { - MappedFieldType dft = new DateFieldMapper.DateFieldType( + MappedField df = new MappedField( "date", - true, - false, - false, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + true, + false, + false, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); - MappedFieldType kft = new KeywordFieldType("kwd"); + MappedField kf = new MappedField("kwd", new KeywordFieldType()); AggregationBuilder builder = new FiltersAggregationBuilder( "test", new KeyedFilter("q1", new RangeQueryBuilder("date").from("2020-01-01").to("2020-03-01").includeUpper(false)) @@ -894,7 +907,7 @@ public void testComplexUnionDisabledFilterByFilter() throws IOException 
{ ) ) ); - }, dft, kft); + }, df, kf); } public void testMatchNoneFilter() throws IOException { @@ -972,7 +985,7 @@ public void testMatchNoneTopLevel() throws IOException { } public void testTermFilter() throws IOException { - KeywordFieldMapper.KeywordFieldType ft = new KeywordFieldMapper.KeywordFieldType("f", true, false, Collections.emptyMap()); + MappedField mappedField = new MappedField("f", new KeywordFieldMapper.KeywordFieldType(true, false, Collections.emptyMap())); AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", new MatchQueryBuilder("f", "0"))); CheckedConsumer buildIndex = iw -> { for (int i = 0; i < 10; i++) { @@ -1006,12 +1019,12 @@ public void testTermFilter() throws IOException { ) ); }, - ft + mappedField ); } public void testTermTopLevel() throws IOException { - KeywordFieldMapper.KeywordFieldType ft = new KeywordFieldMapper.KeywordFieldType("f", true, false, Collections.emptyMap()); + MappedField mappedField = new MappedField("f", new KeywordFieldMapper.KeywordFieldType(true, false, Collections.emptyMap())); AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", new MatchAllQueryBuilder())); CheckedConsumer buildIndex = iw -> { for (int i = 0; i < 10; i++) { @@ -1045,13 +1058,13 @@ public void testTermTopLevel() throws IOException { ) ); }, - ft + mappedField ); } public void testBoolThenDateTopLevel() throws IOException { - MappedFieldType ft = new DateFieldMapper.DateFieldType("test"); - FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(false); + MappedField mappedField = new MappedField("test", new DateFieldMapper.DateFieldType()); + MappedField fnf = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(false)); String start = "2010-01-02T00:00:00.000Z"; String middle = "2010-01-02T00:00:05.000Z"; @@ -1106,14 +1119,14 @@ public void testBoolThenDateTopLevel() throws IOException { ) ); }, - 
ft, - fnft + mappedField, + fnf ); } public void testBoolThenDateFilter() throws IOException { - MappedFieldType ft = new DateFieldMapper.DateFieldType("test"); - FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(false); + MappedField mappedField = new MappedField("test", new DateFieldMapper.DateFieldType()); + MappedField fnf = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(false)); String start = "2010-01-02T00:00:00.000Z"; String middle = "2010-01-02T00:00:05.000Z"; @@ -1165,14 +1178,14 @@ public void testBoolThenDateFilter() throws IOException { ) ); }, - ft, - fnft + mappedField, + fnf ); } public void testBoolWithMatchAllThenDateFilter() throws IOException { - MappedFieldType ft = new DateFieldMapper.DateFieldType("test"); - FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(false); + MappedField mappedField = new MappedField("test", new DateFieldMapper.DateFieldType()); + MappedField fnf = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(false)); String start = "2010-01-02T00:00:00.000Z"; String middle = "2010-01-02T00:00:05.000Z"; @@ -1227,24 +1240,26 @@ public void testBoolWithMatchAllThenDateFilter() throws IOException { ) ); }, - ft, - fnft + mappedField, + fnf ); } public void testSubAggs() throws IOException { - MappedFieldType dateFt = new DateFieldMapper.DateFieldType( + MappedField dateF = new MappedField( "test", - true, - false, - false, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + true, + false, + false, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); - MappedFieldType intFt = new NumberFieldMapper.NumberFieldType("int", NumberType.INTEGER); + MappedField intF = new MappedField("int", new 
NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AggregationBuilder builder = new FiltersAggregationBuilder( "test", new KeyedFilter("q1", new RangeQueryBuilder("test").from("2010-01-01").to("2010-03-01").includeUpper(false)), @@ -1312,24 +1327,26 @@ public void testSubAggs() throws IOException { ).entry("test.s", matchesMap()).entry("test.m", matchesMap()) ); }, - dateFt, - intFt + dateF, + intF ); } public void testSubAggsManyDocs() throws IOException { - MappedFieldType dateFt = new DateFieldMapper.DateFieldType( + MappedField dateF = new MappedField( "test", - true, - false, - false, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + true, + false, + false, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); - MappedFieldType intFt = new NumberFieldMapper.NumberFieldType("int", NumberType.INTEGER); + MappedField intF = new MappedField("int", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AggregationBuilder builder = new FiltersAggregationBuilder( "test", new KeyedFilter("q1", new RangeQueryBuilder("test").from("2010-01-01").to("2010-03-01").includeUpper(false)), @@ -1383,24 +1400,26 @@ public void testSubAggsManyDocs() throws IOException { ).entry("test.s", matchesMap()).entry("test.m", matchesMap()) ); }, - dateFt, - intFt + dateF, + intF ); } public void testSubAggsManyFilters() throws IOException { - MappedFieldType dateFt = new DateFieldMapper.DateFieldType( + MappedField dateF = new MappedField( "test", - true, - false, - false, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + true, + false, + false, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); - MappedFieldType intFt = new 
NumberFieldMapper.NumberFieldType("int", NumberType.INTEGER); + MappedField intF = new MappedField("int", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); List buckets = new ArrayList<>(); DateFormatter formatter = DateFormatter.forPattern("strict_date"); long start = formatter.parseMillis("2010-01-01"); @@ -1461,13 +1480,13 @@ public void testSubAggsManyFilters() throws IOException { ).entry("test.s", matchesMap()).entry("test.m", matchesMap()) ); }, - dateFt, - intFt + dateF, + intF ); } public void testDocValuesFieldExistsForDate() throws IOException { - DateFieldMapper.DateFieldType ft = new DateFieldMapper.DateFieldType("f"); + MappedField mappedField = new MappedField("f", new DateFieldMapper.DateFieldType()); QueryBuilder exists; if (randomBoolean()) { exists = new ExistsQueryBuilder("f"); @@ -1476,16 +1495,16 @@ public void testDocValuesFieldExistsForDate() throws IOException { exists = new RangeQueryBuilder("f").gte("2020-01-01").lt("2020-01-02"); } long start = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2020-01-01T00:00:01"); - docValuesFieldExistsTestCase(exists, ft, true, i -> { + docValuesFieldExistsTestCase(exists, mappedField, true, i -> { long date = start + TimeUnit.HOURS.toMillis(i); return List.of(new LongPoint("f", date), new NumericDocValuesField("f", date)); }); } public void testDocValuesFieldExistsForDateWithMultiValuedFields() throws IOException { - DateFieldMapper.DateFieldType ft = new DateFieldMapper.DateFieldType("f"); + MappedField mappedField = new MappedField("f", new DateFieldMapper.DateFieldType()); long start = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2020-01-01T00:00:01"); - docValuesFieldExistsTestCase(new ExistsQueryBuilder("f"), ft, true, i -> { + docValuesFieldExistsTestCase(new ExistsQueryBuilder("f"), mappedField, true, i -> { long date = start + TimeUnit.HOURS.toMillis(i); return List.of( new LongPoint("f", date), @@ -1497,25 +1516,16 @@ public void 
testDocValuesFieldExistsForDateWithMultiValuedFields() throws IOExce } public void testDocValuesFieldExistsForDateWithoutData() throws IOException { - docValuesFieldExistsNoDataTestCase(new DateFieldMapper.DateFieldType("f")); + docValuesFieldExistsNoDataTestCase(new MappedField("f", new DateFieldMapper.DateFieldType())); } public void testDocValuesFieldExistsForNumber() throws IOException { NumberFieldMapper.NumberType numberType = randomFrom(NumberFieldMapper.NumberType.values()); - NumberFieldMapper.NumberFieldType ft = new NumberFieldMapper.NumberFieldType( + MappedField mappedField = new MappedField( "f", - numberType, - true, - false, - true, - true, - null, - Map.of(), - null, - false, - null + new NumberFieldMapper.NumberFieldType(numberType, true, false, true, true, null, Map.of(), null, false, null) ); - docValuesFieldExistsTestCase(new ExistsQueryBuilder("f"), ft, true, i -> { + docValuesFieldExistsTestCase(new ExistsQueryBuilder("f"), mappedField, true, i -> { final LuceneDocument document = new LuceneDocument(); numberType.addFields(document, "f", i, true, true, false); return document; @@ -1524,43 +1534,45 @@ public void testDocValuesFieldExistsForNumber() throws IOException { public void testDocValuesFieldExistsForNumberWithoutData() throws IOException { docValuesFieldExistsNoDataTestCase( - new NumberFieldMapper.NumberFieldType( + new MappedField( "f", - randomFrom(NumberFieldMapper.NumberType.values()), - true, - false, - true, - true, - null, - Map.of(), - null, - false, - null + new NumberFieldMapper.NumberFieldType( + randomFrom(NumberFieldMapper.NumberType.values()), + true, + false, + true, + true, + null, + Map.of(), + null, + false, + null + ) ) ); } public void testDocValuesFieldExistsForKeyword() throws IOException { - KeywordFieldMapper.KeywordFieldType ft = new KeywordFieldMapper.KeywordFieldType("f", true, true, Map.of()); - docValuesFieldExistsTestCase(new ExistsQueryBuilder("f"), ft, true, i -> { + MappedField mappedField = new 
MappedField("f", new KeywordFieldMapper.KeywordFieldType(true, true, Map.of())); + docValuesFieldExistsTestCase(new ExistsQueryBuilder("f"), mappedField, true, i -> { BytesRef text = new BytesRef(randomAlphaOfLength(5)); return List.of(new Field("f", text, KeywordFieldMapper.Defaults.FIELD_TYPE), new SortedSetDocValuesField("f", text)); }); } public void testDocValuesFieldExistsForKeywordWithoutData() throws IOException { - docValuesFieldExistsNoDataTestCase(new KeywordFieldMapper.KeywordFieldType("f", true, true, Map.of())); + docValuesFieldExistsNoDataTestCase(new MappedField("f", new KeywordFieldMapper.KeywordFieldType(true, true, Map.of()))); } private void docValuesFieldExistsTestCase( QueryBuilder exists, - MappedFieldType fieldType, + MappedField mappedField, boolean countsResultsInConstantTime, IntFunction> buildDocWithField ) throws IOException { AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", exists)); // Exists queries convert to MatchNone if this isn't defined - FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(true); + MappedField fnf = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(true)); debugTestCase(builder, new MatchAllDocsQuery(), iw -> { for (int i = 0; i < 10; i++) { iw.addDocuments( @@ -1579,11 +1591,11 @@ private void docValuesFieldExistsTestCase( MapMatcher expectedFilterDebug = matchesMap().extraOk() .entry("segments_counted_in_constant_time", countsResultsInConstantTime ? 
greaterThan(0) : equalTo(0)); assertMap(debug, matchesMap().entry("test", matchesMap().extraOk().entry("filters", matchesList().item(expectedFilterDebug)))); - }, fieldType, fnft); + }, mappedField, fnf); } - private void docValuesFieldExistsNoDataTestCase(MappedFieldType fieldType) throws IOException { - QueryBuilder exists = new ExistsQueryBuilder(fieldType.name()); + private void docValuesFieldExistsNoDataTestCase(MappedField mappedField) throws IOException { + QueryBuilder exists = new ExistsQueryBuilder(mappedField.name()); AggregationBuilder builder = new FiltersAggregationBuilder("test", new KeyedFilter("q1", exists)); CheckedConsumer buildIndex = iw -> { for (int i = 0; i < 10; i++) { @@ -1591,17 +1603,17 @@ private void docValuesFieldExistsNoDataTestCase(MappedFieldType fieldType) throw } }; // Exists queries convert to MatchNone if this isn't defined - FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(true); + MappedField fnf = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(true)); withAggregator(builder, new MatchAllDocsQuery(), buildIndex, (searcher, aggregator) -> { assertThat(aggregator, instanceOf(FilterByFilterAggregator.class)); Map debug = collectAndGetFilterDebugInfo(searcher, aggregator); assertMap(debug, matchesMap().extraOk().entry("segments_counted_in_constant_time", greaterThan(0))); - }, fieldType, fnft); + }, mappedField, fnf); testCase(builder, new MatchAllDocsQuery(), buildIndex, (InternalFilters result) -> { assertThat(result.getBuckets(), hasSize(1)); assertThat(result.getBucketByKey("q1").getDocCount(), equalTo(0L)); - }, fieldType, fnft); + }, mappedField, fnf); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java index 563c1b4102617..efc753e9f9820 
100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -407,12 +407,12 @@ public void testAggregateWrongField() throws IOException { AutoDateHistogramAggregationBuilder aggregation = new AutoDateHistogramAggregationBuilder("_name").setNumBuckets(10) .field("bogus_bogus"); - final DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType("date_field"); + final MappedField mappedField = new MappedField("date_field", new DateFieldMapper.DateFieldType()); testCase(aggregation, DEFAULT_QUERY, iw -> {}, (Consumer) histogram -> { assertEquals(0, histogram.getBuckets().size()); assertFalse(AggregationInspectionHelper.hasValue(histogram)); - }, fieldType); + }, mappedField); } public void testBooleanFieldDeprecated() throws IOException { @@ -421,7 +421,7 @@ public void testBooleanFieldDeprecated() throws IOException { Document d = new Document(); d.add(new SortedNumericDocValuesField(fieldName, 0)); iw.addDocument(d); - }, a -> {}, new BooleanFieldMapper.BooleanFieldType(fieldName)); + }, a -> {}, new MappedField(fieldName, new BooleanFieldMapper.BooleanFieldType())); assertWarnings("Running AutoIntervalDateHistogram aggregations on [boolean] fields is deprecated"); } @@ -430,12 +430,12 @@ public void testUnmappedMissing() throws IOException { .field("bogus_bogus") .missing("2017-12-12"); - final 
DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType("date_field"); + final MappedField mappedField = new MappedField("date_field", new DateFieldMapper.DateFieldType()); testCase(aggregation, DEFAULT_QUERY, iw -> {}, (Consumer) histogram -> { assertEquals(0, histogram.getBuckets().size()); assertFalse(AggregationInspectionHelper.hasValue(histogram)); - }, fieldType); + }, mappedField); } public void testIntervalYear() throws IOException { @@ -936,18 +936,24 @@ private void testSearchCase( configure.accept(aggregationBuilder); } - final DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggregationBuilder.field()); + final MappedField mappedField = new MappedField(aggregationBuilder.field(), new DateFieldMapper.DateFieldType()); - MappedFieldType instantFieldType = new NumberFieldMapper.NumberFieldType(INSTANT_FIELD, NumberFieldMapper.NumberType.LONG); - MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG); + MappedField instantField = new MappedField( + INSTANT_FIELD, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + MappedField numericField = new MappedField( + NUMERIC_FIELD, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); final InternalAutoDateHistogram histogram = searchAndReduce( indexSearcher, query, aggregationBuilder, - fieldType, - instantFieldType, - numericFieldType + mappedField, + instantField, + numericField ); verify.accept(histogram); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java index ba2147bc9f3f6..4e843c8230517 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -100,32 +101,39 @@ protected final void asSubAggTestCase( CheckedBiConsumer buildIndex, Consumer verify ) throws IOException { - KeywordFieldMapper.KeywordFieldType k1ft = new KeywordFieldMapper.KeywordFieldType("k1"); - KeywordFieldMapper.KeywordFieldType k2ft = new KeywordFieldMapper.KeywordFieldType("k2"); - NumberFieldMapper.NumberFieldType nft = new NumberFieldMapper.NumberFieldType("n", NumberType.LONG); - DateFieldMapper.DateFieldType dft = aggregableDateFieldType(false, randomBoolean()); - testCase(builder, new MatchAllDocsQuery(), iw -> buildIndex.accept(iw, dft), verify, k1ft, k2ft, nft, dft); + MappedField k1f = new MappedField("k1", new KeywordFieldMapper.KeywordFieldType()); + MappedField k2f = new MappedField("k2", new KeywordFieldMapper.KeywordFieldType()); + MappedField nf = new MappedField("n", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + MappedField df = aggregableDateField(false, randomBoolean()); + testCase( + builder, + new MatchAllDocsQuery(), + iw -> buildIndex.accept(iw, (DateFieldMapper.DateFieldType) df.type()), + verify, + k1f, + k2f, + nf, + df + ); } - protected final DateFieldMapper.DateFieldType aggregableDateFieldType(boolean useNanosecondResolution, boolean isSearchable) { - return aggregableDateFieldType(useNanosecondResolution, isSearchable, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); + protected final MappedField aggregableDateField(boolean useNanosecondResolution, boolean 
isSearchable) { + return aggregableDateField(useNanosecondResolution, isSearchable, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); } - protected final DateFieldMapper.DateFieldType aggregableDateFieldType( - boolean useNanosecondResolution, - boolean isSearchable, - DateFormatter formatter - ) { - return new DateFieldMapper.DateFieldType( + protected final MappedField aggregableDateField(boolean useNanosecondResolution, boolean isSearchable, DateFormatter formatter) { + return new MappedField( AGGREGABLE_DATE, - isSearchable, - randomBoolean(), - true, - formatter, - useNanosecondResolution ? DateFieldMapper.Resolution.NANOSECONDS : DateFieldMapper.Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + isSearchable, + randomBoolean(), + true, + formatter, + useNanosecondResolution ? DateFieldMapper.Resolution.NANOSECONDS : DateFieldMapper.Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index e7fdea0de06b1..88c5e0367fa82 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DocCountFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -89,7 +90,7 @@ public void testBooleanFieldDeprecated() throws 
IOException { iw.addDocument(d); }, a -> {}, - new BooleanFieldMapper.BooleanFieldType(fieldName) + new MappedField(fieldName, new BooleanFieldMapper.BooleanFieldType()) ); assertWarnings("Running DateHistogram aggregations on [boolean] fields is deprecated"); } @@ -761,7 +762,7 @@ public void testOverlappingBounds() { public void testFewRoundingPointsUsesFromRange() throws IOException { aggregationImplementationChoiceTestCase( - aggregableDateFieldType(false, true, DateFormatter.forPattern("yyyy")), + aggregableDateField(false, true, DateFormatter.forPattern("yyyy")), IntStream.range(2000, 2010).mapToObj(Integer::toString).collect(toList()), new DateHistogramAggregationBuilder("test").field(AGGREGABLE_DATE).calendarInterval(DateHistogramInterval.YEAR), true @@ -770,7 +771,7 @@ public void testFewRoundingPointsUsesFromRange() throws IOException { public void testManyRoundingPointsDoesNotUseFromRange() throws IOException { aggregationImplementationChoiceTestCase( - aggregableDateFieldType(false, true, DateFormatter.forPattern("yyyy")), + aggregableDateField(false, true, DateFormatter.forPattern("yyyy")), IntStream.range(2000, 3000).mapToObj(Integer::toString).collect(toList()), new DateHistogramAggregationBuilder("test").field(AGGREGABLE_DATE).calendarInterval(DateHistogramInterval.YEAR), false @@ -783,7 +784,7 @@ public void testManyRoundingPointsDoesNotUseFromRange() throws IOException { */ public void testNanosDoesUseFromRange() throws IOException { aggregationImplementationChoiceTestCase( - aggregableDateFieldType(true, true, DateFormatter.forPattern("yyyy")), + aggregableDateField(true, true, DateFormatter.forPattern("yyyy")), List.of("2017", "2018"), new DateHistogramAggregationBuilder("test").field(AGGREGABLE_DATE).calendarInterval(DateHistogramInterval.YEAR), true @@ -792,7 +793,7 @@ public void testNanosDoesUseFromRange() throws IOException { public void testFarFutureDoesNotUseFromRange() throws IOException { aggregationImplementationChoiceTestCase( - 
aggregableDateFieldType(false, true, DateFormatter.forPattern("yyyyyy")), + aggregableDateField(false, true, DateFormatter.forPattern("yyyyyy")), List.of("402017", "402018"), new DateHistogramAggregationBuilder("test").field(AGGREGABLE_DATE).calendarInterval(DateHistogramInterval.YEAR), false @@ -801,7 +802,7 @@ public void testFarFutureDoesNotUseFromRange() throws IOException { public void testMissingValueDoesNotUseFromRange() throws IOException { aggregationImplementationChoiceTestCase( - aggregableDateFieldType(false, true, DateFormatter.forPattern("yyyy")), + aggregableDateField(false, true, DateFormatter.forPattern("yyyy")), List.of("2017", "2018"), new DateHistogramAggregationBuilder("test").field(AGGREGABLE_DATE).calendarInterval(DateHistogramInterval.YEAR).missing("2020"), false @@ -810,7 +811,7 @@ public void testMissingValueDoesNotUseFromRange() throws IOException { public void testExtendedBoundsUsesFromRange() throws IOException { aggregationImplementationChoiceTestCase( - aggregableDateFieldType(false, true, DateFormatter.forPattern("yyyy")), + aggregableDateField(false, true, DateFormatter.forPattern("yyyy")), List.of("2017", "2018"), List.of("2016", "2017", "2018", "2019"), new DateHistogramAggregationBuilder("test").field(AGGREGABLE_DATE) @@ -823,7 +824,7 @@ public void testExtendedBoundsUsesFromRange() throws IOException { public void testHardBoundsUsesFromRange() throws IOException { aggregationImplementationChoiceTestCase( - aggregableDateFieldType(false, true, DateFormatter.forPattern("yyyy")), + aggregableDateField(false, true, DateFormatter.forPattern("yyyy")), List.of("2016", "2017", "2018", "2019"), List.of("2017", "2018"), new DateHistogramAggregationBuilder("test").field(AGGREGABLE_DATE) @@ -845,9 +846,9 @@ public void testOneBucketOptimized() throws IOException { iw.addDocument(List.of()); } }; - DateFieldMapper.DateFieldType ft = new DateFieldMapper.DateFieldType("f"); + MappedField mappedField = new MappedField("f", new 
DateFieldMapper.DateFieldType()); // Exists queries convert to MatchNone if this isn't defined - FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(true); + MappedField fnf = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(true)); debugTestCase( builder, new MatchAllDocsQuery(), @@ -886,8 +887,8 @@ public void testOneBucketOptimized() throws IOException { ) ); }, - ft, - fnft + mappedField, + fnf ); } @@ -904,9 +905,9 @@ public void testOneBucketWithDocCountUsesFilterByFilter() throws IOException { iw.addDocument(List.of(new LongPoint("f", date), new NumericDocValuesField("f", date), DocCountFieldMapper.field(2))); } }; - DateFieldMapper.DateFieldType ft = new DateFieldMapper.DateFieldType("f"); + MappedField mappedField = new MappedField("f", new DateFieldMapper.DateFieldType()); // Exists queries convert to MatchNone if this isn't defined - FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(true); + MappedField fnf = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(true)); debugTestCase( builder, new MatchAllDocsQuery(), @@ -944,8 +945,8 @@ public void testOneBucketWithDocCountUsesFilterByFilter() throws IOException { ) ); }, - ft, - fnft + mappedField, + fnf ); } @@ -971,9 +972,9 @@ public void testTwoBucketsWithDocCountUsesTraditionalCollection() throws IOExcep ); } }; - DateFieldMapper.DateFieldType ft = new DateFieldMapper.DateFieldType("f"); + MappedField mappedField = new MappedField("f", new DateFieldMapper.DateFieldType()); // Exists queries convert to MatchNone if this isn't defined - FieldNamesFieldMapper.FieldNamesFieldType fnft = FieldNamesFieldMapper.FieldNamesFieldType.get(true); + MappedField fnf = new MappedField(FieldNamesFieldMapper.NAME, FieldNamesFieldMapper.FieldNamesFieldType.get(true)); debugTestCase( builder, new MatchAllDocsQuery(), @@ -1001,22 +1002,22 @@ public void 
testTwoBucketsWithDocCountUsesTraditionalCollection() throws IOExcep ) ); }, - ft, - fnft + mappedField, + fnf ); } private void aggregationImplementationChoiceTestCase( - DateFieldMapper.DateFieldType ft, + MappedField mappedField, List data, DateHistogramAggregationBuilder builder, boolean usesFromRange ) throws IOException { - aggregationImplementationChoiceTestCase(ft, data, data, builder, usesFromRange); + aggregationImplementationChoiceTestCase(mappedField, data, data, builder, usesFromRange); } private void aggregationImplementationChoiceTestCase( - DateFieldMapper.DateFieldType ft, + MappedField mappedField, List data, List resultingBucketKeys, DateHistogramAggregationBuilder builder, @@ -1024,13 +1025,13 @@ private void aggregationImplementationChoiceTestCase( ) throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { for (String d : data) { - long instant = asLong(d, ft); + long instant = asLong(d, mappedField); indexWriter.addDocument( List.of(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant), new LongPoint(AGGREGABLE_DATE, instant)) ); } try (IndexReader reader = indexWriter.getReader()) { - AggregationContext context = createAggregationContext(new IndexSearcher(reader), new MatchAllDocsQuery(), ft); + AggregationContext context = createAggregationContext(new IndexSearcher(reader), new MatchAllDocsQuery(), mappedField); Aggregator agg = createAggregator(builder, context); Matcher matcher = instanceOf(DateHistogramAggregator.FromDateRange.class); if (usesFromRange == false) { @@ -1106,7 +1107,7 @@ public void testBuildEmpty() throws IOException { */ assertThat(histo.emptyBucketInfo.rounding.prepareForUnknown().round(0), equalTo(0L)); }, - aggregableDateFieldType(false, true) + aggregableDateField(false, true) ); } @@ -1129,14 +1130,14 @@ private void testSearchCase( boolean useNanosecondResolution ) throws IOException { boolean aggregableDateIsSearchable = 
randomBoolean(); - DateFieldMapper.DateFieldType fieldType = aggregableDateFieldType(useNanosecondResolution, aggregableDateIsSearchable); + MappedField mappedField = aggregableDateField(useNanosecondResolution, aggregableDateIsSearchable); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new Document(); for (String date : dataset) { - long instant = asLong(date, fieldType); + long instant = asLong(date, mappedField); document.add(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant)); if (aggregableDateIsSearchable) { document.add(new LongPoint(AGGREGABLE_DATE, instant)); @@ -1155,7 +1156,7 @@ private void testSearchCase( configure.accept(aggregationBuilder); } - InternalDateHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, fieldType); + InternalDateHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, mappedField); verify.accept(histogram); } } @@ -1165,7 +1166,7 @@ private static long asLong(String dateTime) { return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli(); } - private static long asLong(String dateTime, DateFieldMapper.DateFieldType fieldType) { - return fieldType.parse(dateTime); + private static long asLong(String dateTime, MappedField mappedField) { + return ((DateFieldMapper.DateFieldType) mappedField.type()).parse(dateTime); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java index aa0803ed378dd..4b56fab1d4e51 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.lucene.queries.BinaryDocValuesRangeQuery; @@ -124,11 +124,11 @@ public void testUnsupportedRangeType() throws Exception { DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("my_agg").field(fieldName) .calendarInterval(DateHistogramInterval.MONTH); - MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); + MappedField mappedField = new MappedField(fieldName, new RangeFieldMapper.RangeFieldType(rangeType)); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - expectThrows(IllegalArgumentException.class, () -> createAggregator(aggBuilder, searcher, fieldType)); + expectThrows(IllegalArgumentException.class, () -> createAggregator(aggBuilder, searcher, mappedField)); } } } @@ -1062,12 +1062,15 @@ private void testCase( CheckedConsumer buildIndex, Consumer verify ) throws IOException { - MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(FIELD_NAME, RangeFieldMapper.Defaults.DATE_FORMATTER); + MappedField mappedField = new MappedField( + FIELD_NAME, + new RangeFieldMapper.RangeFieldType(RangeFieldMapper.Defaults.DATE_FORMATTER) + ); final DateHistogramAggregationBuilder aggregationBuilder = new DateHistogramAggregationBuilder("_name").field(FIELD_NAME); if (configure != null) { configure.accept(aggregationBuilder); } - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + testCase(aggregationBuilder, query, buildIndex, 
verify, mappedField); } private void testCase( @@ -1075,7 +1078,7 @@ private void testCase( Query query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType fieldType + MappedField mappedField ) throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { buildIndex.accept(indexWriter); @@ -1084,7 +1087,7 @@ private void testCase( try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - InternalDateHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); + InternalDateHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, mappedField); verify.accept(histogram); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java index 15319674c2644..75365ab3cdbbe 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java @@ -21,7 +21,7 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -117,12 +117,12 @@ public void testDates() throws Exception { ); String fieldName = "date_field"; - DateFieldMapper.DateFieldType fieldType = dateField(fieldName, DateFieldMapper.Resolution.MILLISECONDS); + 
MappedField mappedField = dateField(fieldName, DateFieldMapper.Resolution.MILLISECONDS); try (Directory dir = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), dir)) { Document document = new Document(); for (String date : dataset) { - long instant = fieldType.parse(date); + long instant = ((DateFieldMapper.DateFieldType) mappedField.type()).parse(date); document.add(new SortedNumericDocValuesField(fieldName, instant)); indexWriter.addDocument(document); document.clear(); @@ -132,7 +132,7 @@ public void testDates() throws Exception { .interval(1000 * 60 * 60 * 24); try (IndexReader reader = indexWriter.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } @@ -360,7 +360,7 @@ public void testExtendedBounds() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field") .interval(5) .extendedBounds(-12, 13); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); @@ -393,10 +393,10 @@ public void testHardBounds() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field") .interval(5) .hardBounds(new DoubleBounds(0.0, 10.0)); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + 
MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertEquals(1, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java index f063eab612104..4258484c0e686 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -615,16 +616,21 @@ private void testSearchCase( final MappedFieldType fieldType; if (dataset.size() == 0 || dataset.get(0) instanceof Double) { - fieldType = new NumberFieldMapper.NumberFieldType(aggregationBuilder.field(), NumberFieldMapper.NumberType.DOUBLE); + fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); } else if (dataset.get(0) instanceof Long) { - fieldType = new 
NumberFieldMapper.NumberFieldType(aggregationBuilder.field(), NumberFieldMapper.NumberType.LONG); + fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); } else if (dataset.get(0) instanceof Integer) { - fieldType = new NumberFieldMapper.NumberFieldType(aggregationBuilder.field(), NumberFieldMapper.NumberType.INTEGER); + fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER); } else { throw new IOException("Test data has an invalid type"); } - final InternalVariableWidthHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); + final InternalVariableWidthHistogram histogram = searchAndReduce( + indexSearcher, + query, + aggregationBuilder, + new MappedField(aggregationBuilder.field(), fieldType) + ); verify.accept(histogram); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java index 9255d17d87dce..8fed12fd0c30c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java @@ -20,7 +20,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.index.mapper.RangeFieldMapper; @@ -66,52 +66,52 @@ public class MissingAggregatorTests extends AggregatorTestCase { public void testMatchNoDocs() throws IOException { final int numDocs = randomIntBetween(10, 200); - final MappedFieldType fieldType = new 
NumberFieldMapper.NumberFieldType("field", NumberType.LONG); + final MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(fieldType.name()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(mappedField.name()); testCase(newMatchAllQuery(), builder, writer -> { for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(fieldType.name(), randomLong()))); + writer.addDocument(singleton(new SortedNumericDocValuesField(mappedField.name(), randomLong()))); } }, internalMissing -> { assertEquals(0, internalMissing.getDocCount()); assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); - }, singleton(fieldType)); + }, singleton(mappedField)); } public void testMatchAllDocs() throws IOException { int numDocs = randomIntBetween(10, 200); - final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); - final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); + final MappedField aggField = new MappedField("agg_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + final MappedField anotherField = new MappedField("another_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggField.name()); testCase(newMatchAllQuery(), builder, writer -> { for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); + writer.addDocument(singleton(new SortedNumericDocValuesField(anotherField.name(), randomLong()))); } }, internalMissing -> { assertEquals(numDocs, 
internalMissing.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, List.of(aggFieldType, anotherFieldType)); + }, List.of(aggField, anotherField)); } public void testMatchSparse() throws IOException { - final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); - final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); + final MappedField aggField = new MappedField("agg_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + final MappedField anotherField = new MappedField("another_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggField.name()); final int numDocs = randomIntBetween(100, 200); int docsMissingAggField = 0; final List> docs = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { - docs.add(singleton(new SortedNumericDocValuesField(aggFieldType.name(), randomLong()))); + docs.add(singleton(new SortedNumericDocValuesField(aggField.name(), randomLong()))); } else { - docs.add(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); + docs.add(singleton(new SortedNumericDocValuesField(anotherField.name(), randomLong()))); docsMissingAggField++; } } @@ -120,19 +120,19 @@ public void testMatchSparse() throws IOException { testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, List.of(aggFieldType, anotherFieldType)); + }, List.of(aggField, anotherField)); } public void testMatchSparseRangeField() throws IOException { final RangeType rangeType = RangeType.DOUBLE; - 
final MappedFieldType aggFieldType = new RangeFieldMapper.RangeFieldType("agg_field", rangeType); - final MappedFieldType anotherFieldType = new RangeFieldMapper.RangeFieldType("another_field", rangeType); + final MappedField aggField = new MappedField("agg_field", new RangeFieldMapper.RangeFieldType(rangeType)); + final MappedField anotherField = new MappedField("another_field", new RangeFieldMapper.RangeFieldType(rangeType)); final RangeFieldMapper.Range range = new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true); final BytesRef encodedRange = rangeType.encodeRanges(singleton(range)); - final BinaryDocValuesField encodedRangeField = new BinaryDocValuesField(aggFieldType.name(), encodedRange); + final BinaryDocValuesField encodedRangeField = new BinaryDocValuesField(aggField.name(), encodedRange); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggField.name()); final int numDocs = randomIntBetween(100, 200); int docsMissingAggField = 0; @@ -141,7 +141,7 @@ public void testMatchSparseRangeField() throws IOException { if (randomBoolean()) { docs.add(singleton(encodedRangeField)); } else { - docs.add(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); + docs.add(singleton(new SortedNumericDocValuesField(anotherField.name(), randomLong()))); docsMissingAggField++; } } @@ -150,64 +150,64 @@ public void testMatchSparseRangeField() throws IOException { testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, List.of(aggFieldType, anotherFieldType)); + }, List.of(aggField, anotherField)); } public void testUnmappedWithoutMissingParam() throws IOException { final int numDocs = randomIntBetween(10, 20); 
- final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); + final MappedField aggField = new MappedField("agg_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field("unknown_field"); testCase(newMatchAllQuery(), builder, writer -> { for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(aggFieldType.name(), randomLong()))); + writer.addDocument(singleton(new SortedNumericDocValuesField(aggField.name(), randomLong()))); } }, internalMissing -> { assertEquals(numDocs, internalMissing.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, singleton(aggFieldType)); + }, singleton(aggField)); } public void testUnmappedWithMissingParam() throws IOException { final int numDocs = randomIntBetween(10, 20); - final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); + final MappedField aggField = new MappedField("agg_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field("unknown_field").missing(randomLong()); testCase(newMatchAllQuery(), builder, writer -> { for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(aggFieldType.name(), randomLong()))); + writer.addDocument(singleton(new SortedNumericDocValuesField(aggField.name(), randomLong()))); } }, internalMissing -> { assertEquals(0, internalMissing.getDocCount()); assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); - }, singleton(aggFieldType)); + }, singleton(aggField)); } public void testMissingParam() throws IOException { final int numDocs = randomIntBetween(10, 20); - final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); - final MappedFieldType 
anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); + final MappedField aggField = new MappedField("agg_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + final MappedField anotherField = new MappedField("another_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()).missing(randomLong()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggField.name()).missing(randomLong()); testCase(newMatchAllQuery(), builder, writer -> { for (int i = 0; i < numDocs; i++) { - writer.addDocument(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); + writer.addDocument(singleton(new SortedNumericDocValuesField(anotherField.name(), randomLong()))); } }, internalMissing -> { assertEquals(0, internalMissing.getDocCount()); assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); - }, List.of(aggFieldType, anotherFieldType)); + }, List.of(aggField, anotherField)); } public void testMultiValuedField() throws IOException { - final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); - final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); + final MappedField aggField = new MappedField("agg_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + final MappedField anotherField = new MappedField("another_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggField.name()); final int numDocs = randomIntBetween(100, 200); int docsMissingAggField = 0; @@ -217,12 +217,12 @@ public void testMultiValuedField() throws 
IOException { final long randomLong = randomLong(); docs.add( Set.of( - new SortedNumericDocValuesField(aggFieldType.name(), randomLong), - new SortedNumericDocValuesField(aggFieldType.name(), randomLong + 1) + new SortedNumericDocValuesField(aggField.name(), randomLong), + new SortedNumericDocValuesField(aggField.name(), randomLong + 1) ) ); } else { - docs.add(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); + docs.add(singleton(new SortedNumericDocValuesField(anotherField.name(), randomLong()))); docsMissingAggField++; } } @@ -231,7 +231,7 @@ public void testMultiValuedField() throws IOException { testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { assertEquals(finalDocsMissingAggField, internalMissing.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, List.of(aggFieldType, anotherFieldType)); + }, List.of(aggField, anotherField)); } public void testSingleValuedFieldWithValueScript() throws IOException { @@ -243,19 +243,19 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws IOException } private void valueScriptTestCase(Script script) throws IOException { - final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); - final MappedFieldType anotherFieldType = new NumberFieldMapper.NumberFieldType("another_field", NumberType.LONG); + final MappedField aggField = new MappedField("agg_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + final MappedField anotherField = new MappedField("another_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); - final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggFieldType.name()).script(script); + final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").field(aggField.name()).script(script); final int numDocs = randomIntBetween(100, 200); int docsMissingAggField = 
0; final List> docs = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { - docs.add(singleton(new SortedNumericDocValuesField(aggFieldType.name(), randomLong()))); + docs.add(singleton(new SortedNumericDocValuesField(aggField.name(), randomLong()))); } else { - docs.add(singleton(new SortedNumericDocValuesField(anotherFieldType.name(), randomLong()))); + docs.add(singleton(new SortedNumericDocValuesField(anotherField.name(), randomLong()))); docsMissingAggField++; } } @@ -264,7 +264,7 @@ private void valueScriptTestCase(Script script) throws IOException { testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { assertEquals(finalDocsMissingField, internalMissing.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, List.of(aggFieldType, anotherFieldType)); + }, List.of(aggField, anotherField)); } public void testMultiValuedFieldWithFieldScriptWithParams() throws IOException { @@ -281,7 +281,7 @@ public void testMultiValuedFieldWithFieldScript() throws IOException { } private void fieldScriptTestCase(Script script, long threshold) throws IOException { - final MappedFieldType aggFieldType = new NumberFieldMapper.NumberFieldType("agg_field", NumberType.LONG); + final MappedField aggField = new MappedField("agg_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final MissingAggregationBuilder builder = new MissingAggregationBuilder("_name").script(script); @@ -296,8 +296,8 @@ private void fieldScriptTestCase(Script script, long threshold) throws IOExcepti } docs.add( Set.of( - new SortedNumericDocValuesField(aggFieldType.name(), firstValue), - new SortedNumericDocValuesField(aggFieldType.name(), secondValue) + new SortedNumericDocValuesField(aggField.name(), firstValue), + new SortedNumericDocValuesField(aggField.name(), secondValue) ) ); } @@ -306,7 +306,7 @@ private void fieldScriptTestCase(Script script, long threshold) throws IOExcepti 
testCase(newMatchAllQuery(), builder, writer -> writer.addDocuments(docs), internalMissing -> { assertEquals(finalDocsBelowThreshold, internalMissing.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); - }, singleton(aggFieldType)); + }, singleton(aggField)); } private void testCase( @@ -314,7 +314,7 @@ private void testCase( MissingAggregationBuilder builder, CheckedConsumer writeIndex, Consumer verify, - Collection fieldTypes + Collection mappedFields ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -323,15 +323,15 @@ private void testCase( try (IndexReader indexReader = DirectoryReader.open(directory)) { final IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - final MappedFieldType[] fieldTypesArray = fieldTypes.toArray(new MappedFieldType[0]); - final InternalMissing missing = searchAndReduce(indexSearcher, query, builder, fieldTypesArray); + final MappedField[] mappedFieldArray = mappedFields.toArray(new MappedField[0]); + final InternalMissing missing = searchAndReduce(indexSearcher, query, builder, mappedFieldArray); verify.accept(missing); } } } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new MissingAggregationBuilder("_name").field(fieldName); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index af27f43f87ed5..6c0ab672a8473 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -35,7 +35,7 @@ 
import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.mapper.NestedPathFieldMapper; @@ -146,13 +146,16 @@ public void testNoDocs() throws IOException { NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT); MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); nestedBuilder.subAggregation(maxAgg); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); InternalNested nested = searchAndReduce( newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, - fieldType + mappedField ); assertEquals(NESTED_AGG, nested.getName()); @@ -196,13 +199,16 @@ public void testSingleNestingMax() throws IOException { NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT); MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); nestedBuilder.subAggregation(maxAgg); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); InternalNested nested = searchAndReduce( newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, - fieldType + mappedField ); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ 
-253,13 +259,16 @@ public void testDoubleNestingMax() throws IOException { MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); nestedBuilder.subAggregation(maxAgg); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); InternalNested nested = searchAndReduce( newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, - fieldType + mappedField ); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -311,13 +320,16 @@ public void testOrphanedDocs() throws IOException { NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, NESTED_OBJECT); SumAggregationBuilder sumAgg = new SumAggregationBuilder(SUM_AGG_NAME).field(VALUE_FIELD_NAME); nestedBuilder.subAggregation(sumAgg); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); InternalNested nested = searchAndReduce( newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, - fieldType + mappedField ); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -392,7 +404,10 @@ public void testResetRootDocId() throws Exception { try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG, "nested_field"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); 
BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); @@ -402,7 +417,7 @@ public void testResetRootDocId() throws Exception { newSearcher(indexReader, false, true), new ConstantScoreQuery(bq.build()), nestedBuilder, - fieldType + mappedField ); assertEquals(NESTED_AGG, nested.getName()); @@ -429,8 +444,8 @@ public void testNestedOrdering() throws IOException { iw.addDocuments(generateBook("9", new String[] { "g", "c", "e" }, new int[] { 18, 8 })); } try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("num_pages", NumberFieldMapper.NumberType.LONG); - MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("author"); + MappedField field1 = new MappedField("num_pages", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField field2 = new MappedField("author", new KeywordFieldMapper.KeywordFieldType()); TermsAggregationBuilder termsBuilder = new TermsAggregationBuilder("authors").userValueTypeHint(ValueType.STRING) .field("author") @@ -440,13 +455,7 @@ public void testNestedOrdering() throws IOException { nestedBuilder.subAggregation(maxAgg); termsBuilder.subAggregation(nestedBuilder); - Terms terms = searchAndReduce( - newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), - termsBuilder, - fieldType1, - fieldType2 - ); + Terms terms = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), termsBuilder, field1, field2); assertEquals(7, terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -495,13 +504,7 @@ public void testNestedOrdering() throws IOException { nestedBuilder.subAggregation(maxAgg); termsBuilder.subAggregation(nestedBuilder); - terms = searchAndReduce( - newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), - termsBuilder, - fieldType1, - fieldType2 - ); + terms = 
searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), termsBuilder, field1, field2); assertEquals(7, terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -575,8 +578,8 @@ public void testNestedOrdering_random() throws IOException { } }); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("num_pages", NumberFieldMapper.NumberType.LONG); - MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("author"); + MappedField field1 = new MappedField("num_pages", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField field2 = new MappedField("author", new KeywordFieldMapper.KeywordFieldType()); TermsAggregationBuilder termsBuilder = new TermsAggregationBuilder("authors").userValueTypeHint(ValueType.STRING) .size(books.size()) @@ -587,13 +590,7 @@ public void testNestedOrdering_random() throws IOException { nestedBuilder.subAggregation(minAgg); termsBuilder.subAggregation(nestedBuilder); - Terms terms = searchAndReduce( - newSearcher(indexReader, false, true), - new MatchAllDocsQuery(), - termsBuilder, - fieldType1, - fieldType2 - ); + Terms terms = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), termsBuilder, field1, field2); assertEquals(books.size(), terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -685,15 +682,15 @@ public void testPreGetChildLeafCollectors() throws IOException { FilterAggregationBuilder filterAggregationBuilder = new FilterAggregationBuilder("filterAgg", new MatchAllQueryBuilder()); filterAggregationBuilder.subAggregation(nestedBuilder); - MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("key"); - MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("value"); + MappedField field1 = new MappedField("key", new KeywordFieldMapper.KeywordFieldType()); + MappedField 
field2 = new MappedField("value", new KeywordFieldMapper.KeywordFieldType()); Filter filter = searchAndReduce( newSearcher(indexReader, false, true), Queries.newNonNestedFilter(), filterAggregationBuilder, - fieldType1, - fieldType2 + field1, + field2 ); assertEquals("filterAgg", filter.getName()); @@ -727,7 +724,10 @@ public void testFieldAlias() throws IOException { int numRootDocs = randomIntBetween(1, 20); int expectedNestedDocs = 0; - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); try (Directory directory = newDirectory()) { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { @@ -755,8 +755,8 @@ public void testFieldAlias() throws IOException { max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias") ); - InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, fieldType); - Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType); + InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, mappedField); + Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, mappedField); assertEquals(nested, aliasNested); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -804,13 +804,16 @@ public void testNestedWithPipeline() throws IOException { ) ); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); InternalNested nested = searchAndReduce( newSearcher(indexReader, false, true), new 
MatchAllDocsQuery(), nestedBuilder, - fieldType + mappedField ); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -888,10 +891,16 @@ public static CheckedConsumer buildResellerData( }; } - public static MappedFieldType[] resellersMappedFields() { - MappedFieldType productIdField = new NumberFieldMapper.NumberFieldType("product_id", NumberFieldMapper.NumberType.LONG); - MappedFieldType resellerIdField = new NumberFieldMapper.NumberFieldType("reseller_id", NumberFieldMapper.NumberType.LONG); - return new MappedFieldType[] { productIdField, resellerIdField }; + public static MappedField[] resellersMappedFields() { + MappedField productIdField = new MappedField( + "product_id", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + MappedField resellerIdField = new MappedField( + "reseller_id", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + return new MappedField[] { productIdField, resellerIdField }; } private double generateMaxDocs(List> documents, int numNestedDocs, int id, String path, String fieldName) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 6681aaa3b6f00..1595ef57a3db0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -19,7 +19,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; 
import org.elasticsearch.index.mapper.ObjectMapper; @@ -72,9 +72,12 @@ public void testNoDocs() throws IOException { nestedBuilder.subAggregation(reverseNestedBuilder); MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); reverseNestedBuilder.subAggregation(maxAgg); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); - Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, mappedField); ReverseNested reverseNested = (ReverseNested) ((InternalAggregation) nested).getProperty(REVERSE_AGG_NAME); assertEquals(REVERSE_AGG_NAME, reverseNested.getName()); assertEquals(0, reverseNested.getDocCount()); @@ -132,9 +135,12 @@ public void testMaxFromParentDocs() throws IOException { nestedBuilder.subAggregation(reverseNestedBuilder); MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME).field(VALUE_FIELD_NAME); reverseNestedBuilder.subAggregation(maxAgg); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); - Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, mappedField); assertEquals(expectedNestedDocs, nested.getDocCount()); ReverseNested reverseNested = (ReverseNested) ((InternalAggregation) nested).getProperty(REVERSE_AGG_NAME); @@ 
-152,7 +158,10 @@ public void testFieldAlias() throws IOException { int numParentDocs = randomIntBetween(1, 20); int expectedParentDocs = 0; - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + VALUE_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); try (Directory directory = newDirectory()) { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { @@ -201,8 +210,8 @@ public void testFieldAlias() throws IOException { reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg) ); - Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, fieldType); - Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType); + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, mappedField); + Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, mappedField); ReverseNested reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME); ReverseNested aliasReverseNested = aliasNested.getAggregations().get(REVERSE_AGG_NAME); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java index d7acbf3d51c98..56eb2354a325b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.IpFieldMapper; -import 
org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -138,7 +138,7 @@ public void testEmptyDocument() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = Collections.emptyList(); // WHEN @@ -165,7 +165,7 @@ public void testEmptyDocument() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); - }, fieldType); + }, mappedField); } public void testIpv4Addresses() throws IOException { @@ -178,7 +178,7 @@ public void testIpv4Addresses() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), @@ -221,7 +221,7 @@ public void testIpv4Addresses() throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of(1L, 1L, 4L, 1L) ); - }, fieldType); + }, mappedField); } public void testIpv6Addresses() throws IOException { @@ -234,7 +234,7 @@ public void testIpv6Addresses() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType 
fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("2001:db8:a4f8:112a:6001:0:12:7f2a", "2001:db8:a4f8:112a::", prefixLength, defaultTime()), new TestIpDataHolder("2001:db8:a4f8:112a:7044:1f01:0:44f2", "2001:db8:a4f8:112a::", prefixLength, defaultTime()), @@ -274,7 +274,7 @@ public void testIpv6Addresses() throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of(2L, 1L, 2L) ); - }, fieldType); + }, mappedField); } public void testZeroPrefixLength() throws IOException { @@ -287,7 +287,7 @@ public void testZeroPrefixLength() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("192.168.1.12", "0.0.0.0", prefixLength, defaultTime()), new TestIpDataHolder("192.168.1.12", "0.0.0.0", prefixLength, defaultTime()), @@ -330,7 +330,7 @@ public void testZeroPrefixLength() throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of((long) ipAddresses.size()) ); - }, fieldType); + }, mappedField); } public void testIpv4MaxPrefixLength() throws IOException { @@ -343,7 +343,7 @@ public void testIpv4MaxPrefixLength() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("192.168.1.12", "192.168.1.12", prefixLength, defaultTime()), new 
TestIpDataHolder("192.168.1.12", "192.168.1.12", prefixLength, defaultTime()), @@ -386,7 +386,7 @@ public void testIpv4MaxPrefixLength() throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of(1L, 1L, 1L, 2L, 1L, 1L) ); - }, fieldType); + }, mappedField); } public void testIpv6MaxPrefixLength() throws IOException { @@ -399,7 +399,7 @@ public void testIpv6MaxPrefixLength() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("2001:db8:a4f8:112a:6001:0:12:7f2a", "2001:db8:a4f8:112a:6001:0:12:7f2a", prefixLength, defaultTime()), new TestIpDataHolder("2001:db8:a4f8:112a:7044:1f01:0:44f2", "2001:db8:a4f8:112a:7044:1f01:0:44f2", prefixLength, defaultTime()), @@ -439,7 +439,7 @@ public void testIpv6MaxPrefixLength() throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of(1L, 1L, 1L, 1L, 1L) ); - }, fieldType); + }, mappedField); } public void testAggregateOnIpv4Field() throws IOException { @@ -453,7 +453,8 @@ public void testAggregateOnIpv4Field() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType[] fieldTypes = { new IpFieldMapper.IpFieldType(ipv4FieldName), new IpFieldMapper.IpFieldType(ipv6FieldName) }; + final MappedField mappedIpv4Field = new MappedField(ipv4FieldName, new IpFieldMapper.IpFieldType()); + final MappedField mappedIpv6Field = new MappedField(ipv6FieldName, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), new TestIpDataHolder("192.168.1.12", "192.168.0.0", 
prefixLength, defaultTime()), @@ -500,7 +501,7 @@ public void testAggregateOnIpv4Field() throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of(1L, 1L, 4L, 1L) ); - }, fieldTypes); + }, mappedIpv4Field, mappedIpv6Field); } public void testAggregateOnIpv6Field() throws IOException { @@ -514,7 +515,8 @@ public void testAggregateOnIpv6Field() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType[] fieldTypes = { new IpFieldMapper.IpFieldType(ipv4FieldName), new IpFieldMapper.IpFieldType(ipv6FieldName) }; + final MappedField mappedIpv4Field = new MappedField(ipv4FieldName, new IpFieldMapper.IpFieldType()); + final MappedField mappedIpv6Field = new MappedField(ipv6FieldName, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("2001:db8:a4f8:112a:6001:0:12:7f2a", "2001:db8:a4f8:112a::", prefixLength, defaultTime()), new TestIpDataHolder("2001:db8:a4f8:112a:7044:1f01:0:44f2", "2001:db8:a4f8:112a::", prefixLength, defaultTime()), @@ -558,7 +560,7 @@ public void testAggregateOnIpv6Field() throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of(2L, 1L, 2L) ); - }, fieldTypes); + }, mappedIpv4Field, mappedIpv6Field); } public void testIpv4AggregationAsSubAggregation() throws IOException { @@ -580,9 +582,11 @@ public void testIpv4AggregationAsSubAggregation() throws IOException { .minDocCount(1) .prefixLength(prefixLength) ); - final DateFieldMapper.DateFieldType dateFieldType = new DateFieldMapper.DateFieldType(datetimeFieldName); - final IpFieldMapper.IpFieldType ipFieldType = new IpFieldMapper.IpFieldType(ipv4FieldName); - final MappedFieldType[] fieldTypes = { ipFieldType, dateFieldType }; + final DateFieldMapper.DateFieldType dateFieldType = new DateFieldMapper.DateFieldType(); + final 
IpFieldMapper.IpFieldType ipFieldType = new IpFieldMapper.IpFieldType(); + final MappedField[] mappedFields = { + new MappedField(datetimeFieldName, dateFieldType), + new MappedField(ipv4FieldName, ipFieldType) }; long day1 = dateFieldType.parse("2021-10-12"); long day2 = dateFieldType.parse("2021-10-11"); @@ -648,7 +652,7 @@ public void testIpv4AggregationAsSubAggregation() throws IOException { assertTrue(bucket2Subnets.containsAll(expectedBucket2Subnets)); assertTrue(expectedBucket1Subnets.containsAll(bucket1Subnets)); assertTrue(expectedBucket2Subnets.containsAll(bucket2Subnets)); - }, fieldTypes); + }, mappedFields); } public void testIpv6AggregationAsSubAggregation() throws IOException { @@ -670,9 +674,11 @@ public void testIpv6AggregationAsSubAggregation() throws IOException { .minDocCount(1) .prefixLength(prefixLength) ); - final DateFieldMapper.DateFieldType dateFieldType = new DateFieldMapper.DateFieldType(datetimeFieldName); - final IpFieldMapper.IpFieldType ipFieldType = new IpFieldMapper.IpFieldType(ipv4FieldName); - final MappedFieldType[] fieldTypes = { ipFieldType, dateFieldType }; + final DateFieldMapper.DateFieldType dateFieldType = new DateFieldMapper.DateFieldType(); + final IpFieldMapper.IpFieldType ipFieldType = new IpFieldMapper.IpFieldType(); + final MappedField[] mappedFields = { + new MappedField(datetimeFieldName, dateFieldType), + new MappedField(ipv4FieldName, ipFieldType) }; long day1 = dateFieldType.parse("2021-11-04"); long day2 = dateFieldType.parse("2021-11-05"); @@ -735,7 +741,7 @@ public void testIpv6AggregationAsSubAggregation() throws IOException { assertTrue(bucket2Subnets.containsAll(expectedBucket2Subnets)); assertTrue(expectedBucket1Subnets.containsAll(bucket1Subnets)); assertTrue(expectedBucket2Subnets.containsAll(bucket2Subnets)); - }, fieldTypes); + }, mappedFields); } public void testIpPrefixSubAggregations() throws IOException { @@ -759,8 +765,7 @@ public void testIpPrefixSubAggregations() throws IOException { 
.minDocCount(1) .prefixLength(subPrefixLength) ); - final IpFieldMapper.IpFieldType ipFieldType = new IpFieldMapper.IpFieldType(ipv4FieldName); - final MappedFieldType[] fieldTypes = { ipFieldType }; + final MappedField mappedField = new MappedField(ipv4FieldName, new IpFieldMapper.IpFieldType()); final String FIRST_SUBNET = "192.168.0.0"; final String SECOND_SUBNET = "192.169.0.0"; @@ -819,7 +824,7 @@ public void testIpPrefixSubAggregations() throws IOException { assertTrue(secondSubnetNestedSubnets.containsAll(expectedSecondSubnetNestedSUbnets)); assertTrue(expectedSecondSubnetNestedSUbnets.containsAll(secondSubnetNestedSubnets)); - }, fieldTypes); + }, mappedField); } public void testIpv4AppendPrefixLength() throws IOException { @@ -832,7 +837,7 @@ public void testIpv4AppendPrefixLength() throws IOException { .appendPrefixLength(true) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), @@ -873,7 +878,7 @@ public void testIpv4AppendPrefixLength() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); - }, fieldType); + }, mappedField); } public void testIpv6AppendPrefixLength() throws IOException { @@ -886,7 +891,7 @@ public void testIpv6AppendPrefixLength() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new 
TestIpDataHolder("2001:db8:a4f8:112a:6001:0:12:7f2a", "2001:db8:a4f8:112a::", prefixLength, defaultTime()), new TestIpDataHolder("2001:db8:a4f8:112a:7044:1f01:0:44f2", "2001:db8:a4f8:112a::", prefixLength, defaultTime()), @@ -924,7 +929,7 @@ public void testIpv6AppendPrefixLength() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); - }, fieldType); + }, mappedField); } public void testMinDocCount() throws IOException { @@ -938,7 +943,7 @@ public void testMinDocCount() throws IOException { .appendPrefixLength(false) .minDocCount(minDocCount) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), @@ -982,7 +987,7 @@ public void testMinDocCount() throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of(4L) ); - }, fieldType); + }, mappedField); } public void testAggregationWithQueryFilter() throws IOException { @@ -995,7 +1000,7 @@ public void testAggregationWithQueryFilter() throws IOException { .appendPrefixLength(false) .minDocCount(1) .prefixLength(prefixLength); - final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); + final MappedField mappedField = new MappedField(field, new IpFieldMapper.IpFieldType()); final List ipAddresses = List.of( new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), new TestIpDataHolder("192.168.1.12", "192.168.0.0", prefixLength, defaultTime()), @@ -1047,7 +1052,7 @@ public void testAggregationWithQueryFilter() 
throws IOException { ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), List.of(4L) ); - }, fieldType); + }, mappedField); } public void testMetricAggregation() throws IOException { @@ -1064,9 +1069,9 @@ public void testMetricAggregation() throws IOException { .minDocCount(1) .prefixLength(prefixLength) .subAggregation(new SumAggregationBuilder(subAggregationName).field(timeField)); - final MappedFieldType[] fieldTypes = { - new IpFieldMapper.IpFieldType(ipField), - new NumberFieldMapper.NumberFieldType(timeField, NumberFieldMapper.NumberType.LONG) }; + final MappedField[] mappedFields = { + new MappedField(ipField, new IpFieldMapper.IpFieldType()), + new MappedField(timeField, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)) }; final List ipAddresses = List.of( new TestIpDataHolder("2001:db8:a4f8:112a:6001:0:12:7f2a", "2001:db8:a4f8:112a::", prefixLength, 100), new TestIpDataHolder("2001:db8:a4f8:112a:7044:1f01:0:44f2", "2001:db8:a4f8:112a::", prefixLength, 110), @@ -1109,7 +1114,7 @@ public void testMetricAggregation() throws IOException { assertEquals(210, ((Sum) ipPrefix.getBuckets().get(0).getAggregations().get(subAggregationName)).value(), 0); assertEquals(200, ((Sum) ipPrefix.getBuckets().get(1).getAggregations().get(subAggregationName)).value(), 0); assertEquals(300, ((Sum) ipPrefix.getBuckets().get(2).getAggregations().get(subAggregationName)).value(), 0); - }, fieldTypes); + }, mappedFields); } private Function appendPrefixLength(int prefixLength) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java index bd84603d8c1b5..3cd4d88f5b7d5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper.Resolution; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -62,7 +62,7 @@ public void testBooleanFieldDeprecated() throws IOException { Document d = new Document(); d.add(new SortedNumericDocValuesField(fieldName, 0)); iw.addDocument(d); - }, a -> {}, new BooleanFieldMapper.BooleanFieldType(fieldName)); + }, a -> {}, new MappedField(fieldName, new BooleanFieldMapper.BooleanFieldType())); assertWarnings("Running Range or DateRange aggregations on [boolean] fields is deprecated"); } @@ -127,7 +127,7 @@ public void testMatchesNumericDocValues() throws IOException { } public void testMissingDateStringWithDateField() throws IOException { - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD_NAME); + MappedField mappedField = new MappedField(DATE_FIELD_NAME, new DateFieldMapper.DateFieldType()); DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field(DATE_FIELD_NAME) .missing("2015-11-13T16:14:34") @@ -143,7 +143,7 @@ public void testMissingDateStringWithDateField() throws IOException { assertEquals(1, ranges.size()); assertEquals(2, ranges.get(0).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testUnboundedRanges() throws IOException { @@ -173,16 +173,18 @@ public void testUnboundedRanges() throws IOException { assertThat(ranges.get(1).getDocCount(), equalTo(2L)); 
assertTrue(AggregationInspectionHelper.hasValue(range)); }, - new DateFieldMapper.DateFieldType( + new MappedField( DATE_FIELD_NAME, - randomBoolean(), - randomBoolean(), - true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + randomBoolean(), + randomBoolean(), + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ) ); } @@ -191,19 +193,25 @@ public void testNumberFieldDateRanges() throws IOException { DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field(NUMBER_FIELD_NAME) .addRange("2015-11-13", "2015-11-14"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + }, range -> fail("Should have thrown exception"), mappedField)); } public void testNumberFieldNumberRanges() throws IOException { DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field(NUMBER_FIELD_NAME) .addRange(0, 5); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(List.of(new 
NumericDocValuesField(NUMBER_FIELD_NAME, 7), new IntPoint(NUMBER_FIELD_NAME, 7))); @@ -213,7 +221,7 @@ public void testNumberFieldNumberRanges() throws IOException { assertEquals(1, ranges.size()); assertEquals(1, ranges.get(0).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testMissingDateStringWithNumberField() throws IOException { @@ -221,12 +229,15 @@ public void testMissingDateStringWithNumberField() throws IOException { .addRange("2015-11-13", "2015-11-14") .missing("1979-01-01T00:00:00"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + }, range -> fail("Should have thrown exception"), mappedField)); } public void testUnmappedWithMissingNumber() throws IOException { @@ -234,7 +245,10 @@ public void testUnmappedWithMissingNumber() throws IOException { .addRange("2015-11-13", "2015-11-14") .missing(1447438575000L); // 2015-11-13 6:16:15 - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); @@ -244,7 +258,7 @@ public void testUnmappedWithMissingNumber() throws IOException { assertEquals(1, 
ranges.size()); assertEquals(2, ranges.get(0).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testUnmappedWithMissingDate() throws IOException { @@ -252,7 +266,10 @@ public void testUnmappedWithMissingDate() throws IOException { .addRange("2015-11-13", "2015-11-14") .missing("2015-11-13T10:11:12"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); @@ -262,14 +279,14 @@ public void testUnmappedWithMissingDate() throws IOException { assertEquals(1, ranges.size()); assertEquals(2, ranges.get(0).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testKeywordField() { DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("date_range").field("not_a_number") .addRange("2015-11-13", "2015-11-14"); - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("not_a_number"); + MappedField mappedField = new MappedField("not_a_number", new KeywordFieldMapper.KeywordFieldType()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -278,7 +295,7 @@ public void testKeywordField() { new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); }, range -> fail("Should have thrown exception"), - fieldType + mappedField ) ); assertEquals("Field [not_a_number] of type [keyword] is not supported for aggregation [date_range]", e.getMessage()); @@ -289,12 +306,15 @@ public void testBadMissingField() { .addRange("2020-01-01T00:00:00", "2020-01-02T00:00:00") 
.missing("bogus"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + }, range -> fail("Should have thrown exception"), mappedField)); } public void testUnmappedWithBadMissingField() { @@ -302,12 +322,15 @@ public void testUnmappedWithBadMissingField() { .addRange("2020-01-01T00:00:00", "2020-01-02T00:00:00") .missing("bogus"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); expectThrows(ElasticsearchParseException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + }, range -> fail("Should have thrown exception"), mappedField)); } private void testBothResolutions( @@ -335,22 +358,24 @@ private void testCase( Consumer>> verify, DateFieldMapper.Resolution resolution ) throws IOException { - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType( + MappedField mappedField = new MappedField( DATE_FIELD_NAME, - true, - false, - true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - resolution, - null, - null, - Collections.emptyMap() + new 
DateFieldMapper.DateFieldType( + true, + false, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + resolution, + null, + null, + Collections.emptyMap() + ) ); DateRangeAggregationBuilder aggregationBuilder = new DateRangeAggregationBuilder("test_range_agg"); aggregationBuilder.field(DATE_FIELD_NAME); aggregationBuilder.addRange("2015-01-01", "2015-12-31"); aggregationBuilder.addRange("2019-01-01", "2019-12-31"); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + testCase(aggregationBuilder, query, buildIndex, verify, mappedField); } private void testCase( @@ -358,7 +383,7 @@ private void testCase( Query query, CheckedConsumer buildIndex, Consumer>> verify, - MappedFieldType fieldType + MappedField mappedField ) throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -372,7 +397,7 @@ private void testCase( indexSearcher, query, aggregationBuilder, - fieldType + mappedField ); verify.accept(agg); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java index 02c528e3a9c4d..3e5c093ab86f8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.IpFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -97,10 +97,10 @@ public void testRanges() throws Exception { } w.addDocument(doc); } - MappedFieldType fieldType = new 
IpFieldMapper.IpFieldType("field"); + MappedField mappedField = new MappedField("field", new IpFieldMapper.IpFieldType()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalBinaryRange range = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldType); + InternalBinaryRange range = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, mappedField); assertEquals(numRanges, range.getBuckets().size()); for (int i = 0; i < range.getBuckets().size(); i++) { Tuple expected = requestedRanges[i]; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java index d131311ff1a25..863e4f19b1ffa 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregatorTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LongScriptFieldType; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.script.LongFieldScript; @@ -107,7 +107,7 @@ public void testMatchesNumericDocValues() throws IOException { */ public void testDoubleRangesExclusiveEndpoint() throws IOException { final String fieldName = "double"; - MappedFieldType field = new NumberFieldMapper.NumberFieldType(fieldName, NumberType.DOUBLE); + MappedField field = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); testCase( new RangeAggregationBuilder("range").field(fieldName).addRange("r1", 0, 0.04D).addRange("r2", 
0.04D, 1.0D), new MatchAllDocsQuery(), @@ -125,7 +125,7 @@ public void testDoubleRangesExclusiveEndpoint() throws IOException { public void testMinAndMaxLongRangeBounds() throws IOException { final String fieldName = "long_field"; - MappedFieldType field = new NumberFieldMapper.NumberFieldType(fieldName, NumberType.LONG); + MappedField field = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberType.LONG)); double from = Long.valueOf(Long.MIN_VALUE).doubleValue(); double to = Long.valueOf(Long.MAX_VALUE).doubleValue(); testCase( @@ -145,7 +145,7 @@ public void testMinAndMaxLongRangeBounds() throws IOException { public void testFloatRangeFromAndToValues() throws IOException { final String fieldName = "test"; - MappedFieldType field = new NumberFieldMapper.NumberFieldType(fieldName, NumberType.FLOAT); + MappedField field = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberType.FLOAT)); testCase( new RangeAggregationBuilder("0").field(fieldName).addRange(5, 6).addRange(6, 10.6).keyed(true), new MatchAllDocsQuery(), @@ -177,7 +177,7 @@ public void testFloatRangeFromAndToValues() throws IOException { public void testDoubleRangeFromAndToValues() throws IOException { final String fieldName = "test"; - MappedFieldType field = new NumberFieldMapper.NumberFieldType(fieldName, NumberType.DOUBLE); + MappedField field = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); testCase( new RangeAggregationBuilder("0").field(fieldName).addRange(5, 6).addRange(6, 10.6).keyed(true), new MatchAllDocsQuery(), @@ -209,7 +209,7 @@ public void testDoubleRangeFromAndToValues() throws IOException { public void testDoubleRangeWithLongField() throws IOException { final String fieldName = "long_field"; - MappedFieldType field = new NumberFieldMapper.NumberFieldType(fieldName, NumberType.LONG); + MappedField field = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberType.LONG)); testCase( new 
RangeAggregationBuilder("0").field(fieldName).addRange(990.0, 999.9).addUnboundedFrom(999.9), new MatchAllDocsQuery(), @@ -232,7 +232,7 @@ public void testDoubleRangeWithLongField() throws IOException { public void testDoubleRangeWithIntegerField() throws IOException { final String fieldName = "integer_field"; - MappedFieldType field = new NumberFieldMapper.NumberFieldType(fieldName, NumberType.INTEGER); + MappedField mappedField = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase( new RangeAggregationBuilder("0").field(fieldName).addRange(990.0, 999.9).addUnboundedFrom(999.9), new MatchAllDocsQuery(), @@ -249,7 +249,7 @@ public void testDoubleRangeWithIntegerField() throws IOException { assertEquals(2, ranges.get(0).getDocCount()); assertEquals(2, ranges.get(1).getDocCount()); }, - field + mappedField ); } @@ -258,7 +258,7 @@ public void testDoubleRangeWithIntegerField() throws IOException { */ public void testFloatRangesExclusiveEndpoint() throws IOException { final String fieldName = "float"; - MappedFieldType field = new NumberFieldMapper.NumberFieldType(fieldName, NumberType.FLOAT); + MappedField field = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberType.FLOAT)); testCase( new RangeAggregationBuilder("range").field(fieldName).addRange("r1", 0, 0.04D).addRange("r2", 0.04D, 1.0D), new MatchAllDocsQuery(), @@ -283,7 +283,7 @@ public void testFloatRangesExclusiveEndpoint() throws IOException { */ public void testHalfFloatRangesExclusiveEndpoint() throws IOException { final String fieldName = "halfFloat"; - MappedFieldType field = new NumberFieldMapper.NumberFieldType(fieldName, NumberType.HALF_FLOAT); + MappedField mappedField = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberType.HALF_FLOAT)); testCase( new RangeAggregationBuilder("range").field(fieldName).addRange("r1", 0, 0.0152D).addRange("r2", 0.0152D, 1.0D), new MatchAllDocsQuery(), @@ -299,7 
+299,7 @@ public void testHalfFloatRangesExclusiveEndpoint() throws IOException { assertEquals(0, ranges.get(0).getDocCount()); assertEquals(1, ranges.get(1).getDocCount()); }, - field + mappedField ); } @@ -336,33 +336,37 @@ public void testUnboundedRanges() throws IOException { assertThat(ranges.get(1).getDocCount(), equalTo(2L)); assertTrue(AggregationInspectionHelper.hasValue(range)); }, - new NumberFieldMapper.NumberFieldType( + new MappedField( NUMBER_FIELD_NAME, - NumberFieldMapper.NumberType.INTEGER, - randomBoolean(), - randomBoolean(), - true, - false, - null, - Collections.emptyMap(), - null, - false, - null + new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.INTEGER, + randomBoolean(), + randomBoolean(), + true, + false, + null, + Collections.emptyMap(), + null, + false, + null + ) ) ); } public void testDateFieldMillisecondResolution() throws IOException { - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType( + MappedField mappedField = new MappedField( DATE_FIELD_NAME, - randomBoolean(), - randomBoolean(), - true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + randomBoolean(), + randomBoolean(), + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); long milli1 = ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); @@ -379,20 +383,22 @@ public void testDateFieldMillisecondResolution() throws IOException { assertEquals(1, ranges.size()); assertEquals(1, ranges.get(0).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testDateFieldNanosecondResolution() throws IOException { - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType( + MappedField mappedField = new MappedField( DATE_FIELD_NAME, - true, - 
false, - true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - DateFieldMapper.Resolution.NANOSECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + true, + false, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + DateFieldMapper.Resolution.NANOSECONDS, + null, + null, + Collections.emptyMap() + ) ); // These values should work because aggs scale nanosecond up to millisecond always. @@ -410,20 +416,22 @@ public void testDateFieldNanosecondResolution() throws IOException { assertEquals(1, ranges.size()); assertEquals(1, ranges.get(0).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testMissingDateWithDateNanosField() throws IOException { - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType( + MappedField mappedField = new MappedField( DATE_FIELD_NAME, - true, - false, - true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - DateFieldMapper.Resolution.NANOSECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + true, + false, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + DateFieldMapper.Resolution.NANOSECONDS, + null, + null, + Collections.emptyMap() + ) ); // These values should work because aggs scale nanosecond up to millisecond always. 
@@ -444,22 +452,24 @@ public void testMissingDateWithDateNanosField() throws IOException { assertEquals(1, ranges.size()); assertEquals(2, ranges.get(0).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testNotFitIntoDouble() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + MappedField mappedField = new MappedField( NUMBER_FIELD_NAME, - NumberType.LONG, - true, - false, - true, - false, - null, - Collections.emptyMap(), - null, - false, - null + new NumberFieldMapper.NumberFieldType( + NumberType.LONG, + true, + false, + true, + false, + null, + Collections.emptyMap(), + null, + false, + null + ) ); long start = 2L << 54; // Double stores 53 bits of mantissa, so we aggregate a bunch of bigger values @@ -479,7 +489,7 @@ public void testNotFitIntoDouble() throws IOException { // If we had a native `double` range aggregator we'd get 50, 50, 50 assertThat(ranges.stream().mapToLong(InternalRange.Bucket::getDocCount).toArray(), equalTo(new long[] { 44, 48, 58 })); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testMissingDateWithNumberField() throws IOException { @@ -487,12 +497,15 @@ public void testMissingDateWithNumberField() throws IOException { .addRange(-2d, 5d) .missing("1979-01-01T00:00:00"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + }, 
range -> fail("Should have thrown exception"), mappedField)); } public void testUnmappedWithMissingNumber() throws IOException { @@ -500,7 +513,10 @@ public void testUnmappedWithMissingNumber() throws IOException { .addRange(-2d, 5d) .missing(0L); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); @@ -510,7 +526,7 @@ public void testUnmappedWithMissingNumber() throws IOException { assertEquals(1, ranges.size()); assertEquals(2, ranges.get(0).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, fieldType); + }, mappedField); } public void testUnmappedWithMissingDate() throws IOException { @@ -518,18 +534,21 @@ public void testUnmappedWithMissingDate() throws IOException { .addRange(-2d, 5d) .missing("2020-02-13T10:11:12"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + }, range -> fail("Should have thrown exception"), mappedField)); } public void testUnsupportedType() { RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("range").field("not_a_number").addRange(-2d, 5d); - MappedFieldType fieldType = new 
KeywordFieldMapper.KeywordFieldType("not_a_number"); + MappedField mappedField = new MappedField("not_a_number", new KeywordFieldMapper.KeywordFieldType()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -538,7 +557,7 @@ public void testUnsupportedType() { new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); }, range -> fail("Should have thrown exception"), - fieldType + mappedField ) ); assertEquals("Field [not_a_number] of type [keyword] is not supported for aggregation [range]", e.getMessage()); @@ -549,12 +568,15 @@ public void testBadMissingField() { .addRange(-2d, 5d) .missing("bogus"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + }, range -> fail("Should have thrown exception"), mappedField)); } public void testUnmappedWithBadMissingField() { @@ -562,12 +584,15 @@ public void testUnmappedWithBadMissingField() { .addRange(-2d, 5d) .missing("bogus"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 7))); 
iw.addDocument(singleton(new NumericDocValuesField(NUMBER_FIELD_NAME, 1))); - }, range -> fail("Should have thrown exception"), fieldType)); + }, range -> fail("Should have thrown exception"), mappedField)); } public void testSubAggCollectsFromSingleBucketIfOneRange() throws IOException { @@ -623,7 +648,7 @@ public void testOverlappingRanges() throws IOException { assertThat(ranges.get(2).getTo(), equalTo(20d)); assertThat(ranges.get(2).getDocCount(), equalTo(1L)); assertTrue(AggregationInspectionHelper.hasValue(range)); - }, new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER)); + }, new MappedField(NUMBER_FIELD_NAME, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER))); } /** @@ -669,7 +694,7 @@ public void execute() { .entry("non-singletons", 0) ) ); - }, new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER)); + }, new MappedField(NUMBER_FIELD_NAME, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER))); } /** @@ -684,8 +709,11 @@ public void execute() { emit((long) getDoc().get(NUMBER_FIELD_NAME).get(0)); } }; - MappedFieldType dummyFt = new LongScriptFieldType("dummy", scriptFactory, new Script("test"), Map.of()); - MappedFieldType numberFt = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField dummyF = new MappedField("dummy", new LongScriptFieldType(scriptFactory, new Script("test"), Map.of())); + MappedField numberF = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); debugTestCase( new RangeAggregationBuilder("r").field("dummy").addRange(0, 1).addRange(1, 2).addRange(2, 3), new MatchAllDocsQuery(), @@ -718,8 +746,8 @@ public void execute() { ) ); }, - dummyFt, - numberFt + dummyF, + numberF ); } @@ -728,24 +756,26 @@ private void testCase( CheckedConsumer buildIndex, Consumer>> verify ) throws 
IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + MappedField mappedField = new MappedField( NUMBER_FIELD_NAME, - NumberFieldMapper.NumberType.INTEGER, - randomBoolean(), - randomBoolean(), - true, - false, - null, - Collections.emptyMap(), - null, - false, - null + new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.INTEGER, + randomBoolean(), + randomBoolean(), + true, + false, + null, + Collections.emptyMap(), + null, + false, + null + ) ); RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("test_range_agg"); aggregationBuilder.field(NUMBER_FIELD_NAME); aggregationBuilder.addRange(0d, 5d); aggregationBuilder.addRange(10d, 20d); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + testCase(aggregationBuilder, query, buildIndex, verify, mappedField); } private void simpleTestCase( @@ -753,12 +783,15 @@ private void simpleTestCase( Query query, Consumer>> verify ) throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField( + NUMBER_FIELD_NAME, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField(NUMBER_FIELD_NAME, 7))); iw.addDocument(singleton(new SortedNumericDocValuesField(NUMBER_FIELD_NAME, 2))); iw.addDocument(singleton(new SortedNumericDocValuesField(NUMBER_FIELD_NAME, 3))); - }, verify, fieldType); + }, verify, mappedField); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index 71a6e9c06dd21..06b4421419467 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues.DoublesSupplier; import org.elasticsearch.index.fielddata.plain.SortedDoublesIndexFieldData; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.field.DelegateDocValuesField; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -86,7 +86,7 @@ private void writeBooks(RandomIndexWriter iw) throws IOException { public void testDiversifiedSampler() throws Exception { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); - MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType("genre"); + MappedField genreField = new MappedField("genre", new KeywordFieldMapper.KeywordFieldType()); writeBooks(indexWriter); indexWriter.close(); IndexReader indexReader = DirectoryReader.open(directory); @@ -97,16 +97,16 @@ public void testDiversifiedSampler() throws Exception { assertEquals("0805080481", terms.getBuckets().get(0).getKeyAsString()); assertEquals("0812550706", terms.getBuckets().get(1).getKeyAsString()); }; - testCase(indexSearcher, genreFieldType, "map", verify); - testCase(indexSearcher, genreFieldType, "global_ordinals", verify); - testCase(indexSearcher, genreFieldType, "bytes_hash", verify); + testCase(indexSearcher, genreField, "map", verify); + testCase(indexSearcher, genreField, "global_ordinals", verify); + testCase(indexSearcher, genreField, "bytes_hash", verify); - genreFieldType = new NumberFieldMapper.NumberFieldType("genre_id", NumberFieldMapper.NumberType.LONG); - 
testCase(indexSearcher, genreFieldType, null, verify); + genreField = new MappedField("genre_id", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + testCase(indexSearcher, genreField, null, verify); // wrong field: - genreFieldType = new KeywordFieldMapper.KeywordFieldType("wrong_field"); - testCase(indexSearcher, genreFieldType, null, result -> { + genreField = new MappedField("wrong_field", new KeywordFieldMapper.KeywordFieldType()); + testCase(indexSearcher, genreField, null, result -> { Terms terms = result.getAggregations().get("terms"); assertEquals(1, terms.getBuckets().size()); assertEquals("0805080481", terms.getBuckets().get(0).getKeyAsString()); @@ -124,7 +124,7 @@ public void testRidiculousSize() throws Exception { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = new IndexSearcher(indexReader); - MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType("genre"); + MappedField genreField = new MappedField("genre", new KeywordFieldMapper.KeywordFieldType()); Consumer verify = result -> { Terms terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), greaterThan(0)); @@ -132,38 +132,34 @@ public void testRidiculousSize() throws Exception { try { // huge shard_size - testCase(indexSearcher, genreFieldType, "map", verify, Integer.MAX_VALUE, 1); - testCase(indexSearcher, genreFieldType, "global_ordinals", verify, Integer.MAX_VALUE, 1); - testCase(indexSearcher, genreFieldType, "bytes_hash", verify, Integer.MAX_VALUE, 1); + testCase(indexSearcher, genreField, "map", verify, Integer.MAX_VALUE, 1); + testCase(indexSearcher, genreField, "global_ordinals", verify, Integer.MAX_VALUE, 1); + testCase(indexSearcher, genreField, "bytes_hash", verify, Integer.MAX_VALUE, 1); // huge maxDocsPerValue - testCase(indexSearcher, genreFieldType, "map", verify, 100, Integer.MAX_VALUE); - testCase(indexSearcher, genreFieldType, "global_ordinals", verify, 100, 
Integer.MAX_VALUE); - testCase(indexSearcher, genreFieldType, "bytes_hash", verify, 100, Integer.MAX_VALUE); + testCase(indexSearcher, genreField, "map", verify, 100, Integer.MAX_VALUE); + testCase(indexSearcher, genreField, "global_ordinals", verify, 100, Integer.MAX_VALUE); + testCase(indexSearcher, genreField, "bytes_hash", verify, 100, Integer.MAX_VALUE); } finally { indexReader.close(); directory.close(); } } - private void testCase( - IndexSearcher indexSearcher, - MappedFieldType genreFieldType, - String executionHint, - Consumer verify - ) throws IOException { - testCase(indexSearcher, genreFieldType, executionHint, verify, 100, 1); + private void testCase(IndexSearcher indexSearcher, MappedField genreField, String executionHint, Consumer verify) + throws IOException { + testCase(indexSearcher, genreField, executionHint, verify, 100, 1); } private void testCase( IndexSearcher indexSearcher, - MappedFieldType genreFieldType, + MappedField genreField, String executionHint, Consumer verify, int shardSize, int maxDocsPerValue ) throws IOException { - MappedFieldType idFieldType = new KeywordFieldMapper.KeywordFieldType("id"); + MappedField idField = new MappedField("id", new KeywordFieldMapper.KeywordFieldType()); SortedDoublesIndexFieldData fieldData = new SortedDoublesIndexFieldData( "price", @@ -175,13 +171,13 @@ private void testCase( new FieldValueFactorFunction("price", 1, FieldValueFactorFunction.Modifier.RECIPROCAL, null, fieldData) ); - DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreFieldType.name()) + DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreField.name()) .executionHint(executionHint) .maxDocsPerValue(maxDocsPerValue) .shardSize(shardSize) .subAggregation(new TermsAggregationBuilder("terms").field("id")); - InternalSampler result = searchAndReduce(indexSearcher, query, builder, genreFieldType, idFieldType); + InternalSampler result = 
searchAndReduce(indexSearcher, query, builder, genreField, idField); verify.accept(result); } @@ -192,14 +188,14 @@ public void testDiversifiedSampler_noDocs() throws Exception { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = new IndexSearcher(indexReader); - MappedFieldType idFieldType = new KeywordFieldMapper.KeywordFieldType("id"); + MappedField idField = new MappedField("id", new KeywordFieldMapper.KeywordFieldType()); - MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType("genre"); + MappedField genreField = new MappedField("genre", new KeywordFieldMapper.KeywordFieldType()); - DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreFieldType.name()) + DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name").field(genreField.name()) .subAggregation(new TermsAggregationBuilder("terms").field("id")); - InternalSampler result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, genreFieldType, idFieldType); + InternalSampler result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, genreField, idField); Terms terms = result.getAggregations().get("terms"); assertEquals(0, terms.getBuckets().size()); indexReader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java index 2a9f8188c3dd2..dbe85f828b852 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java @@ -20,7 +20,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; -import org.elasticsearch.index.mapper.MappedFieldType; +import 
org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; @@ -42,8 +42,8 @@ public class SamplerAggregatorTests extends AggregatorTestCase { * Uses the sampler aggregation to find the minimum value of a field out of the top 3 scoring documents in a search. */ public void testSampler() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); - MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType("int", NumberFieldMapper.NumberType.LONG); + MappedField textField = new MappedField("text", new TextFieldType()); + MappedField numericField = new MappedField("int", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); @@ -69,8 +69,8 @@ public void testSampler() throws IOException { searcher, new TermQuery(new Term("text", "good")), aggBuilder, - textFieldType, - numericFieldType + textField, + numericField ); Min min = sampler.getAggregations().get("min"); assertEquals(5.0, min.value(), 0); @@ -80,8 +80,8 @@ public void testSampler() throws IOException { } public void testRidiculousSize() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); - MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType("int", NumberFieldMapper.NumberType.LONG); + MappedField textField = new MappedField("text", new TextFieldType()); + MappedField numericField = new MappedField("int", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); @@ -108,8 +108,8 @@ public void testRidiculousSize() throws IOException { searcher, new TermQuery(new Term("text", "good")), aggBuilder, - textFieldType, - 
numericFieldType + textField, + numericField ); Min min = sampler.getAggregations().get("min"); assertEquals(3.0, min.value(), 0); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java index 3b2fecfc12c32..b2c12bdfc010a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java @@ -19,7 +19,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Numbers; import org.elasticsearch.index.mapper.BinaryFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -154,7 +154,7 @@ private void testSearchCase( configure.accept(aggregationBuilder); } - MappedFieldType binaryFieldType = new BinaryFieldMapper.BinaryFieldType(BINARY_FIELD); + MappedField binaryFieldType = new MappedField(BINARY_FIELD, new BinaryFieldMapper.BinaryFieldType()); InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, binaryFieldType); verify.accept(rareTerms); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java index 0c6616737c854..5df63ff6663a0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java @@ -20,7 +20,7 @@ import 
org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.support.ValueType; @@ -97,18 +97,16 @@ private void testSearchCase( Consumer> verify, ValueType valueType ) throws IOException { - MappedFieldType keywordFieldType = new KeywordFieldMapper.KeywordFieldType( + MappedField keywordField = new MappedField( KEYWORD_FIELD, - randomBoolean(), - true, - Collections.emptyMap() + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) ); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new Document(); for (String value : dataset) { document.add(new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef(value))); - if (keywordFieldType.isIndexed()) { + if (keywordField.isIndexed()) { document.add(new Field(KEYWORD_FIELD, new BytesRef(value), KeywordFieldMapper.Defaults.FIELD_TYPE)); } indexWriter.addDocument(document); @@ -127,7 +125,7 @@ private void testSearchCase( configure.accept(aggregationBuilder); } - InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, keywordFieldType); + InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, keywordField); verify.accept(rareTerms); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java index 190902db7186f..5778eddc88f2f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java @@ -18,7 +18,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -164,9 +164,12 @@ private void testSearchCase( configure.accept(aggregationBuilder); } - MappedFieldType longFieldType = new NumberFieldMapper.NumberFieldType(LONG_FIELD, NumberFieldMapper.NumberType.LONG); + MappedField longField = new MappedField( + LONG_FIELD, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); - InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, longFieldType); + InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, longField); verify.accept(rareTerms); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 6048d4760a115..87ff84491bac0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import 
org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ObjectMapper; @@ -240,15 +240,18 @@ public void testUnmapped() throws Exception { document.add(new SortedDocValuesField("string", new BytesRef("a"))); document.add(new NumericDocValuesField("long", 0L)); indexWriter.addDocument(document); - MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("another_string"); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("another_long", NumberFieldMapper.NumberType.LONG); + MappedField mappedField1 = new MappedField("another_string", new KeywordFieldMapper.KeywordFieldType()); + MappedField mappedField2 = new MappedField( + "another_long", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); String[] fieldNames = new String[] { "string", "long" }; for (int i = 0; i < fieldNames.length; i++) { RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name").field(fieldNames[i]); - Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType1, fieldType2); + Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField1, mappedField2); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -276,12 +279,12 @@ public void testRangeField() throws Exception { doc.add(new BinaryDocValuesField("field", encodedRange)); indexWriter.addDocument(doc); } - MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType("field", rangeType); + MappedField mappedField = new MappedField("field", new RangeFieldMapper.RangeFieldType(rangeType)); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); RareTermsAggregationBuilder 
aggregationBuilder = new RareTermsAggregationBuilder("_name").field("field"); - expectThrows(IllegalArgumentException.class, () -> createAggregator(aggregationBuilder, indexSearcher, fieldType)); + expectThrows(IllegalArgumentException.class, () -> createAggregator(aggregationBuilder, indexSearcher, mappedField)); } } } @@ -363,9 +366,9 @@ public void testGlobalAggregationWithScore() throws IOException { ) ); - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); + MappedField mappedField = new MappedField("keyword", new KeywordFieldMapper.KeywordFieldType()); - InternalGlobal result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), globalBuilder, fieldType); + InternalGlobal result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), globalBuilder, mappedField); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(3)); for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { @@ -398,14 +401,17 @@ public void testWithNestedAggregations() throws IOException { NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object").subAggregation( new RareTermsAggregationBuilder("terms").field("nested_value").maxDocCount(1) ); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + "nested_value", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { InternalNested result = searchAndReduce( newIndexSearcher(indexReader), // match root document only new FieldExistsQuery(PRIMARY_TERM_NAME), nested, - fieldType + mappedField ); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(1)); @@ -437,7 +443,10 @@ public void 
testWithNestedScoringAggregations() throws IOException { ).storedField("_none_") ) ); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + "nested_value", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { if (withScore) { @@ -449,7 +458,7 @@ public void testWithNestedScoringAggregations() throws IOException { // match root document only new FieldExistsQuery(PRIMARY_TERM_NAME), nested, - fieldType + mappedField ) ); assertThat( @@ -466,7 +475,7 @@ public void testWithNestedScoringAggregations() throws IOException { // match root document only new FieldExistsQuery(PRIMARY_TERM_NAME), nested, - fieldType + mappedField ); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(2)); @@ -561,11 +570,11 @@ private A executeTestCase(Query query, List unsupportedMappedFieldTypes() { } public void testSignificance(SignificanceHeuristic heuristic) throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + TextFieldType textFieldType = new TextFieldType(); textFieldType.setFielddata(true); + MappedField textField = new MappedField("text", textFieldType); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(100); @@ -128,7 +128,7 @@ public void testSignificance(SignificanceHeuristic heuristic) throws IOException IndexSearcher searcher = new IndexSearcher(reader); // Search "odd" - SignificantStringTerms terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); + SignificantStringTerms terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textField); assertThat(terms.getSubsetSize(), equalTo(5L)); assertEquals(1, 
terms.getBuckets().size()); @@ -137,7 +137,7 @@ public void testSignificance(SignificanceHeuristic heuristic) throws IOException assertNotNull(terms.getBucketByKey("odd")); // Search even - terms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), sigAgg, textFieldType); + terms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), sigAgg, textField); assertThat(terms.getSubsetSize(), equalTo(5L)); assertEquals(1, terms.getBuckets().size()); @@ -147,7 +147,7 @@ public void testSignificance(SignificanceHeuristic heuristic) throws IOException // Search odd with regex includeexcludes sigAgg.includeExclude(new IncludeExclude("o.d", null, null, null)); - terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); + terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textField); assertThat(terms.getSubsetSize(), equalTo(5L)); assertEquals(1, terms.getBuckets().size()); assertNotNull(terms.getBucketByKey("odd")); @@ -160,7 +160,7 @@ public void testSignificance(SignificanceHeuristic heuristic) throws IOException sigAgg.includeExclude(new IncludeExclude(null, null, oddStrings, evenStrings)); sigAgg.significanceHeuristic(heuristic); - terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); + terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textField); assertThat(terms.getSubsetSize(), equalTo(5L)); assertEquals(1, terms.getBuckets().size()); assertNotNull(terms.getBucketByKey("odd")); @@ -170,7 +170,7 @@ public void testSignificance(SignificanceHeuristic heuristic) throws IOException assertNull(terms.getBucketByKey("regular")); sigAgg.includeExclude(new IncludeExclude(null, null, evenStrings, oddStrings)); - terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); + terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textField); 
assertThat(terms.getSubsetSize(), equalTo(5L)); assertEquals(0, terms.getBuckets().size()); assertNull(terms.getBucketByKey("odd")); @@ -196,8 +196,9 @@ public void testSignificance() throws IOException { * @throws IOException on test setup failure */ public void testSamplingConsistency() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + TextFieldType textFieldType = new TextFieldType(); textFieldType.setFielddata(true); + MappedField textField = new MappedField("text", textFieldType); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(10_000); @@ -227,7 +228,7 @@ public void testSamplingConsistency() throws IOException { // randomly select the query, but both should hit the same docs, which is all of them. randomBoolean() ? new MatchAllDocsQuery() : new TermQuery(new Term("text", "common")), randomSamplerAggregationBuilder, - textFieldType + textField ); SignificantStringTerms terms = randomSampler.getAggregations().get("sig_text"); assertThat(Strings.toString(terms), terms.subsetSize, equalTo(terms.supersetSize)); @@ -240,7 +241,7 @@ public void testSamplingConsistency() throws IOException { * fields */ public void testNumericSignificance() throws IOException { - NumberFieldType longFieldType = new NumberFieldMapper.NumberFieldType("long_field", NumberFieldMapper.NumberType.LONG); + MappedField longField = new MappedField("long_field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); @@ -272,7 +273,7 @@ public void testNumericSignificance() throws IOException { IndexSearcher searcher = new IndexSearcher(reader); // Search "odd" - SignificantLongTerms terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigNumAgg, longFieldType); + SignificantLongTerms terms = searchAndReduce(searcher, new TermQuery(new 
Term("text", "odd")), sigNumAgg, longField); assertEquals(1, terms.getBuckets().size()); assertThat(terms.getSubsetSize(), equalTo(5L)); @@ -280,7 +281,7 @@ public void testNumericSignificance() throws IOException { assertNull(terms.getBucketByKey(Long.toString(COMMON_VALUE))); assertNotNull(terms.getBucketByKey(Long.toString(ODD_VALUE))); - terms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), sigNumAgg, longFieldType); + terms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), sigNumAgg, longField); assertEquals(1, terms.getBuckets().size()); assertThat(terms.getSubsetSize(), equalTo(5L)); @@ -296,8 +297,9 @@ public void testNumericSignificance() throws IOException { * Uses the significant terms aggregation on an index with unmapped field */ public void testUnmapped() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + TextFieldType textFieldType = new TextFieldType(); textFieldType.setFielddata(true); + MappedField textField = new MappedField("text", textFieldType); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(100); @@ -314,7 +316,7 @@ public void testUnmapped() throws IOException { IndexSearcher searcher = new IndexSearcher(reader); // Search "odd" - SignificantTerms terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); + SignificantTerms terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textField); assertEquals(0, terms.getBuckets().size()); assertNull(terms.getBucketByKey("even")); @@ -331,7 +333,7 @@ public void testUnmapped() throws IOException { public void testRangeField() throws IOException { RangeType rangeType = RangeType.DOUBLE; final String fieldName = "rangeField"; - MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); + MappedField mappedField = new MappedField(fieldName, new 
RangeFieldMapper.RangeFieldType(rangeType)); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); @@ -354,14 +356,15 @@ public void testRangeField() throws IOException { try (IndexReader reader = DirectoryReader.open(w)) { IndexSearcher indexSearcher = newIndexSearcher(reader); - expectThrows(IllegalArgumentException.class, () -> createAggregator(sigAgg, indexSearcher, fieldType)); + expectThrows(IllegalArgumentException.class, () -> createAggregator(sigAgg, indexSearcher, mappedField)); } } } public void testFieldAlias() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + TextFieldType textFieldType = new TextFieldType(); textFieldType.setFielddata(true); + MappedField textField = new MappedField("text", textFieldType); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(100); @@ -388,19 +391,14 @@ public void testFieldAlias() throws IOException { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - SignificantTerms evenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), agg, textFieldType); - SignificantTerms aliasEvenTerms = searchAndReduce( - searcher, - new TermQuery(new Term("text", "even")), - aliasAgg, - textFieldType - ); + SignificantTerms evenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), agg, textField); + SignificantTerms aliasEvenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aliasAgg, textField); assertFalse(evenTerms.getBuckets().isEmpty()); assertEquals(evenTerms, aliasEvenTerms); - SignificantTerms oddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), agg, textFieldType); - SignificantTerms aliasOddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aliasAgg, textFieldType); + SignificantTerms oddTerms = 
searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), agg, textField); + SignificantTerms aliasOddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aliasAgg, textField); assertFalse(oddTerms.getBuckets().isEmpty()); assertEquals(oddTerms, aliasOddTerms); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java index 83fc0e6cbd36d..416f97fb2a99a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTextAggregatorTests.java @@ -27,7 +27,7 @@ import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MockFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; @@ -52,7 +52,7 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase { @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new SignificantTextAggregationBuilder("foo", fieldName); } @@ -72,7 +72,7 @@ protected List unsupportedMappedFieldTypes() { * Uses the significant text aggregation to find the keywords in text fields */ public void testSignificance() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + MappedField mappedField = new MappedField("text", new TextFieldType()); IndexWriterConfig 
indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(100); @@ -91,7 +91,7 @@ public void testSignificance() throws IOException { IndexSearcher searcher = new IndexSearcher(reader); // Search "odd" which should have no duplication - InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, textFieldType); + InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, mappedField); SignificantTerms terms = sampler.getAggregations().get("sig_text"); assertNull(terms.getBucketByKey("even")); @@ -100,7 +100,7 @@ public void testSignificance() throws IOException { assertNotNull(terms.getBucketByKey("odd")); // Search "even" which will have duplication - sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, textFieldType); + sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, mappedField); terms = sampler.getAggregations().get("sig_text"); assertNull(terms.getBucketByKey("odd")); @@ -121,7 +121,7 @@ public void testSignificance() throws IOException { * Uses the significant text aggregation to find the keywords in text fields and include/exclude selected terms */ public void testIncludeExcludes() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + MappedField mappedField = new MappedField("text", new TextFieldType()); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(100); @@ -145,7 +145,7 @@ public void testIncludeExcludes() throws IOException { sigAgg.sourceFieldNames(Arrays.asList(new String[] { "json_only_field" })); } // Search "even" which should have duplication - InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, textFieldType); + InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new 
Term("text", "even")), aggBuilder, mappedField); SignificantTerms terms = sampler.getAggregations().get("sig_text"); assertNull(terms.getBucketByKey("even")); @@ -163,7 +163,7 @@ public void testIncludeExcludes() throws IOException { sigAgg.sourceFieldNames(Arrays.asList(new String[] { "json_only_field" })); } // Search "even" which should have duplication - InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, textFieldType); + InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, mappedField); SignificantTerms terms = sampler.getAggregations().get("sig_text"); assertNotNull(terms.getBucketByKey("even")); @@ -176,7 +176,7 @@ public void testIncludeExcludes() throws IOException { } public void testMissingField() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + MappedField mappedField = new MappedField("text", new TextFieldType()); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); @@ -193,7 +193,7 @@ public void testMissingField() throws IOException { try (IndexReader reader = DirectoryReader.open(w)) { IndexSearcher searcher = new IndexSearcher(reader); - InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, textFieldType); + InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, mappedField); SignificantTerms terms = sampler.getAggregations().get("sig_text"); assertTrue(terms.getBuckets().isEmpty()); } @@ -201,7 +201,7 @@ public void testMissingField() throws IOException { } public void testFieldAlias() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + MappedField mappedField = new MappedField("text", new TextFieldType()); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(100); 
@@ -225,12 +225,12 @@ public void testFieldAlias() throws IOException { SamplerAggregationBuilder samplerAgg = sampler("sampler").subAggregation(agg); SamplerAggregationBuilder aliasSamplerAgg = sampler("sampler").subAggregation(aliasAgg); - InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), samplerAgg, textFieldType); + InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), samplerAgg, mappedField); InternalSampler aliasSampler = searchAndReduce( searcher, new TermQuery(new Term("text", "odd")), aliasSamplerAgg, - textFieldType + mappedField ); SignificantTerms terms = sampler.getAggregations().get("sig_text"); @@ -238,8 +238,8 @@ public void testFieldAlias() throws IOException { assertFalse(terms.getBuckets().isEmpty()); assertEquals(terms, aliasTerms); - sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), samplerAgg, textFieldType); - aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aliasSamplerAgg, textFieldType); + sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), samplerAgg, mappedField); + aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aliasSamplerAgg, mappedField); terms = sampler.getAggregations().get("sig_text"); aliasTerms = aliasSampler.getAggregations().get("sig_text"); @@ -253,7 +253,7 @@ public void testFieldAlias() throws IOException { } public void testInsideTermsAgg() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + MappedField mappedField = new MappedField("text", new TextFieldType()); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(100); @@ -268,7 +268,7 @@ public void testInsideTermsAgg() throws IOException { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - StringTerms 
terms = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, textFieldType, keywordField("kwd")); + StringTerms terms = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField, keywordField("kwd")); SignificantTerms sigOdd = terms.getBucketByKey("odd").getAggregations().get("sig_text"); assertNull(sigOdd.getBucketByKey("even")); assertNull(sigOdd.getBucketByKey("duplicate")); @@ -310,7 +310,7 @@ private void indexDocuments(IndexWriter writer) throws IOException { * Test documents with arrays of text */ public void testSignificanceOnTextArrays() throws IOException { - TextFieldType textFieldType = new TextFieldType("text"); + MappedField mappedField = new MappedField("text", new TextFieldType()); IndexWriterConfig indexWriterConfig = newIndexWriterConfig(new StandardAnalyzer()); indexWriterConfig.setMaxBufferedDocs(100); @@ -330,7 +330,7 @@ public void testSignificanceOnTextArrays() throws IOException { try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - searchAndReduce(searcher, new TermQuery(new Term("text", "foo")), sigAgg, textFieldType); + searchAndReduce(searcher, new TermQuery(new Term("text", "foo")), sigAgg, mappedField); // No significant results to be found in this test - only checking we don't end up // with the internal exception discovered in issue https://github.com/elastic/elasticsearch/issues/25029 } @@ -338,11 +338,11 @@ public void testSignificanceOnTextArrays() throws IOException { } @Override - protected FieldMapper buildMockFieldMapper(MappedFieldType ft) { - return new MockFieldMapper(ft) { + protected FieldMapper buildMockFieldMapper(MappedField mappedField) { + return new MockFieldMapper(mappedField) { @Override public Map indexAnalyzers() { - return Map.of(ft.name(), ft.getTextSearchInfo().searchAnalyzer()); + return Map.of(mappedField.name(), 
mappedField.getTextSearchInfo().searchAnalyzer()); } }; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 3aab0c7983e93..1578d27f08c3c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -53,7 +53,7 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.elasticsearch.index.mapper.KeywordScriptFieldType; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; @@ -192,7 +192,7 @@ protected A createAggregator(AggregationBuilder aggregati } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new TermsAggregationBuilder("foo").field(fieldName); } @@ -219,37 +219,37 @@ public void testUsesGlobalOrdinalsByDefault() throws Exception { TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.STRING) .field("string"); - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string"); + MappedField mappedField = new MappedField("string", new KeywordFieldMapper.KeywordFieldType()); - TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); 
assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); GlobalOrdinalsStringTermsAggregator globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; assertThat(globalAgg.descriptCollectionStrategy(), equalTo("dense")); // Infers depth_first because the maxOrd is 0 which is less than the size aggregationBuilder.subAggregation(AggregationBuilders.cardinality("card").field("string")); - aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; assertThat(globalAgg.collectMode, equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); assertThat(globalAgg.descriptCollectionStrategy(), equalTo("remap using single bucket ords")); aggregationBuilder.collectMode(Aggregator.SubAggCollectionMode.DEPTH_FIRST); - aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; assertThat(globalAgg.collectMode, equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); assertThat(globalAgg.descriptCollectionStrategy(), equalTo("remap using single bucket ords")); aggregationBuilder.collectMode(Aggregator.SubAggCollectionMode.BREADTH_FIRST); - aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; assertThat(globalAgg.collectMode, equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST)); assertThat(globalAgg.descriptCollectionStrategy(), 
equalTo("dense")); aggregationBuilder.order(BucketOrder.aggregation("card", true)); - aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; assertThat(globalAgg.descriptCollectionStrategy(), equalTo("remap using single bucket ords")); @@ -259,15 +259,18 @@ public void testUsesGlobalOrdinalsByDefault() throws Exception { } public void testSimple() throws Exception { - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string", randomBoolean(), true, Collections.emptyMap()); + MappedField mappedField = new MappedField( + "string", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").executionHint( randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString() ).field("string").order(BucketOrder.key(true)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(doc(fieldType, "a", "b")); - iw.addDocument(doc(fieldType, "", "c", "a")); - iw.addDocument(doc(fieldType, "b", "d")); - iw.addDocument(doc(fieldType, "")); + iw.addDocument(doc(mappedField, "a", "b")); + iw.addDocument(doc(mappedField, "", "c", "a")); + iw.addDocument(doc(mappedField, "b", "d")); + iw.addDocument(doc(mappedField, "")); }, (InternalTerms result) -> { assertEquals(5, result.getBuckets().size()); assertEquals("", result.getBuckets().get(0).getKeyAsString()); @@ -281,11 +284,11 @@ public void testSimple() throws Exception { assertEquals("d", result.getBuckets().get(4).getKeyAsString()); assertEquals(1L, result.getBuckets().get(4).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, fieldType); + }, mappedField); } public void testStringShardMinDocCount() throws 
IOException { - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string", true, true, Collections.emptyMap()); + MappedField mappedField = new MappedField("string", new KeywordFieldMapper.KeywordFieldType(true, true, Collections.emptyMap())); for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) { TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field("string") .executionHint(executionMode.toString()) @@ -296,7 +299,12 @@ public void testStringShardMinDocCount() throws IOException { testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { // force single shard/segment iw.addDocuments( - Arrays.asList(doc(fieldType, "a", "b"), doc(fieldType, "", "c", "d"), doc(fieldType, "b", "d"), doc(fieldType, "b")) + Arrays.asList( + doc(mappedField, "a", "b"), + doc(mappedField, "", "c", "d"), + doc(mappedField, "b", "d"), + doc(mappedField, "b") + ) ); }, (InternalTerms result) -> { assertEquals(2, result.getBuckets().size()); @@ -304,12 +312,15 @@ public void testStringShardMinDocCount() throws IOException { assertEquals(3L, result.getBuckets().get(0).getDocCount()); assertEquals("d", result.getBuckets().get(1).getKeyAsString()); assertEquals(2L, result.getBuckets().get(1).getDocCount()); - }, fieldType); + }, mappedField); } } public void testManyTerms() throws Exception { - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string", randomBoolean(), true, Collections.emptyMap()); + MappedField mappedField = new MappedField( + "string", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").executionHint(randomHint()).field("string"); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { /* @@ -321,9 +332,9 @@ public void testManyTerms() throws Exception { List> docs = new ArrayList<>(); for (int i = 0; i < 
TermsAggregatorFactory.MAX_ORDS_TO_TRY_FILTERS - 200; i++) { String s = String.format(Locale.ROOT, "b%03d", i); - docs.add(doc(fieldType, s)); + docs.add(doc(mappedField, s)); if (i % 100 == 7) { - docs.add(doc(fieldType, s)); + docs.add(doc(mappedField, s)); } } iw.addDocuments(docs); @@ -332,12 +343,15 @@ public void testManyTerms() throws Exception { result.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), equalTo(List.of("b007", "b107", "b207", "b307", "b407", "b507", "b607", "b707", "b000", "b001")) ); - }, fieldType); + }, mappedField); } public void testManyTermsOrderBySubAgg() throws Exception { - MappedFieldType kft = new KeywordFieldMapper.KeywordFieldType("string", randomBoolean(), true, Collections.emptyMap()); - MappedFieldType lft = new NumberFieldType("long", NumberType.LONG); + MappedField mappedField = new MappedField( + "string", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); + MappedField lft = new MappedField("long", new NumberFieldType(NumberType.LONG)); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").executionHint(randomHint()) .order(BucketOrder.aggregation("max", false)) @@ -351,7 +365,7 @@ public void testManyTermsOrderBySubAgg() throws Exception { List> docs = new ArrayList<>(); for (int i = 0; i < TermsAggregatorFactory.MAX_ORDS_TO_TRY_FILTERS - 200; i++) { String s = String.format(Locale.ROOT, "b%03d", i); - List doc = doc(kft, s); + List doc = doc(mappedField, s); doc.add(new SortedNumericDocValuesField("long", i)); docs.add(doc); } @@ -363,7 +377,7 @@ public void testManyTermsOrderBySubAgg() throws Exception { ).mapToObj(l -> String.format(Locale.ROOT, "b%03d", l)).collect(toList()); Collections.reverse(expected); assertThat(result.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), equalTo(expected)); - }, kft, lft); + }, mappedField, lft); } /** @@ -371,8 +385,14 @@ public void testManyTermsOrderBySubAgg() 
throws Exception { * a {@link TooManyBucketsException} if we built the sub-aggs eagerly. */ public void testDelaysSubAggs() throws Exception { - MappedFieldType s1ft = new KeywordFieldMapper.KeywordFieldType("string1", randomBoolean(), true, Collections.emptyMap()); - MappedFieldType s2ft = new KeywordFieldMapper.KeywordFieldType("string2", randomBoolean(), true, Collections.emptyMap()); + MappedField s1f = new MappedField( + "string1", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); + MappedField s2f = new MappedField( + "string2", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").executionHint(randomHint()) .field("string1") .shardSize(1000) @@ -384,8 +404,8 @@ public void testDelaysSubAggs() throws Exception { for (int i2 = 0; i2 < 50; i2++) { String s2 = String.format(Locale.ROOT, "b%03d", i2); List doc = new ArrayList<>(); - doc.addAll(doc(s1ft, s1)); - doc.addAll(doc(s2ft, s2)); + doc.addAll(doc(s1f, s1)); + doc.addAll(doc(s2f, s2)); iw.addDocument(doc); if (i1 % 100 == 7) { iw.addDocument(doc); @@ -399,7 +419,7 @@ public void testDelaysSubAggs() throws Exception { * lets us create a fairly small test index. */ int maxBuckets = 200; - StringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggregationBuilder, maxBuckets, s1ft, s2ft); + StringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggregationBuilder, maxBuckets, s1f, s2f); assertThat( result.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), equalTo(List.of("b007", "b107", "b207", "b307", "b407", "b507", "b607", "b707", "b807", "b907", "b000")) @@ -407,28 +427,34 @@ public void testDelaysSubAggs() throws Exception { }); } - private List doc(MappedFieldType ft, String... values) { + private List doc(MappedField mappedField, String... 
values) { List doc = new ArrayList(); for (String v : values) { BytesRef bytes = new BytesRef(v); - doc.add(new SortedSetDocValuesField(ft.name(), bytes)); - if (ft.isIndexed()) { - doc.add(new KeywordField(ft.name(), bytes, KeywordFieldMapper.Defaults.FIELD_TYPE)); + doc.add(new SortedSetDocValuesField(mappedField.name(), bytes)); + if (mappedField.isIndexed()) { + doc.add(new KeywordField(mappedField.name(), bytes, KeywordFieldMapper.Defaults.FIELD_TYPE)); } } return doc; } public void testStringIncludeExclude() throws Exception { - MappedFieldType ft1 = new KeywordFieldMapper.KeywordFieldType("mv_field", randomBoolean(), true, Collections.emptyMap()); - MappedFieldType ft2 = new KeywordFieldMapper.KeywordFieldType("sv_field", randomBoolean(), true, Collections.emptyMap()); + MappedField f1 = new MappedField( + "mv_field", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); + MappedField f2 = new MappedField( + "sv_field", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); CheckedConsumer buildIndex = iw -> { - iw.addDocument(doc(ft1, ft2, "val000", "val001", "val001")); - iw.addDocument(doc(ft1, ft2, "val002", "val003", "val003")); - iw.addDocument(doc(ft1, ft2, "val004", "val005", "val005")); - iw.addDocument(doc(ft1, ft2, "val006", "val007", "val007")); - iw.addDocument(doc(ft1, ft2, "val008", "val009", "val009")); - iw.addDocument(doc(ft1, ft2, "val010", "val011", "val011")); + iw.addDocument(doc(f1, f2, "val000", "val001", "val001")); + iw.addDocument(doc(f1, f2, "val002", "val003", "val003")); + iw.addDocument(doc(f1, f2, "val004", "val005", "val005")); + iw.addDocument(doc(f1, f2, "val006", "val007", "val007")); + iw.addDocument(doc(f1, f2, "val008", "val009", "val009")); + iw.addDocument(doc(f1, f2, "val010", "val011", "val011")); }; String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); @@ -460,7 +486,7 @@ public void 
testStringIncludeExclude() throws Exception { assertEquals("val009", result.getBuckets().get(9).getKeyAsString()); assertEquals(1L, result.getBuckets().get(9).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, ft2); + }, f1, f2); builder = new TermsAggregationBuilder("_name").executionHint(executionHint) .includeExclude(new IncludeExclude("val00.+", null, null, null)) @@ -479,7 +505,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals("val009", result.getBuckets().get(4).getKeyAsString()); assertEquals(1L, result.getBuckets().get(4).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, ft2); + }, f1, f2); builder = new TermsAggregationBuilder("_name").executionHint(executionHint) .includeExclude(new IncludeExclude("val00.+", null, null, null)) @@ -498,7 +524,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals("val009", result.getBuckets().get(4).getKeyAsString()); assertEquals(1L, result.getBuckets().get(4).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, ft2); + }, f1, f2); builder = new TermsAggregationBuilder("_name").executionHint(executionHint) .includeExclude(new IncludeExclude("val00.+", "(val000|val001)", null, null)) @@ -523,7 +549,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals("val009", result.getBuckets().get(7).getKeyAsString()); assertEquals(1L, result.getBuckets().get(7).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, ft2); + }, f1, f2); builder = new TermsAggregationBuilder("_name").executionHint(executionHint) .includeExclude(new IncludeExclude(null, "val00.+", null, null)) @@ -536,7 +562,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals("val011", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, 
ft2); + }, f1, f2); builder = new TermsAggregationBuilder("_name").executionHint(executionHint) .includeExclude(new IncludeExclude(null, null, new TreeSet<>(Set.of(new BytesRef("val000"), new BytesRef("val010"))), null)) @@ -549,7 +575,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals("val010", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, ft2); + }, f1, f2); builder = new TermsAggregationBuilder("_name").executionHint(executionHint) .includeExclude( @@ -582,7 +608,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals("val010", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, ft2); + }, f1, f2); builder = new TermsAggregationBuilder("_name").executionHint(executionHint) .includeExclude( @@ -613,7 +639,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals("val009", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, ft2); + }, f1, f2); builder = new TermsAggregationBuilder("_name").executionHint(executionHint) .includeExclude( @@ -633,20 +659,20 @@ public void testStringIncludeExclude() throws Exception { assertEquals("val002", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(result)); - }, ft1, ft2); + }, f1, f2); } - private List doc(MappedFieldType ft1, MappedFieldType ft2, String f1v1, String f1v2, String f2v) { + private List doc(MappedField f1, MappedField f2, String f1v1, String f1v2, String f2v) { List doc = new ArrayList(); - doc.add(new SortedSetDocValuesField(ft1.name(), new BytesRef(f1v1))); - doc.add(new 
SortedSetDocValuesField(ft1.name(), new BytesRef(f1v2))); - if (ft1.isIndexed()) { - doc.add(new KeywordField(ft1.name(), new BytesRef(f1v1), KeywordFieldMapper.Defaults.FIELD_TYPE)); - doc.add(new KeywordField(ft1.name(), new BytesRef(f1v2), KeywordFieldMapper.Defaults.FIELD_TYPE)); + doc.add(new SortedSetDocValuesField(f1.name(), new BytesRef(f1v1))); + doc.add(new SortedSetDocValuesField(f1.name(), new BytesRef(f1v2))); + if (f1.isIndexed()) { + doc.add(new KeywordField(f1.name(), new BytesRef(f1v1), KeywordFieldMapper.Defaults.FIELD_TYPE)); + doc.add(new KeywordField(f1.name(), new BytesRef(f1v2), KeywordFieldMapper.Defaults.FIELD_TYPE)); } - doc.add(new SortedDocValuesField(ft2.name(), new BytesRef(f2v))); - if (ft2.isIndexed()) { - doc.add(new KeywordField(ft2.name(), new BytesRef(f2v), KeywordFieldMapper.Defaults.FIELD_TYPE)); + doc.add(new SortedDocValuesField(f2.name(), new BytesRef(f2v))); + if (f2.isIndexed()) { + doc.add(new KeywordField(f2.name(), new BytesRef(f2v), KeywordFieldMapper.Defaults.FIELD_TYPE)); } return doc; } @@ -681,7 +707,10 @@ public void testNumericIncludeExclude() throws Exception { indexWriter.addDocument(document); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("long_field", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField( + "long_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.LONG) @@ -689,7 +718,7 @@ public void testNumericIncludeExclude() throws Exception { .includeExclude(new IncludeExclude(null, null, new TreeSet<>(Set.of(new BytesRef("0"), new BytesRef("5"))), null)) .field("long_field") 
.order(BucketOrder.key(true)); - AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); + AggregationContext context = createAggregationContext(indexSearcher, null, mappedField); TermsAggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -707,7 +736,7 @@ public void testNumericIncludeExclude() throws Exception { .includeExclude(new IncludeExclude(null, null, null, new TreeSet<>(Set.of(new BytesRef("0"), new BytesRef("5"))))) .field("long_field") .order(BucketOrder.key(true)); - context = createAggregationContext(indexSearcher, null, fieldType); + context = createAggregationContext(indexSearcher, null, mappedField); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -724,7 +753,10 @@ public void testNumericIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(3).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue((InternalTerms) result)); - fieldType = new NumberFieldMapper.NumberFieldType("double_field", NumberFieldMapper.NumberType.DOUBLE); + mappedField = new MappedField( + "double_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) + ); aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.DOUBLE) .executionHint(executionHint) .includeExclude( @@ -732,7 +764,7 @@ public void testNumericIncludeExclude() throws Exception { ) .field("double_field") .order(BucketOrder.key(true)); - context = createAggregationContext(indexSearcher, null, fieldType); + context = createAggregationContext(indexSearcher, null, mappedField); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -752,7 +784,7 @@ public void testNumericIncludeExclude() throws 
Exception { ) .field("double_field") .order(BucketOrder.key(true)); - context = createAggregationContext(indexSearcher, null, fieldType); + context = createAggregationContext(indexSearcher, null, mappedField); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -774,7 +806,10 @@ public void testNumericIncludeExclude() throws Exception { } public void testStringTermsAggregator() throws Exception { - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("field", randomBoolean(), true, Collections.emptyMap()); + MappedField mappedField = new MappedField( + "field", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); BiFunction> luceneFieldFactory = (val, mv) -> { List result = new ArrayList<>(2); if (mv) { @@ -782,13 +817,18 @@ public void testStringTermsAggregator() throws Exception { } else { result.add(new SortedDocValuesField("field", new BytesRef(val))); } - if (fieldType.isIndexed()) { + if (mappedField.isIndexed()) { result.add(new KeywordField("field", new BytesRef(val), KeywordFieldMapper.Defaults.FIELD_TYPE)); } return result; }; - termsAggregator(ValueType.STRING, fieldType, i -> Integer.toString(i), String::compareTo, luceneFieldFactory); - termsAggregatorWithNestedMaxAgg(ValueType.STRING, fieldType, i -> Integer.toString(i), val -> luceneFieldFactory.apply(val, false)); + termsAggregator(ValueType.STRING, mappedField, i -> Integer.toString(i), String::compareTo, luceneFieldFactory); + termsAggregatorWithNestedMaxAgg( + ValueType.STRING, + mappedField, + i -> Integer.toString(i), + val -> luceneFieldFactory.apply(val, false) + ); } public void testLongTermsAggregator() throws Exception { @@ -799,9 +839,9 @@ public void testLongTermsAggregator() throws Exception { return List.of(new NumericDocValuesField("field", val)); } }; - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", 
NumberFieldMapper.NumberType.LONG); - termsAggregator(ValueType.LONG, fieldType, Integer::longValue, Long::compareTo, luceneFieldFactory); - termsAggregatorWithNestedMaxAgg(ValueType.LONG, fieldType, Integer::longValue, val -> luceneFieldFactory.apply(val, false)); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + termsAggregator(ValueType.LONG, mappedField, Integer::longValue, Long::compareTo, luceneFieldFactory); + termsAggregatorWithNestedMaxAgg(ValueType.LONG, mappedField, Integer::longValue, val -> luceneFieldFactory.apply(val, false)); } public void testDoubleTermsAggregator() throws Exception { @@ -812,13 +852,13 @@ public void testDoubleTermsAggregator() throws Exception { return List.of(new NumericDocValuesField("field", Double.doubleToRawLongBits(val))); } }; - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); - termsAggregator(ValueType.DOUBLE, fieldType, Integer::doubleValue, Double::compareTo, luceneFieldFactory); - termsAggregatorWithNestedMaxAgg(ValueType.DOUBLE, fieldType, Integer::doubleValue, val -> luceneFieldFactory.apply(val, false)); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); + termsAggregator(ValueType.DOUBLE, mappedField, Integer::doubleValue, Double::compareTo, luceneFieldFactory); + termsAggregatorWithNestedMaxAgg(ValueType.DOUBLE, mappedField, Integer::doubleValue, val -> luceneFieldFactory.apply(val, false)); } public void testIpTermsAggregator() throws Exception { - IpFieldMapper.IpFieldType fieldType = new IpFieldMapper.IpFieldType("field"); + MappedField mappedField = new MappedField("field", new IpFieldMapper.IpFieldType()); BiFunction> luceneFieldFactory = (val, mv) -> { List result = new ArrayList<>(2); if (mv) { @@ -826,7 +866,7 @@ public void testIpTermsAggregator() throws Exception { } else { 
result.add(new SortedDocValuesField("field", new BytesRef(InetAddressPoint.encode(val)))); } - if (fieldType.isIndexed()) { + if (mappedField.isIndexed()) { result.add(new InetAddressPoint("field", val)); } return result; @@ -837,12 +877,12 @@ public void testIpTermsAggregator() throws Exception { BytesRef b2 = new BytesRef(InetAddressPoint.encode(o2)); return b1.compareTo(b2); }; - termsAggregator(ValueType.IP, fieldType, i -> base[0] = InetAddressPoint.nextUp(base[0]), comparator, luceneFieldFactory); + termsAggregator(ValueType.IP, mappedField, i -> base[0] = InetAddressPoint.nextUp(base[0]), comparator, luceneFieldFactory); } private void termsAggregator( ValueType valueType, - MappedFieldType fieldType, + MappedField mappedField, Function valueFactory, Comparator keyComparator, BiFunction> luceneFieldFactory @@ -927,7 +967,7 @@ private void termsAggregator( .field("field") .order(bucketOrder); - AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType); + AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), mappedField); Aggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -946,7 +986,7 @@ private void termsAggregator( } if (multiValued == false) { - MappedFieldType filterFieldType = new KeywordFieldMapper.KeywordFieldType("include"); + MappedField filterField = new MappedField("include", new KeywordFieldMapper.KeywordFieldType()); aggregationBuilder = new FilterAggregationBuilder("_name1", QueryBuilders.termQuery("include", "yes")); aggregationBuilder.subAggregation( new TermsAggregationBuilder("_name2").userValueTypeHint(valueType) @@ -955,7 +995,7 @@ private void termsAggregator( .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) .field("field") ); - context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType, 
filterFieldType); + context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), mappedField, filterField); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -985,7 +1025,7 @@ private void termsAggregator( private void termsAggregatorWithNestedMaxAgg( ValueType valueType, - MappedFieldType fieldType, + MappedField mappedField, Function valueFactory, Function> luceneFieldFactory ) throws Exception { @@ -1034,8 +1074,16 @@ private void termsAggregatorWithNestedMaxAgg( .order(bucketOrder) .subAggregation(AggregationBuilders.max("_max").field("value")); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.LONG); - AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType, fieldType2); + MappedField mappedField2 = new MappedField( + "value", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + AggregationContext context = createAggregationContext( + indexSearcher, + new MatchAllDocsQuery(), + mappedField, + mappedField2 + ); Aggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -1055,14 +1103,14 @@ private void termsAggregatorWithNestedMaxAgg( public void testEmpty() throws Exception { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { - MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("string"); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG); - MappedFieldType fieldType3 = new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField1 = new MappedField("string", new KeywordFieldMapper.KeywordFieldType()); + 
MappedField mappedField2 = new MappedField("long", new NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField mappedField3 = new MappedField("double", new NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.STRING) .field("string"); - AggregationContext context = createAggregationContext(indexSearcher, null, fieldType1); + AggregationContext context = createAggregationContext(indexSearcher, null, mappedField1); Aggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -1072,7 +1120,7 @@ public void testEmpty() throws Exception { assertEquals(0, result.getBuckets().size()); aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.LONG).field("long"); - context = createAggregationContext(indexSearcher, null, fieldType2); + context = createAggregationContext(indexSearcher, null, mappedField2); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -1082,7 +1130,7 @@ public void testEmpty() throws Exception { assertEquals(0, result.getBuckets().size()); aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(ValueType.DOUBLE).field("double"); - context = createAggregationContext(indexSearcher, null, fieldType3); + context = createAggregationContext(indexSearcher, null, mappedField3); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -1130,7 +1178,7 @@ public void testUnmappedWithMissing() throws Exception { try (IndexReader indexReader = 
maybeWrapReaderEs(indexWriter.getReader())) { - MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("unrelated_value"); + MappedField field1 = new MappedField("unrelated_value", new KeywordFieldMapper.KeywordFieldType()); IndexSearcher indexSearcher = newIndexSearcher(indexReader); ValueType[] valueTypes = new ValueType[] { ValueType.STRING, ValueType.LONG, ValueType.DOUBLE }; @@ -1141,7 +1189,7 @@ public void testUnmappedWithMissing() throws Exception { TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").userValueTypeHint(valueTypes[i]) .field(fieldNames[i]) .missing(missingValues[i]); - AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType1); + AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), field1); Aggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -1170,10 +1218,10 @@ public void testRangeField() throws Exception { document.add(field); indexWriter.addDocument(document); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); + MappedField mappedField = new MappedField(fieldName, new RangeFieldMapper.RangeFieldType(rangeType)); IndexSearcher indexSearcher = newIndexSearcher(indexReader); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field(fieldName); - expectThrows(IllegalArgumentException.class, () -> { createAggregator(aggregationBuilder, indexSearcher, fieldType); }); + expectThrows(IllegalArgumentException.class, () -> createAggregator(aggregationBuilder, indexSearcher, mappedField)); } } } @@ -1188,31 +1236,28 @@ public void testGeoPointField() throws Exception { document.add(new LatLonDocValuesField(field, point.getLat(), point.getLon())); 
indexWriter.addDocument(document); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); + MappedField mappedFieldType = new MappedField("field", new GeoPointFieldMapper.GeoPointFieldType()); IndexSearcher indexSearcher = newIndexSearcher(indexReader); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field(field); - expectThrows(IllegalArgumentException.class, () -> { createAggregator(aggregationBuilder, indexSearcher, fieldType); }); + expectThrows( + IllegalArgumentException.class, + () -> createAggregator(aggregationBuilder, indexSearcher, mappedFieldType) + ); } } } } public void testIpField() throws Exception { - MappedFieldType fieldType = new IpFieldMapper.IpFieldType( + MappedField mappedField = new MappedField( "field", - randomBoolean(), - false, - true, - null, - null, - Collections.emptyMap(), - false + new IpFieldMapper.IpFieldType(randomBoolean(), false, true, null, null, Collections.emptyMap(), false) ); testCase(new TermsAggregationBuilder("_name").field("field"), new MatchAllDocsQuery(), iw -> { Document document = new Document(); InetAddress point = InetAddresses.forString("192.168.100.42"); document.add(new SortedSetDocValuesField("field", new BytesRef(InetAddressPoint.encode(point)))); - if (fieldType.isIndexed()) { + if (mappedField.isIndexed()) { document.add(new InetAddressPoint("field", point)); } iw.addDocument(document); @@ -1221,25 +1266,31 @@ public void testIpField() throws Exception { assertEquals(1, result.getBuckets().size()); assertEquals("192.168.100.42", result.getBuckets().get(0).getKey()); assertEquals(1, result.getBuckets().get(0).getDocCount()); - }, fieldType); + }, mappedField); } public void testNestedTermsAgg() throws Exception { - MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("field1", randomBoolean(), true, Collections.emptyMap()); - MappedFieldType fieldType2 = 
new KeywordFieldMapper.KeywordFieldType("field2", randomBoolean(), true, Collections.emptyMap()); + MappedField mappedField1 = new MappedField( + "field1", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); + MappedField mappedField2 = new MappedField( + "field2", + new KeywordFieldMapper.KeywordFieldType(randomBoolean(), true, Collections.emptyMap()) + ); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { List document = new ArrayList<>(); - document.addAll(doc(fieldType1, "a")); - document.addAll(doc(fieldType2, "b")); + document.addAll(doc(mappedField1, "a")); + document.addAll(doc(mappedField2, "b")); indexWriter.addDocument(document); document = new ArrayList<>(); - document.addAll(doc(fieldType1, "c")); - document.addAll(doc(fieldType2, "d")); + document.addAll(doc(mappedField1, "c")); + document.addAll(doc(mappedField2, "d")); indexWriter.addDocument(document); document = new ArrayList<>(); - document.addAll(doc(fieldType1, "e")); - document.addAll(doc(fieldType2, "f")); + document.addAll(doc(mappedField1, "e")); + document.addAll(doc(mappedField2, "f")); indexWriter.addDocument(document); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); @@ -1257,7 +1308,12 @@ public void testNestedTermsAgg() throws Exception { .field("field2") .order(BucketOrder.key(true)) ); - AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType1, fieldType2); + AggregationContext context = createAggregationContext( + indexSearcher, + new MatchAllDocsQuery(), + mappedField1, + mappedField2 + ); Aggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -1294,8 +1350,11 @@ public void testMixLongAndDouble() throws Exception { try 
(IndexReader reader = createIndexWithLongs()) { dir = ((DirectoryReader) reader).directory(); IndexSearcher searcher = new IndexSearcher(reader); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); - aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, searcher)); + MappedField mappedField = new MappedField( + "number", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + aggs.add(buildInternalAggregation(aggregationBuilder, mappedField, searcher)); } dir.close(); } @@ -1305,8 +1364,11 @@ public void testMixLongAndDouble() throws Exception { try (IndexReader reader = createIndexWithDoubles()) { dir = ((DirectoryReader) reader).directory(); IndexSearcher searcher = new IndexSearcher(reader); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.DOUBLE); - aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, searcher)); + MappedField mappedField = new MappedField( + "number", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) + ); + aggs.add(buildInternalAggregation(aggregationBuilder, mappedField, searcher)); } dir.close(); } @@ -1368,9 +1430,9 @@ public void testGlobalAggregationWithScore() throws IOException { ) ); - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("keyword"); + MappedField mappedField = new MappedField("keyword", new KeywordFieldMapper.KeywordFieldType()); - InternalGlobal result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), globalBuilder, fieldType); + InternalGlobal result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), globalBuilder, mappedField); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertThat(terms.getBuckets().size(), equalTo(3)); for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { @@ -1413,9 +1475,9 @@ public void testWithNestedAggregations() throws 
IOException { ).storedField("_none_") ) ); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + MappedField mappedField = new MappedField( "nested_value", - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { { @@ -1424,7 +1486,7 @@ public void testWithNestedAggregations() throws IOException { // match root document only new FieldExistsQuery(PRIMARY_TERM_NAME), nested, - fieldType + mappedField ); InternalMultiBucketAggregation terms = result.getAggregations().get("terms"); assertNestedTopHitsScore(terms, withScore); @@ -1438,7 +1500,7 @@ public void testWithNestedAggregations() throws IOException { // match root document only new FieldExistsQuery(PRIMARY_TERM_NAME), filter, - fieldType + mappedField ); InternalNested nestedResult = result.getAggregations().get("nested"); InternalMultiBucketAggregation terms = nestedResult.getAggregations().get("terms"); @@ -1452,14 +1514,14 @@ public void testWithNestedAggregations() throws IOException { } public void testHeisenpig() throws IOException { - MappedFieldType nestedFieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); - KeywordFieldType animalFieldType = new KeywordFieldType("str", randomBoolean(), true, Collections.emptyMap()); + MappedField nestedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField animalField = new MappedField("str", new KeywordFieldType(randomBoolean(), true, Collections.emptyMap())); try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { String[] tags = new String[] { "danger", "fluffiness" }; - indexWriter.addDocuments(generateAnimalDocsWithNested("1", animalFieldType, "sheep", tags, new int[] { 1, 10 })); - 
indexWriter.addDocuments(generateAnimalDocsWithNested("2", animalFieldType, "cow", tags, new int[] { 3, 1 })); - indexWriter.addDocuments(generateAnimalDocsWithNested("3", animalFieldType, "pig", tags, new int[] { 100, 1 })); + indexWriter.addDocuments(generateAnimalDocsWithNested("1", animalField, "sheep", tags, new int[] { 1, 10 })); + indexWriter.addDocuments(generateAnimalDocsWithNested("2", animalField, "cow", tags, new int[] { 3, 1 })); + indexWriter.addDocuments(generateAnimalDocsWithNested("3", animalField, "pig", tags, new int[] { 100, 1 })); indexWriter.commit(); NestedAggregationBuilder nested = new NestedAggregationBuilder("nested", "nested_object").subAggregation( new MaxAggregationBuilder("max_number").field("number") @@ -1475,8 +1537,8 @@ public void testHeisenpig() throws IOException { // match root document only Queries.newNonNestedFilter(), terms, - animalFieldType, - nestedFieldType + animalField, + nestedField ); assertThat(result.getBuckets().get(0).getKeyAsString(), equalTo("pig")); assertThat(result.getBuckets().get(0).docCount, equalTo(1L)); @@ -1509,16 +1571,19 @@ public void testSortingWithNestedAggregations() throws IOException { .shardSize(1) .size(1) .order(BucketOrder.aggregation("nested>max_val", false)); - MappedFieldType nestedFieldType = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.LONG); + MappedField nestedField = new MappedField( + "nested_value", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + MappedField valueField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { LongTerms result = searchAndReduce( newSearcher(indexReader, false, true), // match root document only new 
FieldExistsQuery(PRIMARY_TERM_NAME), terms, - fieldType, - nestedFieldType + valueField, + nestedField ); assertThat(result.getBuckets().get(0).term, equalTo(3L)); assertThat( @@ -1532,8 +1597,8 @@ public void testSortingWithNestedAggregations() throws IOException { } public void testManySegmentsStillSingleton() throws IOException { - NumberFieldType nFt = new NumberFieldType("n", NumberFieldMapper.NumberType.LONG); - KeywordFieldType strFt = new KeywordFieldType("str", true, true, Collections.emptyMap()); + MappedField nF = new MappedField("n", new NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField strF = new MappedField("str", new KeywordFieldType(true, true, Collections.emptyMap())); AggregationBuilder builder = new TermsAggregationBuilder("n").field("n") .subAggregation(new TermsAggregationBuilder("str").field("str")); withNonMergingIndex(iw -> { @@ -1564,8 +1629,8 @@ public void testManySegmentsStillSingleton() throws IOException { assertThat(subDebug, hasEntry("segments_with_single_valued_ords", 2)); assertThat(subDebug, hasEntry("segments_with_multi_valued_ords", 0)); }, - nFt, - strFt + nF, + strF ) ); } @@ -1578,7 +1643,7 @@ public void topLevelProfileTestCase( Function extraMatcher ) throws IOException { randomizeAggregatorImpl = false; - KeywordFieldType strFt = new KeywordFieldType("str", false, true, Collections.emptyMap()); + MappedField strF = new MappedField("str", new KeywordFieldType(false, true, Collections.emptyMap())); AggregationBuilder builder = new TermsAggregationBuilder("str").field("str").includeExclude(includeExclude); CheckedConsumer buildIndex = iw -> { for (int i = 0; i < count; i++) { @@ -1608,7 +1673,7 @@ public void topLevelProfileTestCase( ) ); }, - strFt + strF ); } @@ -1647,7 +1712,7 @@ public void testLowCardinalityProfile() throws IOException { } public void testNumberToStringValueScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", 
NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("name").userValueTypeHint(ValueType.STRING) .field("number") @@ -1658,7 +1723,7 @@ public void testNumberToStringValueScript() throws IOException { for (int i = 0; i < numDocs; i++) { iw.addDocument(singleton(new NumericDocValuesField("number", i + 1))); } - }, (Consumer>) terms -> { assertTrue(AggregationInspectionHelper.hasValue(terms)); }, fieldType); + }, (Consumer>) terms -> { assertTrue(AggregationInspectionHelper.hasValue(terms)); }, mappedField); } public void testThreeLayerStringViaGlobalOrds() throws IOException { @@ -1670,9 +1735,9 @@ public void testThreeLayerStringViaMap() throws IOException { } private void threeLayerStringTestCase(String executionHint) throws IOException { - MappedFieldType ift = new KeywordFieldType("i", randomBoolean(), true, Collections.emptyMap()); - MappedFieldType jft = new KeywordFieldType("j", randomBoolean(), true, Collections.emptyMap()); - MappedFieldType kft = new KeywordFieldType("k", randomBoolean(), true, Collections.emptyMap()); + MappedField iField = new MappedField("i", new KeywordFieldType(randomBoolean(), true, Collections.emptyMap())); + MappedField jField = new MappedField("j", new KeywordFieldType(randomBoolean(), true, Collections.emptyMap())); + MappedField kField = new MappedField("k", new KeywordFieldType(randomBoolean(), true, Collections.emptyMap())); try (Directory dir = newDirectory()) { try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { @@ -1680,9 +1745,9 @@ private void threeLayerStringTestCase(String executionHint) throws IOException { for (int j = 0; j < 10; j++) { for (int k = 0; k < 10; k++) { List d = new ArrayList<>(); - d.addAll(doc(ift, Integer.toString(i))); - d.addAll(doc(jft, Integer.toString(j))); - d.addAll(doc(kft, 
Integer.toString(k))); + d.addAll(doc(iField, Integer.toString(i))); + d.addAll(doc(jField, Integer.toString(j))); + d.addAll(doc(kField, Integer.toString(k))); writer.addDocument(d); } } @@ -1696,7 +1761,7 @@ private void threeLayerStringTestCase(String executionHint) throws IOException { .executionHint(executionHint) .subAggregation(new TermsAggregationBuilder("k").field("k").executionHint(executionHint)) ); - StringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, ift, jft, kft); + StringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, iField, jField, kField); for (int i = 0; i < 10; i++) { StringTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i)); assertThat(iBucket.getDocCount(), equalTo(100L)); @@ -1791,11 +1856,11 @@ public void testOrderByPipelineAggregation() throws Exception { .order(BucketOrder.aggregation("script", true)) .subAggregation(bucketScriptAgg); - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("field"); + MappedField mappedField = new MappedField("field", new KeywordFieldMapper.KeywordFieldType()); AggregationExecutionException e = expectThrows( AggregationExecutionException.class, - () -> createAggregator(termsAgg, indexSearcher, fieldType) + () -> createAggregator(termsAgg, indexSearcher, mappedField) ); assertEquals( "Invalid aggregation order path [script]. 
The provided aggregation [script] " @@ -1808,7 +1873,7 @@ public void testOrderByPipelineAggregation() throws Exception { } public void testFormatWithMissing() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("name").field("number") .format("$###.00") @@ -1820,11 +1885,11 @@ public void testFormatWithMissing() throws IOException { for (int i = 1; i < numDocs; i++) { iw.addDocument(singleton(new NumericDocValuesField("number", i + 1))); } - }, (Consumer>) terms -> assertTrue(AggregationInspectionHelper.hasValue(terms)), fieldType); + }, (Consumer>) terms -> assertTrue(AggregationInspectionHelper.hasValue(terms)), mappedField); } public void testFormatCannotParseMissing() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("name").field("number").format("$###.00").missing("42"); @@ -1834,7 +1899,7 @@ public void testFormatCannotParseMissing() throws IOException { for (int i = 1; i < numDocs; i++) { iw.addDocument(singleton(new NumericDocValuesField("number", i + 1))); } - }, (Consumer>) terms -> fail("Should have thrown"), fieldType)); + }, (Consumer>) terms -> fail("Should have thrown"), mappedField)); assertThat(ex.getMessage(), equalTo("Cannot parse the value [42] using the pattern [$###.00]")); } @@ -1886,10 +1951,13 @@ public void testOrderByCardinality() throws IOException { aggregationBuilder, Integer.MAX_VALUE, false, - new NumberFieldMapper.NumberFieldType("a", 
NumberFieldMapper.NumberType.INTEGER), - bIsString - ? new KeywordFieldMapper.KeywordFieldType("b") - : new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER) + new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)), + new MappedField( + "b", + bIsString + ? new KeywordFieldMapper.KeywordFieldType() + : new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ) ); assertThat( terms.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(toList()), @@ -1904,8 +1972,10 @@ public void testOrderByCardinality() throws IOException { } public void testAsSubAgg() throws IOException { - DateFieldType dft = new DateFieldType("d"); - KeywordFieldType kft = new KeywordFieldType("k", false, true, Collections.emptyMap()); + DateFieldType dft = new DateFieldType(); + MappedField df = new MappedField("d", dft); + KeywordFieldType kft = new KeywordFieldType(false, true, Collections.emptyMap()); + MappedField kf = new MappedField("k", kft); AggregationBuilder builder = new DateHistogramAggregationBuilder("dh").field("d") .calendarInterval(DateHistogramInterval.YEAR) .subAggregation(new TermsAggregationBuilder("k").field("k")); @@ -1955,17 +2025,17 @@ public void testAsSubAgg() throws IOException { assertThat(terms.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), equalTo(List.of("a", "b"))); terms = dh.getBuckets().get(1).getAggregations().get("k"); assertThat(terms.getBuckets().stream().map(StringTerms.Bucket::getKey).collect(toList()), equalTo(List.of("a"))); - }, dft, kft); + }, df, kf); withAggregator(builder, new MatchAllDocsQuery(), buildIndex, (searcher, aggregator) -> { TermsAggregator terms = (TermsAggregator) aggregator.subAggregator("k"); Map info = new HashMap<>(); terms.collectDebugInfo(info::put); assertThat(info, hasEntry("collection_strategy", "remap using many bucket ords packed using [2/62] bits")); - }, dft, kft); + }, df, kf); } 
public void testWithFilterAndPreciseSize() throws IOException { - KeywordFieldType kft = new KeywordFieldType("k", true, true, Collections.emptyMap()); + MappedField kf = new MappedField("k", new KeywordFieldType(true, true, Collections.emptyMap())); CheckedConsumer buildIndex = iw -> { iw.addDocument( List.of( @@ -2031,7 +2101,7 @@ public void testWithFilterAndPreciseSize() throws IOException { ) ); }, - kft + kf ); } @@ -2084,7 +2154,7 @@ public void execute() { .entry("collection_strategy", either(equalTo("remap using single bucket ords")).or(equalTo("dense"))) ) ); - }, new KeywordFieldType("k", true, true, Collections.emptyMap())); + }, new MappedField("k", new KeywordFieldType(true, true, Collections.emptyMap()))); } /** @@ -2106,8 +2176,8 @@ public void execute() { } }; BytesRef[] values = new BytesRef[] { new BytesRef("stuff"), new BytesRef("more_stuff"), new BytesRef("other_stuff"), }; - MappedFieldType keywordFt = new KeywordFieldType("k", true, true, Collections.emptyMap()); - MappedFieldType dummyFt = new KeywordScriptFieldType("dummy", scriptFactory, new Script("test"), Map.of()); + MappedField keywordF = new MappedField("k", new KeywordFieldType(true, true, Collections.emptyMap())); + MappedField dummyF = new MappedField("dummy", new KeywordScriptFieldType(scriptFactory, new Script("test"), Map.of())); debugTestCase(new TermsAggregationBuilder("t").field("dummy"), new MatchAllDocsQuery(), iw -> { for (int d = 0; d < totalDocs; d++) { BytesRef value = values[d % values.length]; @@ -2132,7 +2202,7 @@ public void execute() { .entry("collection_strategy", "from Field [dummy] of type [keyword]") ) ); - }, keywordFt, dummyFt); + }, keywordF, dummyF); } /** @@ -2144,7 +2214,7 @@ public void testOneBucket() throws IOException { long totalDocs = 500; long[] totalCount = new long[] { 0 }; BytesRef value = new BytesRef("stuff"); - MappedFieldType keywordFt = new KeywordFieldType("k", true, true, Collections.emptyMap()); + MappedField keywordF = new 
MappedField("k", new KeywordFieldType(true, true, Collections.emptyMap())); debugTestCase(new TermsAggregationBuilder("t").field("k"), new MatchAllDocsQuery(), iw -> { for (int d = 0; d < totalDocs; d++) { List doc = new ArrayList<>(); @@ -2175,7 +2245,7 @@ public void testOneBucket() throws IOException { ) ) ); - }, keywordFt); + }, keywordF); } public void testFewBuckets() throws IOException { @@ -2184,7 +2254,7 @@ public void testFewBuckets() throws IOException { long totalDocs = 500; long[] totalCounts = new long[] { 0, 0, 0 }; BytesRef[] values = new BytesRef[] { new BytesRef("a"), new BytesRef("b"), new BytesRef("c") }; - MappedFieldType keywordFt = new KeywordFieldType("k", true, true, Collections.emptyMap()); + MappedField keywordF = new MappedField("k", new KeywordFieldType(true, true, Collections.emptyMap())); debugTestCase(new TermsAggregationBuilder("t").field("k").order(BucketOrder.key(true)), new MatchAllDocsQuery(), iw -> { for (int d = 0; d < totalDocs; d++) { BytesRef value = values[d % values.length]; @@ -2228,7 +2298,7 @@ public void testFewBuckets() throws IOException { ) ); } - }, keywordFt); + }, keywordF); } private final SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); @@ -2256,7 +2326,7 @@ private List> generateDocsWithNested(String id, int val private List> generateAnimalDocsWithNested( String id, - KeywordFieldType animalFieldType, + MappedField animalField, String animal, String[] tags, int[] nestedValues @@ -2275,7 +2345,7 @@ private List> generateAnimalDocsWithNested( LuceneDocument document = new LuceneDocument(); document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(id), ProvidedIdFieldMapper.Defaults.FIELD_TYPE)); - document.addAll(doc(animalFieldType, animal)); + document.addAll(doc(animalField, animal)); document.add(new Field(NestedPathFieldMapper.NAME, "docs", NestedPathFieldMapper.Defaults.FIELD_TYPE)); sequenceIDFields.addFields(document); documents.add(document); @@ 
-2321,9 +2391,9 @@ private IndexReader createIndexWithDoubles() throws IOException { return DirectoryReader.open(directory); } - private InternalAggregation buildInternalAggregation(TermsAggregationBuilder builder, MappedFieldType fieldType, IndexSearcher searcher) + private InternalAggregation buildInternalAggregation(TermsAggregationBuilder builder, MappedField mappedField, IndexSearcher searcher) throws IOException { - TermsAggregator aggregator = createAggregator(builder, searcher, fieldType); + TermsAggregator aggregator = createAggregator(builder, searcher, mappedField); aggregator.preCollection(); searcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java index 81555d8a8ebdc..5507dd6377f34 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java @@ -25,8 +25,9 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; @@ -243,7 +244,7 @@ public void testUnmappedWithMissingField() throws IOException { private void verifyAvgOfDoubles(double[] values, double expected, double delta) throws IOException { AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("number"); - MappedFieldType fieldType = new 
NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.DOUBLE); + final MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { List> docs = new ArrayList<>(); for (double value : values) { @@ -258,7 +259,7 @@ private void verifyAvgOfDoubles(double[] values, double expected, double delta) * test. */ iw.addDocuments(docs); - }, avg -> assertEquals(expected, avg.getValue(), delta), fieldType); + }, avg -> assertEquals(expected, avg.getValue(), delta), mappedField); } public void testSingleValuedFieldPartiallyUnmapped() throws IOException { @@ -278,10 +279,10 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("number"); - AvgAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + AvgAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -310,7 +311,7 @@ public void testSingleValuedField() throws IOException { } public void testSingleValuedField_WithFormatter() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new 
AvgAggregationBuilder("_name").format("#") .field("value") @@ -325,11 +326,11 @@ public void testSingleValuedField_WithFormatter() throws IOException { assertEquals((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); assertEquals("6", avg.getValueAsString()); - }, fieldType); + }, mappedField); } public void testSingleValuedFieldWithValueScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); @@ -342,11 +343,11 @@ public void testSingleValuedFieldWithValueScript() throws IOException { }, avg -> { assertEquals((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); - }, fieldType); + }, mappedField); } public void testScriptSingleValued() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_FIELD_SCRIPT, Collections.emptyMap()) @@ -360,11 +361,11 @@ public void testScriptSingleValued() throws IOException { }, avg -> { assertEquals((double) (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10) / 10, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); - }, fieldType); + }, mappedField); } public void testScriptSingleValuedWithParams() throws IOException { - 
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); Map params = new HashMap<>(); params.put("inc", 1); @@ -382,7 +383,7 @@ public void testScriptSingleValuedWithParams() throws IOException { }, avg -> { assertEquals((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); - }, fieldType); + }, mappedField); } public void testMultiValuedField() throws IOException { @@ -401,7 +402,7 @@ public void testMultiValuedField() throws IOException { } public void testScriptMultiValued() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap()) @@ -422,7 +423,7 @@ public void testScriptMultiValued() throws IOException { 0 ); assertTrue(AggregationInspectionHelper.hasValue(avg)); - }, fieldType); + }, mappedField); } public void testScriptMultiValuedWithParams() throws Exception { @@ -430,7 +431,7 @@ public void testScriptMultiValuedWithParams() throws Exception { params.put("inc", 1); params.put("field", "values"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params) @@ -451,11 +452,11 @@ public 
void testScriptMultiValuedWithParams() throws Exception { 0 ); assertTrue(AggregationInspectionHelper.hasValue(avg)); - }, fieldType); + }, mappedField); } public void testSingleValuedFieldWithValueScriptWithParams() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); Map params = Collections.singletonMap("inc", 1); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("value") @@ -469,11 +470,11 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws IOException }, avg -> { assertEquals((double) (2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11) / 10, avg.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(avg)); - }, fieldType); + }, mappedField); } public void testMultiValuedFieldWithValueScriptWithParams() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); Map params = Collections.singletonMap("inc", 1); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("values") @@ -494,11 +495,11 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws IOException { 0 ); assertTrue(AggregationInspectionHelper.hasValue(avg)); - }, fieldType); + }, mappedField); } public void testMultiValuedFieldWithValueScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("values") .script(new 
Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); @@ -518,11 +519,11 @@ public void testMultiValuedFieldWithValueScript() throws IOException { 0 ); assertTrue(AggregationInspectionHelper.hasValue(avg)); - }, fieldType); + }, mappedField); } public void testOrderByEmptyAggregation() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.NUMERIC) .field("value") @@ -542,7 +543,7 @@ public void testOrderByEmptyAggregation() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -574,9 +575,9 @@ public void testOrderByEmptyAggregation() throws IOException { private void testAggregation(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("_name").field("number"); - testAggregation(aggregationBuilder, query, buildIndex, verify, fieldType); + testAggregation(aggregationBuilder, query, buildIndex, verify, mappedField); } private void testAggregation( @@ -584,9 +585,9 @@ private void testAggregation( Query query, 
CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - testCase(aggregationBuilder, query, buildIndex, verify, fieldTypes); + testCase(aggregationBuilder, query, buildIndex, verify, mappedFields); } /** @@ -610,10 +611,10 @@ public void testCacheAggregation() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("avg").field("value"); - AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); + AggregationContext context = createAggregationContext(indexSearcher, null, mappedField); AvgAggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -655,11 +656,11 @@ public void testScriptCaching() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); AvgAggregationBuilder aggregationBuilder = new AvgAggregationBuilder("avg").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); - AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); + AggregationContext context = createAggregationContext(indexSearcher, null, mappedField); 
AvgAggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -677,7 +678,7 @@ public void testScriptCaching() throws IOException { aggregationBuilder = new AvgAggregationBuilder("avg").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, RANDOM_SCRIPT, Collections.emptyMap())); - context = createAggregationContext(indexSearcher, null, fieldType); + context = createAggregationContext(indexSearcher, null, mappedField); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -704,7 +705,7 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new AvgAggregationBuilder("foo").field(fieldName); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java index 64baf8d4e4b18..707ac07883f92 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java @@ -29,8 +29,9 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.index.mapper.RangeFieldMapper; import 
org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.script.MockScriptEngine; @@ -128,7 +129,7 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new CardinalityAggregationBuilder("cardinality").field(fieldName); } @@ -146,7 +147,7 @@ public void testRangeFieldValues() throws IOException { final RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true); final RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(rangeType, 6.0D, 10.0D, true, true); final String fieldName = "rangeField"; - MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); + MappedField mappedField = new MappedField(fieldName, new RangeFieldMapper.RangeFieldType(rangeType)); final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("_name").field(fieldName); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(singleton(range1))))); @@ -156,7 +157,7 @@ public void testRangeFieldValues() throws IOException { }, card -> { assertEquals(3.0, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, fieldType); + }, mappedField); } public void testNoMatchingField() throws IOException { @@ -211,7 +212,7 @@ public void testQueryFiltersAll() throws IOException { public void testSingleValuedString() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("str_value"); - final MappedFieldType mappedFieldTypes = new KeywordFieldMapper.KeywordFieldType("str_value"); + MappedField mappedField = new MappedField("str_value", new KeywordFieldMapper.KeywordFieldType()); 
testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedDocValuesField("str_value", new BytesRef("one")))); @@ -221,13 +222,13 @@ public void testSingleValuedString() throws IOException { }, card -> { assertEquals(2, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testSingleValuedStringValueScript() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("str_value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "_value", emptyMap())); - final MappedFieldType mappedFieldTypes = new KeywordFieldMapper.KeywordFieldType("str_value"); + MappedField mappedField = new MappedField("str_value", new KeywordFieldMapper.KeywordFieldType()); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedDocValuesField("str_value", new BytesRef("one")))); @@ -237,14 +238,14 @@ public void testSingleValuedStringValueScript() throws IOException { }, card -> { assertEquals(2, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testSingleValuedStringScript() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, "doc['str_value'].value", emptyMap()) ); - final MappedFieldType mappedFieldTypes = new KeywordFieldMapper.KeywordFieldType("str_value"); + MappedField mappedField = new MappedField("str_value", new KeywordFieldMapper.KeywordFieldType()); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedDocValuesField("str_value", new BytesRef("one")))); @@ -254,14 +255,14 @@ public void testSingleValuedStringScript() throws IOException { }, card -> { assertEquals(2, 
card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testMultiValuedStringScript() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, "doc['str_values']", emptyMap()) ); - final MappedFieldType mappedFieldTypes = new KeywordFieldMapper.KeywordFieldType("str_values"); + MappedField mappedField = new MappedField("str_values", new KeywordFieldMapper.KeywordFieldType()); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument( @@ -297,13 +298,13 @@ public void testMultiValuedStringScript() throws IOException { }, card -> { assertEquals(3, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testMultiValuedStringValueScript() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("str_values") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "_value", emptyMap())); - final MappedFieldType mappedFieldTypes = new KeywordFieldMapper.KeywordFieldType("str_values"); + MappedField mappedField = new MappedField("str_values", new KeywordFieldMapper.KeywordFieldType()); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument( @@ -339,12 +340,12 @@ public void testMultiValuedStringValueScript() throws IOException { }, card -> { assertEquals(3, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testMultiValuedString() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("str_values"); - final MappedFieldType mappedFieldTypes = new KeywordFieldMapper.KeywordFieldType("str_values"); + MappedField 
mappedField = new MappedField("str_values", new KeywordFieldMapper.KeywordFieldType()); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument( @@ -380,7 +381,7 @@ public void testMultiValuedString() throws IOException { }, card -> { assertEquals(3, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testUnmappedMissingString() throws IOException { @@ -427,10 +428,10 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { final MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); final IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - final MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); final AggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("cardinality").field("number"); - final CardinalityAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + final CardinalityAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -449,7 +450,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { public void testSingleValuedNumericValueScript() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "_value", emptyMap())); - final MappedFieldType mappedFieldTypes = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("number", new 
NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField("number", 10))); @@ -459,14 +460,14 @@ public void testSingleValuedNumericValueScript() throws IOException { }, card -> { assertEquals(2, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testSingleValuedNumericScript() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, "doc['number'].value", emptyMap()) ); - final MappedFieldType mappedFieldTypes = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField("number", 10))); @@ -476,13 +477,13 @@ public void testSingleValuedNumericScript() throws IOException { }, card -> { assertEquals(2, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testMultiValuedNumericValueScript() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("numbers") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "_value", emptyMap())); - final MappedFieldType mappedFieldTypes = new NumberFieldMapper.NumberFieldType("numbers", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("numbers", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(List.of(new SortedNumericDocValuesField("numbers", 10), 
new SortedNumericDocValuesField("numbers", 12))); @@ -495,14 +496,14 @@ public void testMultiValuedNumericValueScript() throws IOException { }, card -> { assertEquals(4, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testMultiValuedNumericScript() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, "doc['numbers']", emptyMap()) ); - final MappedFieldType mappedFieldTypes = new NumberFieldMapper.NumberFieldType("numbers", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("numbers", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(List.of(new SortedNumericDocValuesField("numbers", 10), new SortedNumericDocValuesField("numbers", 12))); @@ -515,12 +516,12 @@ public void testMultiValuedNumericScript() throws IOException { }, card -> { assertEquals(4, card.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testMultiValuedNumeric() throws IOException { final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("name").field("number"); - final MappedFieldType mappedFieldTypes = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(List.of(new SortedNumericDocValuesField("number", 7), new SortedNumericDocValuesField("number", 8))); @@ -530,11 +531,11 @@ public void testMultiValuedNumeric() throws IOException { }, card -> { assertEquals(3, card.getValue(), 0); 
assertTrue(AggregationInspectionHelper.hasValue(card)); - }, mappedFieldTypes); + }, mappedField); } public void testSingleValuedFieldGlobalAggregation() throws IOException { - final MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); + final MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final AggregationBuilder aggregationBuilder = AggregationBuilders.global("global") .subAggregation(AggregationBuilders.cardinality("cardinality").field("number")); @@ -560,7 +561,7 @@ public void testSingleValuedFieldGlobalAggregation() throws IOException { assertEquals(cardinality, ((InternalAggregation) global).getProperty("cardinality")); assertEquals(numDocs, (double) ((InternalAggregation) global).getProperty("cardinality.value"), 0); assertEquals(numDocs, (double) ((InternalAggregation) cardinality).getProperty("value"), 0); - }, fieldType); + }, mappedField); } public void testUnmappedMissingGeoPoint() throws IOException { @@ -578,9 +579,9 @@ public void testUnmappedMissingGeoPoint() throws IOException { } public void testAsSubAggregation() throws IOException { - final MappedFieldType mappedFieldTypes[] = { - new KeywordFieldMapper.KeywordFieldType("str_value"), - new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG) }; + final MappedField mappedFields[] = { + new MappedField("str_value", new KeywordFieldMapper.KeywordFieldType()), + new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.LONG)) }; final AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms").field("str_value") .missing("unknown") @@ -615,7 +616,7 @@ public void testAsSubAggregation() throws IOException { assertEquals("cardinality", cardinality.getName()); assertEquals(5, cardinality.getValue()); } - }, mappedFieldTypes); + }, mappedFields); } public void testCacheAggregation() throws IOException { @@ -636,10 +637,10 @@ 
public void testCacheAggregation() throws IOException { final MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); final IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - final MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + final MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.INTEGER)); final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("cardinality").field("number"); - final AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); + final AggregationContext context = createAggregationContext(indexSearcher, null, mappedField); final CardinalityAggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -664,9 +665,9 @@ private void testAggregation( CheckedConsumer buildIndex, Consumer verify ) throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder("_name").field("number"); - testAggregation(aggregationBuilder, query, buildIndex, verify, fieldType); + testAggregation(aggregationBuilder, query, buildIndex, verify, mappedField); } private void testAggregation( @@ -674,12 +675,12 @@ private void testAggregation( Query query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... fieldTypes + MappedField... 
mappedFields ) throws IOException { - testCase(aggregationBuilder, query, buildIndex, verify, fieldTypes); + testCase(aggregationBuilder, query, buildIndex, verify, mappedFields); for (CardinalityAggregatorFactory.ExecutionMode mode : CardinalityAggregatorFactory.ExecutionMode.values()) { aggregationBuilder.executionHint(mode.toString().toLowerCase(Locale.ROOT)); - testCase(aggregationBuilder, query, buildIndex, verify, fieldTypes); + testCase(aggregationBuilder, query, buildIndex, verify, mappedFields); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java index 861d79f27982b..955f516857169 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java @@ -15,8 +15,9 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; @@ -31,8 +32,8 @@ public class ExtendedStatsAggregatorTests extends AggregatorTestCase { // TODO: Add script test cases. Should fail with defaultValuesSourceType() commented out. 
public void testEmpty() throws IOException { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); - testCase(ft, iw -> {}, stats -> { + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + testCase(mappedField, iw -> {}, stats -> { assertEquals(0d, stats.getCount(), 0); assertEquals(0d, stats.getSum(), 0); assertEquals(Float.NaN, stats.getAvg(), 0); @@ -50,9 +51,9 @@ public void testEmpty() throws IOException { } public void testRandomDoubles() throws IOException { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); final ExtendedSimpleStatsAggregator expected = new ExtendedSimpleStatsAggregator(); - testCase(ft, iw -> { + testCase(mappedField, iw -> { int numDocs = randomIntBetween(10, 50); for (int i = 0; i < numDocs; i++) { Document doc = new Document(); @@ -116,9 +117,9 @@ public void testRandomDoubles() throws IOException { * Testcase for https://github.com/elastic/elasticsearch/issues/37303 */ public void testVarianceNonNegative() throws IOException { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); final ExtendedSimpleStatsAggregator expected = new ExtendedSimpleStatsAggregator(); - testCase(ft, iw -> { + testCase(mappedField, iw -> { int numDocs = 3; for (int i = 0; i < numDocs; i++) { Document doc = new Document(); @@ -140,9 +141,9 @@ public void testVarianceNonNegative() throws IOException { } public void testRandomLongs() throws IOException { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField("field", new 
NumberFieldMapper.NumberFieldType(NumberType.LONG)); final ExtendedSimpleStatsAggregator expected = new ExtendedSimpleStatsAggregator(); - testCase(ft, iw -> { + testCase(mappedField, iw -> { int numDocs = randomIntBetween(10, 50); for (int i = 0; i < numDocs; i++) { Document doc = new Document(); @@ -235,7 +236,7 @@ public void testSummationAccuracy() throws IOException { private void verifyStatsOfDoubles(double[] values, double expectedSum, double expectedSumOfSqrs, double delta) throws IOException { final String fieldName = "field"; - MappedFieldType ft = new NumberFieldMapper.NumberFieldType(fieldName, NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); double max = Double.NEGATIVE_INFINITY; double min = Double.POSITIVE_INFINITY; for (double value : values) { @@ -244,7 +245,7 @@ private void verifyStatsOfDoubles(double[] values, double expectedSum, double ex } double expectedMax = max; double expectedMin = min; - testCase(ft, iw -> { + testCase(mappedField, iw -> { for (double value : values) { iw.addDocument(singleton(new NumericDocValuesField(fieldName, NumericUtils.doubleToSortableLong(value)))); } @@ -259,14 +260,14 @@ private void verifyStatsOfDoubles(double[] values, double expectedSum, double ex } public void testCase( - MappedFieldType ft, + MappedField mappedField, CheckedConsumer buildIndex, Consumer verify ) throws IOException { ExtendedStatsAggregationBuilder aggBuilder = new ExtendedStatsAggregationBuilder("my_agg").field("field") .sigma(randomDoubleBetween(0, 10, true)); - testCase(aggBuilder, new MatchAllDocsQuery(), buildIndex, verify, ft); + testCase(aggBuilder, new MatchAllDocsQuery(), buildIndex, verify, mappedField); } static class ExtendedSimpleStatsAggregator extends StatsAggregatorTests.SimpleStatsAggregator { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java index e8fd9aeefdc89..df1dcfa7d534c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; @@ -42,10 +42,10 @@ public void testEmpty() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); + final MappedField mappedField = new MappedField("field", new GeoPointFieldMapper.GeoPointFieldType()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -67,10 +67,10 @@ public void testUnmappedFieldWithDocs() throws Exception { GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("non_existent").wrapLongitude(false); - MappedFieldType fieldType = new 
GeoPointFieldMapper.GeoPointFieldType("field"); + final MappedField mappedField = new MappedField("field", new GeoPointFieldMapper.GeoPointFieldType()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -88,7 +88,7 @@ public void testMissing() throws Exception { doc.add(new NumericDocValuesField("not_field", 1000L)); w.addDocument(doc); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); + final MappedField mappedField = new MappedField("field", new GeoPointFieldMapper.GeoPointFieldType()); Point point = GeometryTestUtils.randomPoint(false); double lon = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(point.getX())); @@ -102,7 +102,7 @@ public void testMissing() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertThat(bounds.top, equalTo(lat)); assertThat(bounds.bottom, equalTo(lat)); assertThat(bounds.posLeft, equalTo(lon >= 0 ? 
lon : Double.POSITIVE_INFINITY)); @@ -120,7 +120,7 @@ public void testInvalidMissing() throws Exception { doc.add(new NumericDocValuesField("not_field", 1000L)); w.addDocument(doc); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); + final MappedField mappedField = new MappedField("field", new GeoPointFieldMapper.GeoPointFieldType()); GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field") .missing("invalid") @@ -129,7 +129,7 @@ public void testInvalidMissing() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); ElasticsearchParseException exception = expectThrows( ElasticsearchParseException.class, - () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType) + () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField) ); assertThat(exception.getMessage(), startsWith("unsupported symbol")); } @@ -174,10 +174,10 @@ public void testRandom() throws Exception { } GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); + final MappedField mappedField = new MappedField("field", new GeoPointFieldMapper.GeoPointFieldType()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE)); assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE)); assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE)); @@ -190,7 +190,7 @@ public void testRandom() throws Exception { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected 
AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new GeoBoundsAggregationBuilder("foo").field(fieldName); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java index 3aeca19bb5f85..34e1fb4b0b05a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.index.mapper.GeoPointFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -36,10 +37,15 @@ public void testEmpty() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("field"); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); + MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType(); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce( + searcher, + new MatchAllDocsQuery(), + aggBuilder, + new MappedField("field", fieldType) + ); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -56,12 +62,12 @@ public void testUnmapped() throws Exception { try (IndexReader reader = 
w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field"); - InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + MappedField mappedField = new MappedField("another_field", new GeoPointFieldMapper.GeoPointFieldType()); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertNull(result.centroid()); - fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field"); - result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + mappedField = new MappedField("another_field", new GeoPointFieldMapper.GeoPointFieldType()); + result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -83,8 +89,8 @@ public void testUnmappedWithMissing() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field"); - InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + MappedField mappedField = new MappedField("another_field", new GeoPointFieldMapper.GeoPointFieldType()); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertEquals(result.centroid(), expectedCentroid); assertTrue(AggregationInspectionHelper.hasValue(result)); } @@ -145,11 +151,11 @@ public void testMultiValuedField() throws Exception { } private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) throws IOException { - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field"); + MappedField mappedField = new MappedField("field", new GeoPointFieldMapper.GeoPointFieldType()); 
GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertEquals("my_agg", result.getName()); GeoPoint centroid = result.centroid(); @@ -161,7 +167,7 @@ private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) thro } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new GeoCentroidAggregationBuilder("foo").field(fieldName); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java index c41ee73378b7c..86e339938bcea 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java @@ -17,7 +17,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -33,7 +33,7 @@ public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase { @Override - protected AggregationBuilder 
createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new PercentileRanksAggregationBuilder("hdr_ranks", new double[] { 0.1, 0.5, 12 }).field(fieldName) .percentilesConfig(new PercentilesConfig.Hdr()); } @@ -46,10 +46,13 @@ protected List getSupportedValuesSourceTypes() { public void testEmpty() throws IOException { PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.5 }).field("field") .method(PercentilesMethod.HDR); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + final MappedField mappedField = new MappedField( + "field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) + ); try (IndexReader reader = new MultiReader()) { IndexSearcher searcher = new IndexSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); Percentile rank = ranks.iterator().next(); assertEquals(Double.NaN, rank.getPercent(), 0d); assertEquals(0.5, rank.getValue(), 0d); @@ -68,10 +71,13 @@ public void testSimple() throws IOException { PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) .field("field") .method(PercentilesMethod.HDR); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + final MappedField mappedField = new MappedField( + "field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) + ); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, 
fieldType); + PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); Iterator rankIterator = ranks.iterator(); Percentile rank = rankIterator.next(); assertEquals(0.1, rank.getValue(), 0d); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java index c8cae2f879149..c42828a3dd22d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java @@ -24,7 +24,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeType; @@ -47,7 +47,7 @@ public class HDRPercentilesAggregatorTests extends AggregatorTestCase { @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new PercentilesAggregationBuilder("hdr_percentiles").field(fieldName).percentilesConfig(new PercentilesConfig.Hdr()); } @@ -70,12 +70,12 @@ public void testNoDocs() throws IOException { */ public void testStringField() throws IOException { final String fieldName = "string"; - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(fieldName); + MappedField mappedField = new MappedField(fieldName, new KeywordFieldMapper.KeywordFieldType()); expectThrows(IllegalArgumentException.class, () -> testCase(new 
FieldExistsQuery(fieldName), iw -> { iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("bogus")))); iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("zwomp")))); iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foobar")))); - }, hdr -> {}, fieldType, fieldName)); + }, hdr -> {}, mappedField, fieldName)); } /** @@ -85,7 +85,7 @@ public void testStringField() throws IOException { public void testRangeField() throws IOException { // Currently fails (throws ClassCast exception), but should be fixed once HDRPercentileAggregation uses the ValuesSource registry final String fieldName = "range"; - MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, RangeType.DOUBLE); + MappedField mappedField = new MappedField(fieldName, new RangeFieldMapper.RangeFieldType(RangeType.DOUBLE)); RangeFieldMapper.Range range = new RangeFieldMapper.Range(RangeType.DOUBLE, 1.0D, 5.0D, true, true); BytesRef encodedRange = RangeType.DOUBLE.encodeRanges(Collections.singleton(range)); expectThrows( @@ -94,7 +94,7 @@ public void testRangeField() throws IOException { new FieldExistsQuery(fieldName), iw -> { iw.addDocument(singleton(new BinaryDocValuesField(fieldName, encodedRange))); }, hdr -> {}, - fieldType, + mappedField, fieldName ) ); @@ -177,15 +177,15 @@ public void testHdrThenTdigestSettings() throws Exception { private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); - testCase(query, buildIndex, verify, fieldType, "number"); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + testCase(query, buildIndex, verify, mappedField, "number"); } private void testCase( Query query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType fieldType, + 
MappedField mappedField, String fieldName ) throws IOException { try (Directory directory = newDirectory()) { @@ -205,7 +205,7 @@ private void testCase( builder = new PercentilesAggregationBuilder("test").field(fieldName).percentilesConfig(hdr); } - HDRPercentilesAggregator aggregator = createAggregator(builder, indexSearcher, fieldType); + HDRPercentilesAggregator aggregator = createAggregator(builder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(query, aggregator); aggregator.postCollection(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java index c01363c81c195..3e19d9a336674 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; @@ -157,7 +157,7 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new MaxAggregationBuilder("_name").field(fieldName); } @@ -244,7 +244,7 @@ public void testUnmappedWithMissingField() throws IOException { } public void testMissingFieldOptimization() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + 
MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); AggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number").missing(19L); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { @@ -254,11 +254,11 @@ public void testMissingFieldOptimization() throws IOException { }, max -> { assertEquals(max.value(), 19.0, 0); assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } public void testScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); AggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap())); @@ -269,14 +269,14 @@ public void testScript() throws IOException { }, max -> { assertEquals(max.value(), SCRIPT_VALUE, 0); // Note this is the script value (19L), not the doc values above assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } private void testAggregation(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number"); - testAggregation(aggregationBuilder, query, buildIndex, verify, fieldType); + testAggregation(aggregationBuilder, query, buildIndex, verify, mappedField); } private void testAggregation( @@ -284,9 +284,9 @@ private void testAggregation( Query 
query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - testCase(aggregationBuilder, query, buildIndex, verify, fieldTypes); + testCase(aggregationBuilder, query, buildIndex, verify, mappedFields); } public void testMaxShortcutRandom() throws Exception { @@ -406,7 +406,7 @@ public void testSingleValuedField() throws IOException { } public void testSingleValuedFieldWithFormatter() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").format("0000.0").field("value"); @@ -419,11 +419,11 @@ public void testSingleValuedFieldWithFormatter() throws IOException { assertEquals(10.0, max.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(max)); assertEquals("0010.0", max.getValueAsString()); - }, fieldType); + }, mappedField); } public void testSingleValuedFieldGetProperty() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); AggregationBuilder aggregationBuilder = AggregationBuilders.global("global") .subAggregation(AggregationBuilders.max("max").field("value")); @@ -439,7 +439,7 @@ public void testSingleValuedFieldGetProperty() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - GlobalAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + GlobalAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); 
aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -481,10 +481,10 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); AggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value"); - MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -501,7 +501,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { } public void testSingleValuedFieldWithValueScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); @@ -515,11 +515,11 @@ public void testSingleValuedFieldWithValueScript() throws IOException { assertTrue(AggregationInspectionHelper.hasValue(max)); assertEquals(10.0, max.value(), 0); assertEquals("max", max.getName()); - }, fieldType); + }, mappedField); } public void testSingleValuedFieldWithValueScriptWithParams() throws IOException { - MappedFieldType fieldType = new 
NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); Map params = Collections.singletonMap("inc", 1); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value") @@ -534,7 +534,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws IOException assertEquals(11.0, max.value(), 0); assertEquals("max", max.getName()); assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } public void testMultiValuedField() throws IOException { @@ -553,7 +553,7 @@ public void testMultiValuedField() throws IOException { } public void testMultiValuedFieldWithValueScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("values") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); @@ -569,11 +569,11 @@ public void testMultiValuedFieldWithValueScript() throws IOException { }, max -> { assertEquals(12.0, max.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } public void testMultiValuedFieldWithValueScriptWithParams() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); Map params = Collections.singletonMap("inc", 1); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("values") @@ -590,11 +590,11 @@ public 
void testMultiValuedFieldWithValueScriptWithParams() throws IOException { }, max -> { assertEquals(13.0, max.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } public void testScriptSingleValued() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_FIELD_SCRIPT, Collections.emptyMap()) @@ -608,11 +608,11 @@ public void testScriptSingleValued() throws IOException { }, max -> { assertEquals(10.0, max.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } public void testScriptSingleValuedWithParams() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); Map params = new HashMap<>(); params.put("inc", 1); @@ -630,11 +630,11 @@ public void testScriptSingleValuedWithParams() throws IOException { }, max -> { assertEquals(11.0, max.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } public void testScriptMultiValued() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_VALUES_FIELD_SCRIPT, 
Collections.emptyMap()) @@ -651,7 +651,7 @@ public void testScriptMultiValued() throws IOException { }, max -> { assertEquals(12.0, max.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } public void testScriptMultiValuedWithParams() throws IOException { @@ -659,7 +659,7 @@ public void testScriptMultiValuedWithParams() throws IOException { params.put("inc", 1); params.put("field", "values"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params) @@ -676,11 +676,11 @@ public void testScriptMultiValuedWithParams() throws IOException { }, max -> { assertEquals(13.0, max.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(max)); - }, fieldType); + }, mappedField); } public void testEmptyAggregation() throws Exception { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); AggregationBuilder aggregationBuilder = AggregationBuilders.global("global") .subAggregation(AggregationBuilders.max("max").field("value")); @@ -693,7 +693,7 @@ public void testEmptyAggregation() throws Exception { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - GlobalAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + GlobalAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(new 
MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -715,7 +715,7 @@ public void testEmptyAggregation() throws Exception { } public void testOrderByEmptyAggregation() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.NUMERIC) .field("value") @@ -735,7 +735,7 @@ public void testOrderByEmptyAggregation() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -766,7 +766,7 @@ public void testOrderByEmptyAggregation() throws IOException { } public void testEarlyTermination() throws Exception { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("values", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -785,8 +785,8 @@ public void testEarlyTermination() throws Exception { MaxAggregationBuilder maxAggregationBuilder = new MaxAggregationBuilder("max").field("values"); ValueCountAggregationBuilder countAggregationBuilder = new ValueCountAggregationBuilder("count").field("values"); - MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, indexSearcher, fieldType); - 
ValueCountAggregator countAggregator = createAggregator(countAggregationBuilder, indexSearcher, fieldType); + MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, indexSearcher, mappedField); + ValueCountAggregator countAggregator = createAggregator(countAggregationBuilder, indexSearcher, mappedField); BucketCollector bucketCollector = MultiBucketCollector.wrap(true, List.of(maxAggregator, countAggregator)); bucketCollector.preCollection(); @@ -808,8 +808,14 @@ public void testEarlyTermination() throws Exception { } public void testNestedEarlyTermination() throws Exception { - MappedFieldType multiValuesfieldType = new NumberFieldMapper.NumberFieldType("values", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType singleValueFieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField multiValuesfield = new MappedField( + "values", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); + MappedField singleValueField = new MappedField( + "value", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -834,9 +840,9 @@ public void testNestedEarlyTermination() throws Exception { .collectMode(collectionMode) .subAggregation(new MaxAggregationBuilder("sub_max").field("invalid")); - MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, indexSearcher, multiValuesfieldType); - ValueCountAggregator countAggregator = createAggregator(countAggregationBuilder, indexSearcher, multiValuesfieldType); - TermsAggregator termsAggregator = createAggregator(termsAggregationBuilder, indexSearcher, singleValueFieldType); + MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, indexSearcher, multiValuesfield); + ValueCountAggregator countAggregator = createAggregator(countAggregationBuilder, indexSearcher, 
multiValuesfield); + TermsAggregator termsAggregator = createAggregator(termsAggregationBuilder, indexSearcher, singleValueField); BucketCollector bucketCollector = MultiBucketCollector.wrap(true, List.of(maxAggregator, countAggregator, termsAggregator)); bucketCollector.preCollection(); @@ -890,10 +896,10 @@ public void testCacheAggregation() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value"); - AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); + AggregationContext context = createAggregationContext(indexSearcher, null, mappedField); MaxAggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -935,11 +941,11 @@ public void testScriptCaching() throws Exception { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); - AggregationContext context = createAggregationContext(indexSearcher, null, fieldType); + AggregationContext context = 
createAggregationContext(indexSearcher, null, mappedField); MaxAggregator aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); @@ -956,7 +962,7 @@ public void testScriptCaching() throws Exception { aggregationBuilder = new MaxAggregationBuilder("max").field("value") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, RANDOM_SCRIPT, Collections.emptyMap())); - context = createAggregationContext(indexSearcher, null, fieldType); + context = createAggregationContext(indexSearcher, null, mappedField); aggregator = createAggregator(aggregationBuilder, context); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java index 07a98c2a5744d..bd852092214b9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java @@ -18,7 +18,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; @@ -172,7 +172,7 @@ public void testUnmappedMissing() throws IOException { } public void testValueScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField(FIELD_NAME, new 
NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); MedianAbsoluteDeviationAggregationBuilder aggregationBuilder = new MedianAbsoluteDeviationAggregationBuilder("foo").field( FIELD_NAME @@ -186,7 +186,7 @@ public void testValueScript() throws IOException { }), agg -> { assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); assertTrue(AggregationInspectionHelper.hasValue(agg)); - }, fieldType); + }, mappedField); } public void testSingleScript() throws IOException { @@ -194,7 +194,7 @@ public void testSingleScript() throws IOException { new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap()) ); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField(FIELD_NAME, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); final int size = randomIntBetween(100, 1000); final List sample = new ArrayList<>(size); @@ -205,7 +205,7 @@ public void testSingleScript() throws IOException { }, agg -> { assertEquals(0, agg.getMedianAbsoluteDeviation(), 0); assertTrue(AggregationInspectionHelper.hasValue(agg)); - }, fieldType); + }, mappedField); } private void testAggregation( @@ -216,9 +216,9 @@ private void testAggregation( MedianAbsoluteDeviationAggregationBuilder builder = new MedianAbsoluteDeviationAggregationBuilder("mad").field(FIELD_NAME) .compression(randomDoubleBetween(20, 1000, true)); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField(FIELD_NAME, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); - testAggregation(builder, query, buildIndex, verify, fieldType); + testAggregation(builder, query, buildIndex, verify, mappedField); } private void testAggregation( @@ -226,9 +226,9 @@ private void testAggregation( Query query, CheckedConsumer 
indexer, Consumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - testCase(aggregationBuilder, query, indexer, verify, fieldTypes); + testCase(aggregationBuilder, query, indexer, verify, mappedFields); } public static class IsCloseToRelative extends TypeSafeMatcher { @@ -311,7 +311,7 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new MedianAbsoluteDeviationAggregationBuilder("foo").field(fieldName); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java index ef1855891ccaa..79eced225b182 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java @@ -39,7 +39,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; @@ -230,18 +230,18 @@ public void testIpField() throws IOException { final String fieldName = "IP_field"; MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field(fieldName); - MappedFieldType fieldType = new IpFieldMapper.IpFieldType(fieldName); + MappedField mappedField = new MappedField(fieldName, new IpFieldMapper.IpFieldType()); boolean v4 = randomBoolean(); expectThrows(IllegalArgumentException.class, () -> 
testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(randomIp(v4)))))); iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(randomIp(v4)))))); - }, min -> fail("expected an exception"), fieldType)); + }, min -> fail("expected an exception"), mappedField)); } public void testUnmappedWithMissingField() throws IOException { MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("does_not_exist").missing(0L); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); @@ -249,13 +249,13 @@ public void testUnmappedWithMissingField() throws IOException { }, (Consumer) min -> { assertEquals(0.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void testUnsupportedType() { MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("not_a_number"); - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("not_a_number"); + MappedField mappedField = new MappedField("not_a_number", new KeywordFieldMapper.KeywordFieldType()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -264,7 +264,7 @@ public void testUnsupportedType() { new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); }, (Consumer) min -> { fail("Should have thrown exception"); }, - fieldType + mappedField ) ); assertEquals("Field [not_a_number] of type [keyword] is not supported for aggregation [min]", e.getMessage()); @@ 
-273,23 +273,23 @@ public void testUnsupportedType() { public void testBadMissingField() { MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number").missing("not_a_number"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); iw.addDocument(singleton(new NumericDocValuesField("number", 1))); - }, (Consumer) min -> { fail("Should have thrown exception"); }, fieldType)); + }, (Consumer) min -> { fail("Should have thrown exception"); }, mappedField)); } public void testUnmappedWithBadMissingField() { MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("does_not_exist").missing("not_a_number"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); iw.addDocument(singleton(new NumericDocValuesField("number", 1))); - }, (Consumer) min -> { fail("Should have thrown exception"); }, fieldType)); + }, (Consumer) min -> { fail("Should have thrown exception"); }, mappedField)); } public void testEmptyBucket() throws IOException { @@ -298,7 +298,7 @@ public void testEmptyBucket() throws IOException { .minDocCount(0) .subAggregation(new MinAggregationBuilder("min").field("number")); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", 
NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(histogram, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 1))); @@ -321,13 +321,13 @@ public void testEmptyBucket() throws IOException { assertEquals(3.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void testFormatter() throws IOException { MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number").format("0000.0"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); @@ -336,7 +336,7 @@ public void testFormatter() throws IOException { assertEquals(1.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); assertEquals("0001.0", min.getValueAsString()); - }, fieldType); + }, mappedField); } public void testGetProperty() throws IOException { @@ -344,7 +344,7 @@ public void testGetProperty() throws IOException { new MinAggregationBuilder("min").field("number") ); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(globalBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); @@ -359,12 +359,12 @@ public void testGetProperty() throws IOException { assertThat(global.getProperty("min"), equalTo(min)); assertThat(global.getProperty("min.value"), 
equalTo(1.0)); assertThat(min.getProperty("value"), equalTo(1.0)); - }, fieldType); + }, mappedField); } public void testSingleValuedFieldPartiallyUnmapped() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number"); try (Directory directory = newDirectory(); Directory unmappedDirectory = newDirectory()) { @@ -385,7 +385,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); - Min min = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, fieldType); + Min min = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, mappedField); assertEquals(2.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); } @@ -394,7 +394,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws IOException { public void testSingleValuedFieldPartiallyUnmappedWithMissing() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number").missing(-19L); try (Directory directory = newDirectory(); Directory unmappedDirectory = newDirectory()) { @@ -416,7 +416,7 @@ public void testSingleValuedFieldPartiallyUnmappedWithMissing() throws IOExcepti MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = 
newSearcher(multiReader, true, true); - Min min = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, fieldType); + Min min = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, mappedField); assertEquals(-19.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); } @@ -424,7 +424,7 @@ public void testSingleValuedFieldPartiallyUnmappedWithMissing() throws IOExcepti } public void testSingleValuedFieldWithValueScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERT_SCRIPT, Collections.emptyMap())); @@ -437,11 +437,11 @@ public void testSingleValuedFieldWithValueScript() throws IOException { }, (Consumer) min -> { assertEquals(-10.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void testSingleValuedFieldWithValueScriptAndMissing() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .missing(-100L) @@ -456,11 +456,11 @@ public void testSingleValuedFieldWithValueScriptAndMissing() throws IOException }, (Consumer) min -> { assertEquals(-100.0, min.value(), 0); // Note: this comes straight from missing, and is not inverted from script assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void 
testSingleValuedFieldWithValueScriptAndParams() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.singletonMap("inc", 5))); @@ -473,11 +473,11 @@ public void testSingleValuedFieldWithValueScriptAndParams() throws IOException { }, (Consumer) min -> { assertEquals(6.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void testScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").script( new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap()) @@ -491,13 +491,13 @@ public void testScript() throws IOException { }, (Consumer) min -> { assertEquals(19.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void testMultiValuedField() throws IOException { MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -510,14 +510,14 @@ public void 
testMultiValuedField() throws IOException { }, (Consumer) min -> { assertEquals(2.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void testMultiValuedFieldWithScript() throws IOException { MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERT_SCRIPT, Collections.emptyMap())); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -530,14 +530,14 @@ public void testMultiValuedFieldWithScript() throws IOException { }, (Consumer) min -> { assertEquals(-12.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void testMultiValuedFieldWithScriptParams() throws IOException { MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.singletonMap("inc", 5))); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { final int numDocs = 10; @@ -550,7 +550,7 @@ public void testMultiValuedFieldWithScriptParams() throws IOException { }, (Consumer) min -> { assertEquals(7.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - }, fieldType); + }, mappedField); } public void testOrderByEmptyAggregation() throws IOException { @@ -562,7 +562,7 @@ public void 
testOrderByEmptyAggregation() throws IOException { ) ); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); int numDocs = 10; testCase(termsBuilder, new MatchAllDocsQuery(), iw -> { @@ -586,12 +586,12 @@ public void testOrderByEmptyAggregation() throws IOException { assertEquals(Double.POSITIVE_INFINITY, min.value(), 0); assertFalse(AggregationInspectionHelper.hasValue(min)); } - }, fieldType); + }, mappedField); } public void testCaching() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number"); try (Directory directory = newDirectory()) { @@ -604,7 +604,7 @@ public void testCaching() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType); + AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), mappedField); createAggregator(aggregationBuilder, context); assertTrue(context.isCacheable()); } @@ -613,7 +613,7 @@ public void testCaching() throws IOException { public void testScriptCaching() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new 
MinAggregationBuilder("min").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, INVERT_SCRIPT, Collections.emptyMap())); @@ -630,11 +630,11 @@ public void testScriptCaching() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType); + AggregationContext context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), mappedField); createAggregator(nonDeterministicAggregationBuilder, context); assertFalse(context.isCacheable()); - context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), fieldType); + context = createAggregationContext(indexSearcher, new MatchAllDocsQuery(), mappedField); createAggregator(aggregationBuilder, context); assertTrue(context.isCacheable()); } @@ -716,8 +716,8 @@ private void testMinShortcutCase( private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("min").field("number"); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + testCase(aggregationBuilder, query, buildIndex, verify, mappedField); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java index 99215ba755bdb..e62573dbd05be 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java @@ -20,7 +20,7 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.script.MockScriptEngine; @@ -63,21 +63,21 @@ public class StatsAggregatorTests extends AggregatorTestCase { // TODO: Script tests, should fail with defaultValuesSourceType disabled. public void testEmpty() throws IOException { - final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); - testCase(stats("_name").field(ft.name()), iw -> {}, stats -> { + final MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); + testCase(stats("_name").field(mappedField.name()), iw -> {}, stats -> { assertEquals(0d, stats.getCount(), 0); assertEquals(0d, stats.getSum(), 0); assertEquals(Float.NaN, stats.getAvg(), 0); assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0); assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0); assertFalse(AggregationInspectionHelper.hasValue(stats)); - }, ft); + }, mappedField); } public void testRandomDoubles() throws IOException { - final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.DOUBLE); + final MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); final SimpleStatsAggregator expected = new SimpleStatsAggregator(); - testCase(stats("_name").field(ft.name()), iw -> { + testCase(stats("_name").field(mappedField.name()), iw -> { int numDocs = randomIntBetween(10, 50); for (int i = 0; i < numDocs; i++) { Document doc = new Document(); @@ -85,7 +85,7 @@ public 
void testRandomDoubles() throws IOException { for (int j = 0; j < numValues; j++) { double value = randomDoubleBetween(-100d, 100d, true); long valueAsLong = NumericUtils.doubleToSortableLong(value); - doc.add(new SortedNumericDocValuesField(ft.name(), valueAsLong)); + doc.add(new SortedNumericDocValuesField(mappedField.name(), valueAsLong)); expected.add(value); } iw.addDocument(doc); @@ -97,7 +97,7 @@ public void testRandomDoubles() throws IOException { assertEquals(expected.max, stats.getMax(), 0); assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); assertTrue(AggregationInspectionHelper.hasValue(stats)); - }, ft); + }, mappedField); } public void testRandomLongs() throws IOException { @@ -149,7 +149,7 @@ private void verifySummationOfDoubles( double singleSegmentDelta, double manySegmentDelta ) throws IOException { - MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.DOUBLE); + final MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); double max = Double.NEGATIVE_INFINITY; double min = Double.POSITIVE_INFINITY; @@ -159,10 +159,10 @@ private void verifySummationOfDoubles( } double expectedMax = max; double expectedMin = min; - testCase(stats("_name").field(ft.name()), iw -> { + testCase(stats("_name").field(mappedField.name()), iw -> { List> docs = new ArrayList<>(); for (double value : values) { - docs.add(singletonList(new NumericDocValuesField(ft.name(), NumericUtils.doubleToSortableLong(value)))); + docs.add(singletonList(new NumericDocValuesField(mappedField.name(), NumericUtils.doubleToSortableLong(value)))); } iw.addDocuments(docs); }, stats -> { @@ -172,10 +172,10 @@ private void verifySummationOfDoubles( assertEquals(expectedMax, stats.getMax(), 0d); assertEquals(expectedMin, stats.getMin(), 0d); assertTrue(AggregationInspectionHelper.hasValue(stats)); - }, ft); - testCase(stats("_name").field(ft.name()), iw -> { + }, mappedField); + 
testCase(stats("_name").field(mappedField.name()), iw -> { for (double value : values) { - iw.addDocument(singletonList(new NumericDocValuesField(ft.name(), NumericUtils.doubleToSortableLong(value)))); + iw.addDocument(singletonList(new NumericDocValuesField(mappedField.name(), NumericUtils.doubleToSortableLong(value)))); } }, stats -> { assertEquals(values.length, stats.getCount()); @@ -184,7 +184,7 @@ private void verifySummationOfDoubles( assertEquals(expectedMax, stats.getMax(), 0d); assertEquals(expectedMin, stats.getMin(), 0d); assertTrue(AggregationInspectionHelper.hasValue(stats)); - }, ft); + }, mappedField); } public void testUnmapped() throws IOException { @@ -206,15 +206,15 @@ public void testPartiallyUnmapped() throws IOException { RandomIndexWriter unmappedWriter = new RandomIndexWriter(random(), unmappedDirectory) ) { - final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); + final MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final SimpleStatsAggregator expected = new SimpleStatsAggregator(); final int numDocs = randomIntBetween(10, 50); for (int i = 0; i < numDocs; i++) { final long value = randomLongBetween(-100, 100); - mappedWriter.addDocument(singleton(new SortedNumericDocValuesField(ft.name(), value))); + mappedWriter.addDocument(singleton(new SortedNumericDocValuesField(mappedField.name(), value))); expected.add(value); } - final StatsAggregationBuilder builder = stats("_name").field(ft.name()); + final StatsAggregationBuilder builder = stats("_name").field(mappedField.name()); try ( IndexReader mappedReader = mappedWriter.getReader(); @@ -223,7 +223,7 @@ public void testPartiallyUnmapped() throws IOException { ) { final IndexSearcher searcher = new IndexSearcher(multiReader); - final InternalStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, ft); + final InternalStats stats = searchAndReduce(searcher, new 
MatchAllDocsQuery(), builder, mappedField); assertEquals(expected.count, stats.getCount(), 0); assertEquals(expected.sum, stats.getSum(), TOLERANCE); @@ -326,7 +326,7 @@ public void testFieldScriptMultiValuedField() throws IOException { } public void testMissing() throws IOException { - final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); + final MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final long missingValue = randomIntBetween(-100, 100); @@ -336,7 +336,7 @@ public void testMissing() throws IOException { for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { final long value = randomLongBetween(-100, 100); - docs.add(singleton(new SortedNumericDocValuesField(ft.name(), value))); + docs.add(singleton(new SortedNumericDocValuesField(mappedField.name(), value))); expected.add(value); } else { docs.add(emptySet()); @@ -344,14 +344,14 @@ public void testMissing() throws IOException { } } - testCase(stats("_name").field(ft.name()).missing(missingValue), iw -> iw.addDocuments(docs), stats -> { + testCase(stats("_name").field(mappedField.name()).missing(missingValue), iw -> iw.addDocuments(docs), stats -> { assertEquals(expected.count, stats.getCount(), 0); assertEquals(expected.sum, stats.getSum(), TOLERANCE); assertEquals(expected.max, stats.getMax(), 0); assertEquals(expected.min, stats.getMin(), 0); assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); assertTrue(AggregationInspectionHelper.hasValue(stats)); - }, ft); + }, mappedField); } public void testMissingUnmapped() throws IOException { @@ -374,27 +374,27 @@ private void randomLongsTestCase( BiConsumer verify ) throws IOException { - final MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); + final MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final int numDocs = randomIntBetween(10, 50); 
final List> docs = new ArrayList<>(numDocs); final SimpleStatsAggregator expected = new SimpleStatsAggregator(); for (int iDoc = 0; iDoc < numDocs; iDoc++) { List values = randomList(valuesPerField, valuesPerField, () -> randomLongBetween(-100, 100)); - docs.add(values.stream().map(value -> new SortedNumericDocValuesField(ft.name(), value)).collect(toSet())); + docs.add(values.stream().map(value -> new SortedNumericDocValuesField(mappedField.name(), value)).collect(toSet())); values.forEach(expected::add); } - testCase(builder, iw -> iw.addDocuments(docs), stats -> verify.accept(expected, stats), ft); + testCase(builder, iw -> iw.addDocuments(docs), stats -> verify.accept(expected, stats), mappedField); } private void testCase( StatsAggregationBuilder builder, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - testCase(builder, new MatchAllDocsQuery(), buildIndex, verify, fieldTypes); + testCase(builder, new MatchAllDocsQuery(), buildIndex, verify, mappedFields); } static class SimpleStatsAggregator { @@ -430,7 +430,7 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new StatsAggregationBuilder("_name").field(fieldName); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java index cbb6a292fc662..b9cdf92b5c55a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.script.MockScriptEngine; @@ -202,7 +202,7 @@ private void verifySummationOfDoubles(double[] values, double expected, double d .mapToObj(value -> singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value)))) .collect(toList()) ); - }, result -> assertEquals(expected, result.value(), delta), defaultFieldType(NumberType.DOUBLE)); + }, result -> assertEquals(expected, result.value(), delta), defaultMappedField(NumberType.DOUBLE)); } public void testUnmapped() throws IOException { @@ -213,9 +213,9 @@ public void testUnmapped() throws IOException { } public void testPartiallyUnmapped() throws IOException { - final MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberType.LONG); + final MappedField mappedField = new MappedField(FIELD_NAME, new NumberFieldMapper.NumberFieldType(NumberType.LONG)); - final SumAggregationBuilder builder = sum("_name").field(fieldType.name()); + final SumAggregationBuilder builder = sum("_name").field(mappedField.name()); final int numDocs = randomIntBetween(10, 100); final List> docs = new ArrayList<>(numDocs); @@ -223,7 +223,7 @@ public void testPartiallyUnmapped() throws IOException { for (int i = 0; i < numDocs; i++) { final long value = randomLongBetween(0, 1000); sum += value; - docs.add(singleton(new NumericDocValuesField(fieldType.name(), value))); + docs.add(singleton(new NumericDocValuesField(mappedField.name(), value))); } try (Directory mappedDirectory = newDirectory(); Directory unmappedDirectory = newDirectory()) { @@ -241,7 +241,7 @@ public void testPartiallyUnmapped() throws IOException { final IndexSearcher searcher = newSearcher(multiReader, true, true); - final Sum 
internalSum = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldType); + final Sum internalSum = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, mappedField); assertEquals(sum, internalSum.value(), 0d); assertTrue(AggregationInspectionHelper.hasValue(internalSum)); } @@ -295,8 +295,8 @@ public void testFieldScriptMultiValuedField() throws IOException { } public void testMissing() throws IOException { - final MappedFieldType aggField = defaultFieldType(); - final MappedFieldType irrelevantField = new NumberFieldMapper.NumberFieldType("irrelevant_field", NumberType.LONG); + final MappedField aggField = defaultMappedField(); + final MappedField irrelevantField = new MappedField("irrelevant_field", new NumberFieldMapper.NumberFieldType(NumberType.LONG)); final int numDocs = randomIntBetween(10, 100); final long missingValue = randomLongBetween(1, 1000); @@ -341,7 +341,7 @@ private void sumRandomDocsTestCase( TriConsumer>, Sum> verify ) throws IOException { - final MappedFieldType fieldType = defaultFieldType(); + final MappedField mappedField = defaultMappedField(); final int numDocs = randomIntBetween(10, 100); final List> docs = new ArrayList<>(numDocs); @@ -351,7 +351,7 @@ private void sumRandomDocsTestCase( for (int iValue = 0; iValue < valuesPerField; iValue++) { final long value = randomLongBetween(0, 1000); sum += value; - doc.add(new SortedNumericDocValuesField(fieldType.name(), value)); + doc.add(new SortedNumericDocValuesField(mappedField.name(), value)); } docs.add(doc); } @@ -362,14 +362,14 @@ private void sumRandomDocsTestCase( new MatchAllDocsQuery(), writer -> writer.addDocuments(docs), internalSum -> verify.apply(finalSum, docs, internalSum), - fieldType + mappedField ); } private void testAggregation(Query query, CheckedConsumer indexer, Consumer verify) throws IOException { AggregationBuilder aggregationBuilder = sum("_name").field(FIELD_NAME); - testAggregation(aggregationBuilder, query, indexer, verify, 
defaultFieldType()); + testAggregation(aggregationBuilder, query, indexer, verify, defaultMappedField()); } private void testAggregation( @@ -377,9 +377,9 @@ private void testAggregation( Query query, CheckedConsumer indexer, Consumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - testCase(aggregationBuilder, query, indexer, verify, fieldTypes); + testCase(aggregationBuilder, query, indexer, verify, mappedFields); } @Override @@ -388,7 +388,7 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new SumAggregationBuilder("_name").field(fieldName); } @@ -409,11 +409,11 @@ protected ScriptService getMockScriptService() { return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS, () -> 1L); } - private static MappedFieldType defaultFieldType() { - return defaultFieldType(NumberType.LONG); + private static MappedField defaultMappedField() { + return defaultMappedField(NumberType.LONG); } - private static MappedFieldType defaultFieldType(NumberType numberType) { - return new NumberFieldMapper.NumberFieldType(FIELD_NAME, numberType); + private static MappedField defaultMappedField(NumberType numberType) { + return new MappedField(FIELD_NAME, new NumberFieldMapper.NumberFieldType(numberType)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java index f77567d462589..a78aad559879a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java @@ -17,8 +17,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; @@ -33,7 +34,7 @@ public class TDigestPercentileRanksAggregatorTests extends AggregatorTestCase { @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new PercentileRanksAggregationBuilder("tdigest_ranks", new double[] { 0.1, 0.5, 12 }).field(fieldName) .percentilesConfig(new PercentilesConfig.TDigest()); } @@ -46,10 +47,10 @@ protected List getSupportedValuesSourceTypes() { public void testEmpty() throws IOException { PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.5 }).field("field") .method(PercentilesMethod.TDIGEST); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); try (IndexReader reader = new MultiReader()) { IndexSearcher searcher = new IndexSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, 
mappedField); Percentile rank = ranks.iterator().next(); assertEquals(Double.NaN, rank.getPercent(), 0d); assertEquals(0.5, rank.getValue(), 0d); @@ -68,10 +69,10 @@ public void testSimple() throws IOException { PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) .field("field") .method(PercentilesMethod.TDIGEST); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberType.DOUBLE)); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); Iterator rankIterator = ranks.iterator(); Percentile rank = rankIterator.next(); assertEquals(0.1, rank.getValue(), 0d); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java index 799cf72d82f62..185a34b94dca6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java @@ -20,7 +20,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -40,7 
+40,7 @@ public class TDigestPercentilesAggregatorTests extends AggregatorTestCase { @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new PercentilesAggregationBuilder("tdist_percentiles").field(fieldName).percentilesConfig(new PercentilesConfig.TDigest()); } @@ -179,8 +179,11 @@ private void testCase( builder = new PercentilesAggregationBuilder("test").field("number").percentilesConfig(hdr); } - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); - TDigestPercentilesAggregator aggregator = createAggregator(builder, indexSearcher, fieldType); + final MappedField mappedField = new MappedField( + "number", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + TDigestPercentilesAggregator aggregator = createAggregator(builder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(query, aggregator); aggregator.postCollection(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 98c9d0ffe0472..7f80d05b295af 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -30,7 +30,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.ProvidedIdFieldMapper; import org.elasticsearch.index.mapper.Uid; import 
org.elasticsearch.search.SearchHits; @@ -116,7 +116,7 @@ public void testInsideTerms() throws Exception { assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("d").getAggregations().get("top")))); } - private static final MappedFieldType STRING_FIELD_TYPE = new KeywordFieldMapper.KeywordFieldType("string"); + private static final MappedField STRING_FIELD = new MappedField("string", new KeywordFieldMapper.KeywordFieldType()); private Aggregation testCase(Query query, AggregationBuilder builder) throws IOException { Directory directory = newDirectory(); @@ -130,7 +130,7 @@ private Aggregation testCase(Query query, AggregationBuilder builder) throws IOE // We do not use LuceneTestCase.newSearcher because we need a DirectoryReader for "testInsideTerms" IndexSearcher indexSearcher = new IndexSearcher(indexReader); - Aggregation result = searchAndReduce(indexSearcher, query, builder, STRING_FIELD_TYPE); + Aggregation result = searchAndReduce(indexSearcher, query, builder, STRING_FIELD); indexReader.close(); directory.close(); return result; @@ -185,7 +185,7 @@ public void testSetScorer() throws Exception { .add(new TermQuery(new Term("string", "baz")), Occur.SHOULD) .build(); AggregationBuilder agg = AggregationBuilders.topHits("top_hits"); - TopHits result = searchAndReduce(searcher, query, agg, STRING_FIELD_TYPE); + TopHits result = searchAndReduce(searcher, query, agg, STRING_FIELD); assertEquals(3, result.getHits().getTotalHits().value); reader.close(); directory.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java index bb30878c78d76..eaf99f7e4c9ea 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java @@ -30,6 
+30,7 @@ import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.RangeFieldMapper; @@ -68,7 +69,7 @@ public class ValueCountAggregatorTests extends AggregatorTestCase { private static final String SINGLE_SCRIPT = "single"; @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new ValueCountAggregationBuilder("foo").field(fieldName); } @@ -240,7 +241,7 @@ public void testRangeFieldValues() throws IOException { final RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true); final RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(rangeType, 6.0D, 10.0D, true, true); final String fieldName = "rangeField"; - MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(fieldName, rangeType); + MappedFieldType fieldType = new RangeFieldMapper.RangeFieldType(rangeType); final ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("_name").field(fieldName); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(singleton(range1))))); @@ -250,14 +251,14 @@ public void testRangeFieldValues() throws IOException { }, count -> { assertEquals(4.0, count.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(count)); - }, fieldType); + }, new MappedField(fieldName, fieldType)); } public void testValueScriptNumber() throws IOException { ValueCountAggregationBuilder aggregationBuilder = new 
ValueCountAggregationBuilder("name").field(FIELD_NAME) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, NUMBER_VALUE_SCRIPT, Collections.emptyMap())); - MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, ValueType.NUMERIC); + MappedFieldType fieldType = createMappedFieldType(ValueType.NUMERIC); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 7))); @@ -266,7 +267,7 @@ public void testValueScriptNumber() throws IOException { }, valueCount -> { assertEquals(3, valueCount.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(valueCount)); - }, fieldType); + }, new MappedField(FIELD_NAME, fieldType)); } public void testSingleScriptNumber() throws IOException { @@ -274,7 +275,7 @@ public void testSingleScriptNumber() throws IOException { new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap()) ); - MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, ValueType.NUMERIC); + MappedFieldType fieldType = createMappedFieldType(ValueType.NUMERIC); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { Document doc = new Document(); @@ -296,14 +297,14 @@ public void testSingleScriptNumber() throws IOException { // once per document, and only expect a count of 3 assertEquals(3, valueCount.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(valueCount)); - }, fieldType); + }, new MappedField(FIELD_NAME, fieldType)); } public void testValueScriptString() throws IOException { ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("name").field(FIELD_NAME) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, STRING_VALUE_SCRIPT, Collections.emptyMap())); - MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, ValueType.STRING); + MappedFieldType fieldType = createMappedFieldType(ValueType.STRING); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), 
iw -> { iw.addDocument(singleton(new SortedDocValuesField(FIELD_NAME, new BytesRef("1")))); @@ -312,7 +313,7 @@ public void testValueScriptString() throws IOException { }, valueCount -> { assertEquals(3, valueCount.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(valueCount)); - }, fieldType); + }, new MappedField(FIELD_NAME, fieldType)); } public void testSingleScriptString() throws IOException { @@ -320,7 +321,7 @@ public void testSingleScriptString() throws IOException { new Script(ScriptType.INLINE, MockScriptEngine.NAME, SINGLE_SCRIPT, Collections.emptyMap()) ); - MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, ValueType.STRING); + MappedFieldType fieldType = createMappedFieldType(ValueType.STRING); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { Document doc = new Document(); @@ -343,7 +344,7 @@ public void testSingleScriptString() throws IOException { // once per document, and only expect a count of 3 assertEquals(3, valueCount.getValue(), 0); assertTrue(AggregationInspectionHelper.hasValue(valueCount)); - }, fieldType); + }, new MappedField(FIELD_NAME, fieldType)); } private void testAggregation( @@ -364,7 +365,7 @@ private void testAggregation( Consumer verify, boolean testWithHint ) throws IOException { - MappedFieldType fieldType = createMappedFieldType(FIELD_NAME, valueType); + MappedFieldType fieldType = createMappedFieldType(valueType); ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("_name"); if (valueType != null && testWithHint) { @@ -372,7 +373,7 @@ private void testAggregation( } aggregationBuilder.field(FIELD_NAME); - testAggregation(aggregationBuilder, query, indexer, verify, fieldType); + testAggregation(aggregationBuilder, query, indexer, verify, new MappedField(FIELD_NAME, fieldType)); } private void testAggregation( @@ -380,21 +381,21 @@ private void testAggregation( Query query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... 
fieldTypes + MappedField... mappedFields ) throws IOException { - testCase(aggregationBuilder, query, buildIndex, verify, fieldTypes); + testCase(aggregationBuilder, query, buildIndex, verify, mappedFields); } - private static MappedFieldType createMappedFieldType(String name, ValueType valueType) { + private static MappedFieldType createMappedFieldType(ValueType valueType) { return switch (valueType) { - case BOOLEAN -> new BooleanFieldMapper.BooleanFieldType(name); - case STRING -> new KeywordFieldMapper.KeywordFieldType(name); - case DOUBLE -> new NumberFieldMapper.NumberFieldType(name, NumberFieldMapper.NumberType.DOUBLE); - case NUMBER, NUMERIC, LONG -> new NumberFieldMapper.NumberFieldType(name, NumberFieldMapper.NumberType.LONG); - case DATE -> new DateFieldMapper.DateFieldType(name); - case IP -> new IpFieldMapper.IpFieldType(name); - case GEOPOINT -> new GeoPointFieldMapper.GeoPointFieldType(name); - case RANGE -> new RangeFieldMapper.RangeFieldType(name, RangeType.DOUBLE); + case BOOLEAN -> new BooleanFieldMapper.BooleanFieldType(); + case STRING -> new KeywordFieldMapper.KeywordFieldType(); + case DOUBLE -> new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); + case NUMBER, NUMERIC, LONG -> new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + case DATE -> new DateFieldMapper.DateFieldType(); + case IP -> new IpFieldMapper.IpFieldType(); + case GEOPOINT -> new GeoPointFieldMapper.GeoPointFieldType(); + case RANGE -> new RangeFieldMapper.RangeFieldType(RangeType.DOUBLE); }; } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java index 3776515c72cfa..e3552098f0b59 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java @@ -21,7 +21,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -527,9 +527,9 @@ private void testCase( IndexSearcher indexSearcher = newSearcher(indexReader, true, true); try { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("value_field", fieldNumberType); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("weight_field", fieldNumberType); - WeightedAvgAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType, fieldType2); + MappedField mappedField = new MappedField("value_field", new NumberFieldMapper.NumberFieldType(fieldNumberType)); + MappedField mappedField2 = new MappedField("weight_field", new NumberFieldMapper.NumberFieldType(fieldNumberType)); + WeightedAvgAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField, mappedField2); aggregator.preCollection(); indexSearcher.search(query, aggregator); aggregator.postCollection(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index e7c657861861d..ac50fb3c9914a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -24,7 +24,7 @@ import org.apache.lucene.util.BytesRef; import 
org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.Aggregation; @@ -107,12 +107,14 @@ public void testSameAggNames() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD); - - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD, NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField(DATE_FIELD, new DateFieldMapper.DateFieldType()); + MappedField valueField = new MappedField( + VALUE_FIELD, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); - avgResult = searchAndReduce(indexSearcher, query, avgBuilder, 10000, new MappedFieldType[] { fieldType, valueFieldType }); - histogramResult = searchAndReduce(indexSearcher, query, histo, 10000, new MappedFieldType[] { fieldType, valueFieldType }); + avgResult = searchAndReduce(indexSearcher, query, avgBuilder, 10000, mappedField, valueField); + histogramResult = searchAndReduce(indexSearcher, query, histo, 10000, mappedField, valueField); } // Finally, reduce the pipeline agg @@ -162,16 +164,14 @@ public void testComplicatedBucketPath() throws IOException { try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(DATE_FIELD); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD, NumberFieldMapper.NumberType.LONG); - MappedFieldType keywordField = keywordField(textField); - 
- filterResult = searchAndReduce( - indexSearcher, - query, - filterAggregationBuilder, - new MappedFieldType[] { fieldType, valueFieldType, keywordField } + MappedField mappedField = new MappedField(DATE_FIELD, new DateFieldMapper.DateFieldType()); + MappedField valueField = new MappedField( + VALUE_FIELD, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); + MappedField keywordField = keywordField(textField); + + filterResult = searchAndReduce(indexSearcher, query, filterAggregationBuilder, mappedField, valueField, keywordField); } // Finally, reduce the pipeline agg diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java index 4c065d864329b..78eb5c3b4d862 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.script.MockScriptEngine; @@ -61,8 +61,11 @@ protected ScriptService getMockScriptService() { } public void testScript() throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number_field", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("the_field"); + MappedField mappedField = new MappedField( + "number_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); + 
MappedField mappedField1 = new MappedField("the_field", new KeywordFieldMapper.KeywordFieldType()); FiltersAggregationBuilder filters = new FiltersAggregationBuilder("placeholder", new MatchAllQueryBuilder()).subAggregation( new TermsAggregationBuilder("the_terms").userValueTypeHint(ValueType.STRING) @@ -91,8 +94,8 @@ public void testScript() throws IOException { f -> { assertThat(((InternalSimpleValue) (f.getBuckets().get(0).getAggregations().get("bucket_script"))).value, equalTo(19.0)); }, - fieldType, - fieldType1 + mappedField, + mappedField1 ); } @@ -101,7 +104,7 @@ private void testCase( Query query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... fieldType + MappedField... mappedFields ) throws IOException { try (Directory directory = newDirectory()) { @@ -113,7 +116,7 @@ private void testCase( IndexSearcher indexSearcher = newIndexSearcher(indexReader); InternalFilters filters; - filters = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); + filters = searchAndReduce(indexSearcher, query, aggregationBuilder, mappedFields); verify.accept(filters); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index f99f79d19e147..b4bf90c76546d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilder; import 
org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -308,11 +308,14 @@ private void executeTestCase( try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(HISTO_FIELD); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField(HISTO_FIELD, new DateFieldMapper.DateFieldType()); + MappedField valueField = new MappedField( + "value_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); InternalAggregation histogram; - histogram = searchAndReduce(indexSearcher, query, aggBuilder, new MappedFieldType[] { fieldType, valueFieldType }); + histogram = searchAndReduce(indexSearcher, query, aggBuilder, new MappedField[] { mappedField, valueField }); verify.accept(histogram); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java index 15802181e25b6..5e63a58af8d22 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -712,10 +712,13 @@ private void executeTestCase( try (IndexReader 
indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(SINGLE_VALUED_FIELD_NAME); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField(SINGLE_VALUED_FIELD_NAME, new DateFieldMapper.DateFieldType()); + MappedField valueField = new MappedField( + "value_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); - InternalAggregation histogram = searchAndReduce(indexSearcher, query, aggBuilder, fieldType, valueFieldType); + InternalAggregation histogram = searchAndReduce(indexSearcher, query, aggBuilder, mappedField, valueField); verify.accept(histogram); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java index 478349225b885..3dc52881560eb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; @@ -133,11 +133,14 @@ private void executeTestCase(Query query, DateHistogramAggregationBuilder aggBui try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - DateFieldMapper.DateFieldType fieldType = 
new DateFieldMapper.DateFieldType(aggBuilder.field()); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField(aggBuilder.field(), new DateFieldMapper.DateFieldType()); + MappedField valueField = new MappedField( + "value_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); InternalDateHistogram histogram; - histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, new MappedFieldType[] { fieldType, valueFieldType }); + histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, mappedField, valueField); verify.accept(histogram); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceTypeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceTypeTests.java index 59e444ee41748..be3d40618e73d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceTypeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceTypeTests.java @@ -77,7 +77,7 @@ private void datePrepareRoundingWithQueryFoundCase(Function wrap) long max = randomLongBetween(min + 10, 100000000000L); MapperService mapperService = dateMapperService(); Query query = wrap.apply( - mapperService.fieldType("field") + mapperService.mappedField("field") .rangeQuery(min, max, true, true, ShapeRelation.CONTAINS, null, null, createSearchExecutionContext(mapperService)) ); withAggregationContext(null, mapperService, List.of(), query, context -> { @@ -100,7 +100,7 @@ private void datePrepareRoundingWithQueryNotFoundCase(Function wra long max = randomLongBetween(min + 10, 100000000000L); MapperService mapperService = dateMapperService(); Query query = wrap.apply( - mapperService.fieldType("field") + mapperService.mappedField("field") .rangeQuery(min, max, true, true, ShapeRelation.CONTAINS, 
null, null, createSearchExecutionContext(mapperService)) ); withAggregationContext(null, mapperService, List.of(), query, context -> { @@ -130,7 +130,7 @@ public void testDatePrepareRoundingWithDocAndQuery() throws IOException { maxDocs = max; } MapperService mapperService = dateMapperService(); - Query query = mapperService.fieldType("field") + Query query = mapperService.mappedField("field") .rangeQuery(minQuery, maxQuery, true, true, ShapeRelation.CONTAINS, null, null, createSearchExecutionContext(mapperService)); withAggregationContext(null, mapperService, docsWithDatesBetween(minDocs, maxDocs), query, context -> { Rounding rounding = mock(Rounding.class); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java index f105b77b67ad3..dfbc2b6522a16 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper.TimeSeriesIdBuilder; @@ -68,9 +68,9 @@ public void testStandAloneTimeSeriesWithSum() throws IOException { assertThat(((Sum) ts.getBucketByKey("{dim1=bbb, dim2=zzz}").getAggregations().get("sum")).value(), equalTo(22.0)); }, - new KeywordFieldMapper.KeywordFieldType("dim1"), - new KeywordFieldMapper.KeywordFieldType("dim2"), - new 
NumberFieldMapper.NumberFieldType("val1", NumberFieldMapper.NumberType.INTEGER) + new MappedField("dim1", new KeywordFieldMapper.KeywordFieldType()), + new MappedField("dim2", new KeywordFieldMapper.KeywordFieldType()), + new MappedField("val1", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)) ); } @@ -104,14 +104,14 @@ private void timeSeriesTestCase( Query query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - MappedFieldType[] newFieldTypes = new MappedFieldType[fieldTypes.length + 2]; - newFieldTypes[0] = TimeSeriesIdFieldMapper.FIELD_TYPE; - newFieldTypes[1] = new DateFieldMapper.DateFieldType("@timestamp"); - System.arraycopy(fieldTypes, 0, newFieldTypes, 2, fieldTypes.length); + MappedField[] newMappedFields = new MappedField[mappedFields.length + 2]; + newMappedFields[0] = new MappedField(TimeSeriesIdFieldMapper.NAME, TimeSeriesIdFieldMapper.FIELD_TYPE); + newMappedFields[1] = new MappedField("@timestamp", new DateFieldMapper.DateFieldType()); + System.arraycopy(mappedFields, 0, newMappedFields, 2, mappedFields.length); - testCase(builder, query, buildIndex, verify, newFieldTypes); + testCase(builder, query, buildIndex, verify, newMappedFields); } } diff --git a/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java b/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java index 8cc32c0f39045..689ff22c6b3cd 100644 --- a/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import 
org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TextSearchInfo; @@ -135,14 +136,14 @@ public void testBuild() throws IOException { try (IndexReader reader = DirectoryReader.open(dir)) { when(searchExecutionContext.getIndexReader()).thenReturn(reader); - MappedFieldType numberFieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); - when(searchExecutionContext.getFieldType("field")).thenReturn(numberFieldType); + MappedFieldType numberFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + MappedField numberField = new MappedField("field", numberFieldType); + when(searchExecutionContext.getMappedField("field")).thenReturn(numberField); CollapseBuilder builder = new CollapseBuilder("field"); CollapseContext collapseContext = builder.build(searchExecutionContext); - assertEquals(collapseContext.getFieldType(), numberFieldType); + assertEquals(collapseContext.getMappedField(), numberField); numberFieldType = new NumberFieldMapper.NumberFieldType( - "field", NumberFieldMapper.NumberType.LONG, true, false, @@ -154,12 +155,12 @@ public void testBuild() throws IOException { false, null ); - when(searchExecutionContext.getFieldType("field")).thenReturn(numberFieldType); + numberField = new MappedField("field", numberFieldType); + when(searchExecutionContext.getMappedField("field")).thenReturn(numberField); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> builder.build(searchExecutionContext)); assertEquals(exc.getMessage(), "cannot collapse on field `field` without `doc_values`"); numberFieldType = new NumberFieldMapper.NumberFieldType( - "field", NumberFieldMapper.NumberType.LONG, false, false, @@ -171,7 +172,8 @@ public void testBuild() throws IOException { false, null ); - when(searchExecutionContext.getFieldType("field")).thenReturn(numberFieldType); + numberField = new MappedField("field", numberFieldType); + 
when(searchExecutionContext.getMappedField("field")).thenReturn(numberField); builder.setInnerHits(new InnerHitBuilder()); exc = expectThrows(IllegalArgumentException.class, () -> builder.build(searchExecutionContext)); assertEquals( @@ -179,19 +181,22 @@ public void testBuild() throws IOException { "cannot expand `inner_hits` for collapse field `field`, only indexed field can retrieve `inner_hits`" ); - MappedFieldType keywordFieldType = new KeywordFieldMapper.KeywordFieldType("field"); - when(searchExecutionContext.getFieldType("field")).thenReturn(keywordFieldType); + MappedFieldType keywordFieldType = new KeywordFieldMapper.KeywordFieldType(); + MappedField keywordField = new MappedField("field", keywordFieldType); + when(searchExecutionContext.getMappedField("field")).thenReturn(keywordField); CollapseBuilder kbuilder = new CollapseBuilder("field"); collapseContext = kbuilder.build(searchExecutionContext); - assertEquals(collapseContext.getFieldType(), keywordFieldType); + assertEquals(collapseContext.getMappedField(), keywordField); - keywordFieldType = new KeywordFieldMapper.KeywordFieldType("field", true, false, Collections.emptyMap()); - when(searchExecutionContext.getFieldType("field")).thenReturn(keywordFieldType); + keywordFieldType = new KeywordFieldMapper.KeywordFieldType(true, false, Collections.emptyMap()); + keywordField = new MappedField("field", keywordFieldType); + when(searchExecutionContext.getMappedField("field")).thenReturn(keywordField); exc = expectThrows(IllegalArgumentException.class, () -> kbuilder.build(searchExecutionContext)); assertEquals(exc.getMessage(), "cannot collapse on field `field` without `doc_values`"); - keywordFieldType = new KeywordFieldMapper.KeywordFieldType("field", false, true, Collections.emptyMap()); - when(searchExecutionContext.getFieldType("field")).thenReturn(keywordFieldType); + keywordFieldType = new KeywordFieldMapper.KeywordFieldType(false, true, Collections.emptyMap()); + keywordField = new 
MappedField("field", keywordFieldType); + when(searchExecutionContext.getMappedField("field")).thenReturn(keywordField); kbuilder.setInnerHits(new InnerHitBuilder()); exc = expectThrows(IllegalArgumentException.class, () -> builder.build(searchExecutionContext)); assertEquals( @@ -211,27 +216,29 @@ public void testBuildWithExceptions() { } { - MappedFieldType fieldType = new MappedFieldType("field", true, false, true, TextSearchInfo.NONE, Collections.emptyMap()) { + MappedFieldType fieldType = new MappedFieldType(true, false, true, TextSearchInfo.NONE, Collections.emptyMap()) { @Override public String typeName() { return "some_type"; } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { return null; } - public Query existsQuery(SearchExecutionContext context) { + @Override + public Query existsQuery(String name, SearchExecutionContext context) { return null; } }; - when(searchExecutionContext.getFieldType("field")).thenReturn(fieldType); + MappedField mappedField = new MappedField("field", fieldType); + when(searchExecutionContext.getMappedField("field")).thenReturn(mappedField); CollapseBuilder builder = new CollapseBuilder("field"); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> builder.build(searchExecutionContext)); assertEquals(exc.getMessage(), "collapse is not supported for the field [field] of the type [some_type]"); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java index 6a596bf5af490..804e300f73dc2 100644 --- 
a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.LongFieldScriptTests; import org.elasticsearch.index.mapper.LuceneDocument; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.mapper.NestedPathFieldMapper; @@ -1239,7 +1239,7 @@ private static SearchExecutionContext newSearchExecutionContext(MapperService ma private static SearchExecutionContext newSearchExecutionContext( MapperService mapperService, - TriFunction, IndexFieldData> indexFieldDataLookup + TriFunction, IndexFieldData> indexFieldDataLookup ) { Settings settings = Settings.builder() .put("index.version.created", Version.CURRENT) diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 42f95718c522a..78bdff8f2a7fb 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.IdsQueryBuilder; @@ -315,9 +315,9 @@ public void testBuildSearchContextHighlight() 
throws IOException { emptyMap() ) { @Override - public MappedFieldType getFieldType(String name) { + public MappedField getMappedField(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); - return builder.build(MapperBuilderContext.ROOT).fieldType(); + return builder.build(MapperBuilderContext.ROOT).field(); } }; mockContext.setMapUnmappedFieldAsString(true); diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java index 9367f4646b5cc..67e2370daa001 100644 --- a/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java @@ -10,7 +10,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; -import org.elasticsearch.index.mapper.DynamicFieldType; +import org.elasticsearch.index.mapper.DynamicMappedField; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.flattened.FlattenedFieldMapper; @@ -39,18 +40,18 @@ public void setUp() throws Exception { docValues = mock(ScriptDocValues.class); MappedFieldType fieldType1 = mock(MappedFieldType.class); - when(fieldType1.name()).thenReturn("field"); when(fieldType1.valueForDisplay(any())).then(returnsFirstArg()); + MappedField field1 = new MappedField("field", fieldType1); IndexFieldData fieldData1 = createFieldData(docValues, "field"); MappedFieldType fieldType2 = mock(MappedFieldType.class); - when(fieldType1.name()).thenReturn("alias"); when(fieldType1.valueForDisplay(any())).then(returnsFirstArg()); + MappedField field2 = new MappedField("alias", fieldType2); IndexFieldData fieldData2 = 
createFieldData(docValues, "alias"); docLookup = new LeafDocLookup( - field -> field.equals("field") ? fieldType1 : field.equals("alias") ? fieldType2 : null, - fieldType -> fieldType == fieldType1 ? fieldData1 : fieldType == fieldType2 ? fieldData2 : null, + field -> field.equals("field") ? field1 : field.equals("alias") ? field2 : null, + field -> field == field1 ? fieldData1 : field == field2 ? fieldData2 : null, null ); } @@ -73,21 +74,21 @@ public void testFlattenedField() throws IOException { IndexFieldData fieldData2 = createFieldData(docValues2, "flattened.key2"); FlattenedFieldMapper fieldMapper = new FlattenedFieldMapper.Builder("field").build(MapperBuilderContext.ROOT); - DynamicFieldType fieldType = fieldMapper.fieldType(); - MappedFieldType fieldType1 = fieldType.getChildFieldType("key1"); - MappedFieldType fieldType2 = fieldType.getChildFieldType("key2"); + DynamicMappedField dynamicMappedField = (DynamicMappedField) fieldMapper.field(); + MappedField mappedField1 = dynamicMappedField.getChildField("key1"); + MappedField mappedField2 = dynamicMappedField.getChildField("key2"); - Function> fieldDataSupplier = ft -> { - FlattenedFieldMapper.KeyedFlattenedFieldType keyedFieldType = (FlattenedFieldMapper.KeyedFlattenedFieldType) ft; + Function> fieldDataSupplier = ft -> { + FlattenedFieldMapper.KeyedFlattenedFieldType keyedFieldType = (FlattenedFieldMapper.KeyedFlattenedFieldType) ft.type(); return keyedFieldType.key().equals("key1") ? 
fieldData1 : fieldData2; }; LeafDocLookup docLookup = new LeafDocLookup(field -> { if (field.equals("flattened.key1")) { - return fieldType1; + return mappedField1; } if (field.equals("flattened.key2")) { - return fieldType2; + return mappedField2; } return null; }, fieldDataSupplier, null); diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookupTests.java index a9111aa8e8e28..818d9e1a52d17 100644 --- a/server/src/test/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookupTests.java +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafStoredFieldsLookupTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.VectorSimilarityFunction; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -30,9 +31,9 @@ public void setUp() throws Exception { super.setUp(); MappedFieldType fieldType = mock(MappedFieldType.class); - when(fieldType.name()).thenReturn("field"); // Add 10 when valueForDisplay is called so it is easy to be sure it *was* called when(fieldType.valueForDisplay(any())).then(invocation -> (Double) invocation.getArguments()[0] + 10); + MappedField mappedField = new MappedField("field", fieldType); FieldInfo mockFieldInfo = new FieldInfo( "field", @@ -53,14 +54,14 @@ public void setUp() throws Exception { ); fieldsLookup = new LeafStoredFieldsLookup( - field -> field.equals("field") || field.equals("alias") ? fieldType : null, + field -> field.equals("field") || field.equals("alias") ? 
mappedField : null, (doc, visitor) -> visitor.doubleField(mockFieldInfo, 2.718) ); } public void testBasicLookup() { FieldLookup fieldLookup = fieldsLookup.get("field"); - assertEquals("field", fieldLookup.fieldType().name()); + assertEquals("field", fieldLookup.mappedField().name()); List values = fieldLookup.getValues(); assertNotNull(values); @@ -70,7 +71,7 @@ public void testBasicLookup() { public void testLookupWithFieldAlias() { FieldLookup fieldLookup = fieldsLookup.get("alias"); - assertEquals("field", fieldLookup.fieldType().name()); + assertEquals("field", fieldLookup.mappedField().name()); List values = fieldLookup.getValues(); assertNotNull(values); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 33a9ba5ea661f..92029de0aaee1 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -59,7 +59,7 @@ import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.SearchExecutionContext; @@ -640,11 +640,11 @@ public void testDisableTopScoreCollection() throws Exception { public void testNumericSortOptimization() throws Exception { final String fieldNameLong = "long-field"; final String fieldNameDate = "date-field"; - MappedFieldType fieldTypeLong = new NumberFieldMapper.NumberFieldType(fieldNameLong, NumberFieldMapper.NumberType.LONG); - MappedFieldType fieldTypeDate = new DateFieldMapper.DateFieldType(fieldNameDate); + MappedField fieldLong = new MappedField(fieldNameLong, new 
NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField fieldDate = new MappedField(fieldNameDate, new DateFieldMapper.DateFieldType()); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); - when(searchExecutionContext.getFieldType(fieldNameLong)).thenReturn(fieldTypeLong); - when(searchExecutionContext.getFieldType(fieldNameDate)).thenReturn(fieldTypeDate); + when(searchExecutionContext.getMappedField(fieldNameLong)).thenReturn(fieldLong); + when(searchExecutionContext.getMappedField(fieldNameDate)).thenReturn(fieldDate); // enough docs to have a tree with several leaf nodes final int numDocs = atLeast(3500 * 2); Directory dir = newDirectory(); @@ -676,7 +676,7 @@ public void testNumericSortOptimization() throws Exception { final Sort sortDate = new Sort(sortFieldDate); final Sort sortLongDate = new Sort(sortFieldLong, sortFieldDate); final Sort sortDateLong = new Sort(sortFieldDate, sortFieldLong); - final DocValueFormat dvFormatDate = fieldTypeDate.docValueFormat(null, null); + final DocValueFormat dvFormatDate = fieldDate.docValueFormat(null, null); final SortAndFormats formatsLong = new SortAndFormats(sortLong, new DocValueFormat[] { DocValueFormat.RAW }); final SortAndFormats formatsDate = new SortAndFormats(sortDate, new DocValueFormat[] { dvFormatDate }); final SortAndFormats formatsLongDate = new SortAndFormats(sortLongDate, new DocValueFormat[] { DocValueFormat.RAW, dvFormatDate }); diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 7685119e7b660..264516927b657 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -153,9 +153,9 @@ public void testBuildRescoreSearchContext() throws ElasticsearchParseException, emptyMap() ) { @Override - public MappedFieldType getFieldType(String name) { + public MappedField getMappedField(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); - return builder.build(MapperBuilderContext.ROOT).fieldType(); + return builder.build(MapperBuilderContext.ROOT).field(); } }; @@ -215,9 +215,9 @@ public void testRewritingKeepsSettings() throws IOException { emptyMap() ) { @Override - public MappedFieldType getFieldType(String name) { + public MappedField getMappedField(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); - return builder.build(MapperBuilderContext.ROOT).fieldType(); + return builder.build(MapperBuilderContext.ROOT).field(); } }; diff --git a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index ee16d22a90f25..24ebb6be30f22 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; import 
org.elasticsearch.index.mapper.ValueFetcher; @@ -136,17 +137,10 @@ private SearchExecutionContext createShardContext( String fieldName, DocValuesType dvType ) { - MappedFieldType fieldType = new MappedFieldType( - fieldName, - true, - false, - dvType != null, - TextSearchInfo.NONE, - Collections.emptyMap() - ) { + MappedFieldType fieldType = new MappedFieldType(true, false, dvType != null, TextSearchInfo.NONE, Collections.emptyMap()) { @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(); } @@ -156,22 +150,24 @@ public String typeName() { } @Override - public Query termQuery(Object value, @Nullable SearchExecutionContext context) { + public Query termQuery(String name, Object value, @Nullable SearchExecutionContext context) { return null; } - public Query existsQuery(SearchExecutionContext context) { + @Override + public Query existsQuery(String name, SearchExecutionContext context) { return null; } }; + MappedField mappedField = new MappedField(fieldName, fieldType); SearchExecutionContext context = mock(SearchExecutionContext.class); - when(context.getFieldType(fieldName)).thenReturn(fieldType); + when(context.getMappedField(fieldName)).thenReturn(mappedField); when(context.getIndexReader()).thenReturn(reader); IndexSettings indexSettings = createIndexSettings(indexVersionCreated); when(context.getIndexSettings()).thenReturn(indexSettings); if (dvType != null) { IndexNumericFieldData fd = mock(IndexNumericFieldData.class); - when(context.getForField(fieldType)).thenReturn(fd); + when(context.getForField(mappedField)).thenReturn(fd); } return context; diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 9c16d5fc275b3..aded74059562a 100644 --- 
a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -21,7 +21,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; @@ -189,7 +189,7 @@ protected final SearchExecutionContext createMockSearchExecutionContext(IndexSea Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build() ); BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, Mockito.mock(BitsetFilterCache.Listener.class)); - TriFunction, IndexFieldData> indexFieldDataLookup = ( + TriFunction, IndexFieldData> indexFieldDataLookup = ( fieldType, fieldIndexName, searchLookup) -> { @@ -222,8 +222,8 @@ protected final SearchExecutionContext createMockSearchExecutionContext(IndexSea ) { @Override - public MappedFieldType getFieldType(String name) { - return provideMappedFieldType(name); + public MappedField getMappedField(String name) { + return provideMappedField(name); } @Override @@ -237,12 +237,9 @@ public NestedLookup nestedLookup() { * Return a field type. We use {@link NumberFieldMapper.NumberFieldType} by default since it is compatible with all sort modes * Tests that require other field types can override this. 
*/ - protected MappedFieldType provideMappedFieldType(String name) { - NumberFieldMapper.NumberFieldType doubleFieldType = new NumberFieldMapper.NumberFieldType( - name, - NumberFieldMapper.NumberType.DOUBLE - ); - return doubleFieldType; + protected MappedField provideMappedField(String name) { + NumberFieldMapper.NumberFieldType doubleFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); + return new MappedField(name, doubleFieldType); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index fca0c90b95a3e..51986b33a3fd0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -67,7 +68,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase { /** - * {@link #provideMappedFieldType(String)} will return a + * {@link #provideMappedField(String)} will return a */ private static final String MAPPED_STRING_FIELDNAME = "_stringField"; @@ -342,26 +343,26 @@ public void testInvalidFormat() { } @Override - protected MappedFieldType provideMappedFieldType(String name) { + protected MappedField provideMappedField(String name) { if (name.equals(MAPPED_STRING_FIELDNAME)) { - return new KeywordFieldMapper.KeywordFieldType(name); + return new MappedField(name, new KeywordFieldMapper.KeywordFieldType()); } else if (name.startsWith("custom-")) { final 
MappedFieldType fieldType; if (name.startsWith("custom-keyword")) { - fieldType = new KeywordFieldMapper.KeywordFieldType(name); + fieldType = new KeywordFieldMapper.KeywordFieldType(); } else if (name.startsWith("custom-date")) { - fieldType = new DateFieldMapper.DateFieldType(name); + fieldType = new DateFieldMapper.DateFieldType(); } else { String type = name.split("-")[1]; if (type.equals("INT")) { type = "integer"; } NumberFieldMapper.NumberType numberType = NumberFieldMapper.NumberType.valueOf(type.toUpperCase(Locale.ENGLISH)); - fieldType = new NumberFieldMapper.NumberFieldType(name, numberType); + fieldType = new NumberFieldMapper.NumberFieldType(numberType); } - return fieldType; + return new MappedField(name, fieldType); } else { - return super.provideMappedFieldType(name); + return super.provideMappedField(name); } } @@ -578,7 +579,7 @@ public void testIsBottomSortShardDisjoint() throws Exception { try (DirectoryReader reader = writer.getReader()) { SearchExecutionContext context = createMockSearchExecutionContext(new IndexSearcher(reader)); DocValueFormat[] dateValueFormat = new DocValueFormat[] { - context.getFieldType("custom-date").docValueFormat(null, null) }; + context.getMappedField("custom-date").docValueFormat(null, null) }; assertTrue( fieldSort.isBottomSortShardDisjoint(context, new SearchSortValuesAndFormats(new Object[] { 0L }, dateValueFormat)) ); @@ -595,7 +596,7 @@ public void testIsBottomSortShardDisjoint() throws Exception { try (DirectoryReader reader = writer.getReader()) { SearchExecutionContext context = createMockSearchExecutionContext(new IndexSearcher(reader)); DocValueFormat[] dateValueFormat = new DocValueFormat[] { - context.getFieldType("custom-date").docValueFormat(null, null) }; + context.getMappedField("custom-date").docValueFormat(null, null) }; assertFalse(fieldSort.isBottomSortShardDisjoint(context, null)); assertFalse( fieldSort.isBottomSortShardDisjoint( diff --git 
a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index bc3c9fbbc6e70..bb10ea034da82 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NestedPathFieldMapper; import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -98,8 +98,8 @@ public static GeoDistanceSortBuilder randomGeoDistanceSortBuilder() { } @Override - protected MappedFieldType provideMappedFieldType(String name) { - return new GeoPointFieldMapper.GeoPointFieldType(name); + protected MappedField provideMappedField(String name) { + return new MappedField(name, new GeoPointFieldMapper.GeoPointFieldType()); } private static GeoPoint[] points(GeoPoint[] original) { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 35b1fa8001d7b..c24cc9db8ae5e 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -20,6 +20,7 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.FieldMapper; +import 
org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; @@ -159,7 +160,7 @@ public void testBuild() throws IOException { indexSettings ); ScriptService scriptService = mock(ScriptService.class); - MappedFieldType fieldType = mockFieldType(suggestionBuilder.field()); + MappedField mappedField = mockField(suggestionBuilder.field()); IndexAnalyzers indexAnalyzers = new IndexAnalyzers(new HashMap<>() { @Override public NamedAnalyzer get(Object key) { @@ -171,7 +172,7 @@ public NamedAnalyzer get(Object key) { when(scriptService.compile(any(Script.class), any())).then( invocation -> new TestTemplateService.MockTemplateScript.Factory(((Script) invocation.getArguments()[0]).getIdOrCode()) ); - List mappers = Collections.singletonList(new MockFieldMapper(fieldType)); + List mappers = Collections.singletonList(new MockFieldMapper(mappedField)); MappingLookup lookup = MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList(), emptyList()); SearchExecutionContext mockContext = new SearchExecutionContext( 0, @@ -264,14 +265,13 @@ public void testBuildWithUnmappedField() { */ protected abstract void assertSuggestionContext(SB builder, SuggestionContext context) throws IOException; - protected MappedFieldType mockFieldType(String fieldName) { + protected MappedField mockField(String fieldName) { MappedFieldType fieldType = mock(MappedFieldType.class); - when(fieldType.name()).thenReturn(fieldName.intern()); // intern field name to not trip assertions that ensure all field names are - // interned NamedAnalyzer searchAnalyzer = new NamedAnalyzer("fieldSearchAnalyzer", AnalyzerScope.INDEX, new SimpleAnalyzer()); TextSearchInfo tsi = new TextSearchInfo(TextFieldMapper.Defaults.FIELD_TYPE, null, searchAnalyzer, searchAnalyzer); when(fieldType.getTextSearchInfo()).thenReturn(tsi); - return fieldType; + return new 
MappedField(fieldName.intern(), fieldType); // intern field name to not trip assertions that ensure all field names are + // interned } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 26f3e1d2e24b7..579899940230c 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -777,7 +777,7 @@ public void testUnknownQueryContextParsing() throws Exception { .endObject(); MapperService mapperService = createIndex("test", Settings.EMPTY, mapping).mapperService(); - CompletionFieldType completionFieldType = (CompletionFieldType) mapperService.fieldType("completion"); + CompletionFieldType completionFieldType = (CompletionFieldType) mapperService.mappedField("completion").type(); Exception e = expectThrows(IllegalArgumentException.class, () -> completionFieldType.getContextMappings().get("brand")); assertEquals("Unknown context name [brand], must be one of [ctx, type]", e.getMessage()); @@ -787,19 +787,17 @@ public void testParsingContextFromDocument() throws Exception { CategoryContextMapping mapping = ContextBuilder.category("cat").field("category").build(); LuceneDocument document = new LuceneDocument(); - KeywordFieldMapper.KeywordFieldType keyword = new KeywordFieldMapper.KeywordFieldType("category"); - document.add(new KeywordFieldMapper.KeywordField(keyword.name(), new BytesRef("category1"), new FieldType())); + document.add(new KeywordFieldMapper.KeywordField("category", new BytesRef("category1"), new FieldType())); // Ignore doc values - document.add(new SortedSetDocValuesField(keyword.name(), new BytesRef("category1"))); + document.add(new SortedSetDocValuesField("category", new BytesRef("category1"))); Set context = 
mapping.parseContext(document); assertThat(context.size(), equalTo(1)); assertTrue(context.contains("category1")); document = new LuceneDocument(); - TextFieldMapper.TextFieldType text = new TextFieldMapper.TextFieldType("category"); - document.add(new Field(text.name(), "category1", TextFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field("category", "category1", TextFieldMapper.Defaults.FIELD_TYPE)); // Ignore stored field - document.add(new StoredField(text.name(), "category1", TextFieldMapper.Defaults.FIELD_TYPE)); + document.add(new StoredField("category", "category1", TextFieldMapper.Defaults.FIELD_TYPE)); context = mapping.parseContext(document); assertThat(context.size(), equalTo(1)); assertTrue(context.contains("category1")); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 990c39772b4d1..fcba381b7bb35 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.CompletionFieldMapper.CompletionFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext; @@ -138,14 +138,13 @@ protected void mutateSpecificParameters(CompletionSuggestionBuilder builder) thr } @Override - protected MappedFieldType mockFieldType(String fieldName) { + protected MappedField 
mockField(String fieldName) { CompletionFieldType completionFieldType = new CompletionFieldType( - fieldName, new NamedAnalyzer("fieldSearchAnalyzer", AnalyzerScope.INDEX, new SimpleAnalyzer()), Collections.emptyMap() ); completionFieldType.setContextMappings(new ContextMappings(contextMappings)); - return completionFieldType; + return new MappedField(fieldName, completionFieldType); } @Override @@ -153,7 +152,7 @@ protected void assertSuggestionContext(CompletionSuggestionBuilder builder, Sugg assertThat(context, instanceOf(CompletionSuggestionContext.class)); assertThat(context.getSuggester(), instanceOf(CompletionSuggester.class)); CompletionSuggestionContext completionSuggestionCtx = (CompletionSuggestionContext) context; - assertThat(completionSuggestionCtx.getFieldType(), instanceOf(CompletionFieldType.class)); + assertThat(completionSuggestionCtx.getMappedField().type(), instanceOf(CompletionFieldType.class)); assertEquals(builder.fuzzyOptions, completionSuggestionCtx.getFuzzyOptions()); Map> parsedContextBytes; parsedContextBytes = CompletionSuggestionBuilder.parseContextBytes( diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 7f373e11cdb35..2648b7b8edd6e 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -56,7 
+56,7 @@ public void testIndexingWithNoContexts() throws Exception { .endObject(); MapperService mapperService = createIndex("test", Settings.EMPTY, mapping).mapperService(); - MappedFieldType completionFieldType = mapperService.fieldType("completion"); + MappedField completionField = mapperService.mappedField("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( @@ -82,7 +82,7 @@ public void testIndexingWithNoContexts() throws Exception { XContentType.JSON ) ); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionField.name()); assertContextSuggestFields(fields, 7); } @@ -104,7 +104,7 @@ public void testIndexingWithSimpleContexts() throws Exception { .endObject(); MapperService mapperService = createIndex("test", Settings.EMPTY, mapping).mapperService(); - MappedFieldType completionFieldType = mapperService.fieldType("completion"); + MappedField completionField = mapperService.mappedField("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( @@ -128,7 +128,7 @@ public void testIndexingWithSimpleContexts() throws Exception { XContentType.JSON ) ); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionField.name()); assertContextSuggestFields(fields, 3); } @@ -150,7 +150,7 @@ public void testIndexingWithContextList() throws Exception { .endObject(); MapperService mapperService = createIndex("test", Settings.EMPTY, mapping).mapperService(); - MappedFieldType completionFieldType = mapperService.fieldType("completion"); + MappedField completionField = mapperService.mappedField("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( @@ -178,7 +178,7 @@ public void testIndexingWithContextList() throws 
Exception { XContentType.JSON ) ); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionField.name()); assertContextSuggestFields(fields, 3); } @@ -204,7 +204,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject(); MapperService mapperService = createIndex("test", Settings.EMPTY, mapping).mapperService(); - MappedFieldType completionFieldType = mapperService.fieldType("completion"); + MappedField completionField = mapperService.mappedField("completion"); XContentBuilder builder = jsonBuilder().startObject() .startArray("completion") .startObject() @@ -219,7 +219,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject(); ParsedDocument parsedDocument = mapperService.documentMapper() .parse(new SourceToParse("1", BytesReference.bytes(builder), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(completionField.name()); assertContextSuggestFields(fields, 3); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index efd38b71e97e1..3340b2f257a26 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -44,29 +44,29 @@ private static SearchExecutionContext createMockSearchExecutionContext(boolean a return searchExecutionContext; } - public static List fetchSourceValue(MappedFieldType fieldType, Object sourceValue) throws IOException { - return fetchSourceValue(fieldType, sourceValue, null); + public static List fetchSourceValue(MappedField mappedField, Object sourceValue) throws IOException { + return 
fetchSourceValue(mappedField, sourceValue, null); } - public static List fetchSourceValue(MappedFieldType fieldType, Object sourceValue, String format) throws IOException { - String field = fieldType.name(); + public static List fetchSourceValue(MappedField mappedField, Object sourceValue, String format) throws IOException { + String field = mappedField.name(); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); when(searchExecutionContext.isSourceEnabled()).thenReturn(true); when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field)); - ValueFetcher fetcher = fieldType.valueFetcher(searchExecutionContext, format); + ValueFetcher fetcher = mappedField.valueFetcher(searchExecutionContext, format); SourceLookup lookup = new SourceLookup(); lookup.setSource(Collections.singletonMap(field, sourceValue)); return fetcher.fetchValues(lookup, new ArrayList<>()); } - public static List fetchSourceValues(MappedFieldType fieldType, Object... values) throws IOException { - String field = fieldType.name(); + public static List fetchSourceValues(MappedField mappedField, Object... 
values) throws IOException { + String field = mappedField.name(); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); when(searchExecutionContext.isSourceEnabled()).thenReturn(true); when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field)); - ValueFetcher fetcher = fieldType.valueFetcher(searchExecutionContext, null); + ValueFetcher fetcher = mappedField.valueFetcher(searchExecutionContext, null); SourceLookup lookup = new SourceLookup(); lookup.setSource(Collections.singletonMap(field, List.of(values))); return fetcher.fetchValues(lookup, new ArrayList<>()); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index d79a82232cafb..33f34ec44a633 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -418,8 +418,8 @@ public IndexSettings getIndexSettings() { } @Override - public MappedFieldType getFieldType(String path) { - return mapperService.fieldType(path); + public MappedField getMappedField(String path) { + return mapperService.mappedField(path); } @Override @@ -449,8 +449,8 @@ public Query filterQuery(Query query) { } @Override - protected IndexFieldData buildFieldData(MappedFieldType ft) { - return ft.fielddataBuilder("test", null).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); + protected IndexFieldData buildFieldData(MappedField mappedField) { + return mappedField.fielddataBuilder("test", null).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); } @Override @@ -643,7 +643,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { ); } - protected BiFunction, IndexFieldData> fieldDataLookup() { + protected BiFunction, IndexFieldData> fieldDataLookup() { return (mft, lookupSource) -> 
mft.fielddataBuilder("test", lookupSource) .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 7ec5c6b382856..deb7a8e442eb7 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -116,13 +116,13 @@ public final void testExistsQueryMinimalMapping() throws IOException { protected void assertExistsQuery(MapperService mapperService) throws IOException { LuceneDocument fields = mapperService.documentMapper().parse(source(this::writeField)).rootDoc(); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); - MappedFieldType fieldType = mapperService.fieldType("field"); - Query query = fieldType.existsQuery(searchExecutionContext); - assertExistsQuery(fieldType, query, fields); + MappedField mappedField = mapperService.mappedField("field"); + Query query = mappedField.existsQuery(searchExecutionContext); + assertExistsQuery(mappedField, query, fields); } - protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { - if (fieldType.hasDocValues() || fieldType.getTextSearchInfo().hasNorms()) { + protected void assertExistsQuery(MappedField mappedField, Query query, LuceneDocument fields) { + if (mappedField.hasDocValues() || mappedField.getTextSearchInfo().hasNorms()) { assertThat(query, instanceOf(FieldExistsQuery.class)); FieldExistsQuery fieldExistsQuery = (FieldExistsQuery) query; assertEquals("field", fieldExistsQuery.getField()); @@ -135,7 +135,7 @@ protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneD // is not added to _field_names because it is not indexed nor stored assertEquals("field", termQuery.getTerm().text()); 
assertNoDocValuesField(fields, "field"); - if (fieldType.isIndexed() || fieldType.isStored()) { + if (mappedField.isIndexed() || mappedField.isStored()) { assertNotNull(fields.getField(FieldNamesFieldMapper.NAME)); } else { assertNoFieldNamesField(fields); @@ -173,7 +173,7 @@ protected void assertDimension(boolean isDimension, Function che })); @SuppressWarnings("unchecked") // Syntactic sugar in tests - T fieldType = (T) mapperService.fieldType("field"); + T fieldType = (T) mapperService.mappedField("field").type(); assertThat(checker.apply(fieldType), equalTo(isDimension)); } @@ -184,7 +184,7 @@ protected void assertMetricType(String metricType, Function fetchFromDocValues(MapperService mapperService, MappedFieldType ft, DocValueFormat format, Object sourceValue) - throws IOException { + protected final List fetchFromDocValues( + MapperService mapperService, + MappedField mappedField, + DocValueFormat format, + Object sourceValue + ) throws IOException { SetOnce> result = new SetOnce<>(); withLuceneIndex( mapperService, - iw -> { iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field(ft.name(), sourceValue))).rootDoc()); }, iw -> { - SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup()); - ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.getForField(ft)); + iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field(mappedField.name(), sourceValue))).rootDoc()); + }, + iw -> { + SearchLookup lookup = new SearchLookup(mapperService::mappedField, fieldDataLookup()); + ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.getForField(mappedField)); IndexSearcher searcher = newSearcher(iw); LeafReaderContext context = searcher.getIndexReader().leaves().get(0); lookup.source().setSegmentAndDocument(context, 0); @@ -452,19 +458,19 @@ public void testUpdates() throws IOException { public final void testTextSearchInfoConsistency() throws IOException { MapperService mapperService = 
createMapperService(fieldMapping(this::minimalMapping)); - MappedFieldType fieldType = mapperService.fieldType("field"); - if (fieldType.getTextSearchInfo() == TextSearchInfo.NONE) { - expectThrows(IllegalArgumentException.class, () -> fieldType.termQuery(null, null)); + MappedField mappedField = mapperService.mappedField("field"); + if (mappedField.getTextSearchInfo() == TextSearchInfo.NONE) { + expectThrows(IllegalArgumentException.class, () -> mappedField.termQuery(null, null)); } else { SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); - assertNotNull(fieldType.termQuery(getSampleValueForQuery(), searchExecutionContext)); + assertNotNull(mappedField.termQuery(getSampleValueForQuery(), searchExecutionContext)); } - assertSearchable(fieldType); + assertSearchable(mappedField); assertParseMinimalWarnings(); } - protected void assertSearchable(MappedFieldType fieldType) { - assertEquals(fieldType.isIndexed(), fieldType.getTextSearchInfo() != TextSearchInfo.NONE); + protected void assertSearchable(MappedField mappedField) { + assertEquals(mappedField.isIndexed(), mappedField.getTextSearchInfo() != TextSearchInfo.NONE); } /** @@ -480,8 +486,8 @@ protected void assertSearchable(MappedFieldType fieldType) { public final void testFetch() throws IOException { MapperService mapperService = randomFetchTestMapper(); try { - MappedFieldType ft = mapperService.fieldType("field"); - assertFetch(mapperService, "field", generateRandomInputValue(ft), randomFetchTestFormat()); + MappedField field = mapperService.mappedField("field"); + assertFetch(mapperService, "field", generateRandomInputValue(field.type()), randomFetchTestFormat()); } finally { assertParseMinimalWarnings(); } @@ -500,11 +506,11 @@ public final void testFetch() throws IOException { public final void testFetchMany() throws IOException { MapperService mapperService = randomFetchTestMapper(); try { - MappedFieldType ft = mapperService.fieldType("field"); + MappedField 
field = mapperService.mappedField("field"); int count = between(2, 10); List values = new ArrayList<>(count); while (values.size() < count) { - values.add(generateRandomInputValue(ft)); + values.add(generateRandomInputValue(field.type())); } assertFetch(mapperService, "field", values, randomFetchTestFormat()); } finally { @@ -571,19 +577,19 @@ protected void registerDimensionChecks(ParameterChecker checker) throws IOExcept * produces the same value as fetching using doc values. */ protected void assertFetch(MapperService mapperService, String field, Object value, String format) throws IOException { - MappedFieldType ft = mapperService.fieldType(field); - SourceToParse source = source(b -> b.field(ft.name(), value)); + MappedField mappedField = mapperService.mappedField(field); + SourceToParse source = source(b -> b.field(mappedField.name(), value)); ValueFetcher docValueFetcher = new DocValueFetcher( - ft.docValueFormat(format, null), - ft.fielddataBuilder("test", () -> null).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) + mappedField.docValueFormat(format, null), + mappedField.fielddataBuilder("test", () -> null).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) ); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); when(searchExecutionContext.isSourceEnabled()).thenReturn(true); when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field)); - when(searchExecutionContext.getForField(ft)).thenAnswer( - inv -> fieldDataLookup().apply(ft, () -> { throw new UnsupportedOperationException(); }) + when(searchExecutionContext.getForField(mappedField)).thenAnswer( + inv -> fieldDataLookup().apply(mappedField, () -> { throw new UnsupportedOperationException(); }) ); - ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format); + ValueFetcher nativeFetcher = mappedField.valueFetcher(searchExecutionContext, format); ParsedDocument doc = 
mapperService.documentMapper().parse(source); withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> { SourceLookup sourceLookup = new SourceLookup(); @@ -642,8 +648,8 @@ public final void testIndexTimeFieldData() throws IOException { assumeTrue("Field type does not support access via search lookup", supportsSearchLookup()); MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); assertParseMinimalWarnings(); - MappedFieldType fieldType = mapperService.fieldType("field"); - if (fieldType.isAggregatable() == false) { + MappedField mappedField = mapperService.mappedField("field"); + if (mappedField.isAggregatable() == false) { return; // No field data available, so we ignore } SourceToParse source = source(this::writeField); @@ -653,7 +659,7 @@ public final void testIndexTimeFieldData() throws IOException { LeafReaderContext ctx = ir.leaves().get(0); - DocValuesScriptFieldFactory docValuesFieldSource = fieldType.fielddataBuilder( + DocValuesScriptFieldFactory docValuesFieldSource = mappedField.fielddataBuilder( "test", () -> { throw new UnsupportedOperationException(); } ).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()).load(ctx).getScriptFieldFactory("test"); @@ -661,7 +667,10 @@ public final void testIndexTimeFieldData() throws IOException { docValuesFieldSource.setNextDocId(0); DocumentLeafReader reader = new DocumentLeafReader(doc.rootDoc(), Collections.emptyMap()); - DocValuesScriptFieldFactory indexData = fieldType.fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }) + DocValuesScriptFieldFactory indexData = mappedField.fielddataBuilder( + "test", + () -> { throw new UnsupportedOperationException(); } + ) .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) .load(reader.getContext()) .getScriptFieldFactory("test"); @@ -692,11 +701,11 @@ public final void testIndexTimeStoredFieldsAccess() throws IOException { MapperService mapperService = 
createMapperService(fieldMapping(this::minimalStoreMapping)); assertParseMinimalWarnings(); - MappedFieldType fieldType = mapperService.fieldType("field"); + MappedField mappedField = mapperService.mappedField("field"); SourceToParse source = source(this::writeField); ParsedDocument doc = mapperService.documentMapper().parse(source); - SearchLookup lookup = new SearchLookup(f -> fieldType, (f, s) -> { throw new UnsupportedOperationException(); }); + SearchLookup lookup = new SearchLookup(f -> mappedField, (f, s) -> { throw new UnsupportedOperationException(); }); withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), ir -> { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java index 0034be7100df6..1d4fb48024f1f 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java @@ -16,15 +16,15 @@ public class MockFieldMapper extends FieldMapper { public MockFieldMapper(String fullName) { - this(new FakeFieldType(fullName)); + this(new MappedField(fullName, new FakeFieldType())); } - public MockFieldMapper(MappedFieldType fieldType) { - this(findSimpleName(fieldType.name()), fieldType, MultiFields.empty(), CopyTo.empty()); + public MockFieldMapper(MappedField mappedField) { + this(findSimpleName(mappedField.name()), mappedField, MultiFields.empty(), CopyTo.empty()); } - public MockFieldMapper(String fullName, MappedFieldType fieldType, MultiFields multifields, CopyTo copyTo) { - super(findSimpleName(fullName), fieldType, multifields, copyTo, false, null); + public MockFieldMapper(String fullName, MappedField mappedField, MultiFields multifields, CopyTo copyTo) { + super(findSimpleName(fullName), mappedField, multifields, copyTo, false, null); } @Override @@ -38,8 +38,8 @@ static String findSimpleName(String fullName) { 
} public static class FakeFieldType extends TermBasedFieldType { - public FakeFieldType(String name) { - super(name, true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); + public FakeFieldType() { + super(true, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap()); } @Override @@ -48,7 +48,7 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { throw new UnsupportedOperationException(); } } @@ -66,7 +66,7 @@ public static class Builder extends FieldMapper.Builder { protected Builder(String name) { super(name); - this.fieldType = new FakeFieldType(name); + this.fieldType = new FakeFieldType(); } @Override @@ -87,7 +87,7 @@ public Builder copyTo(String field) { @Override public MockFieldMapper build(MapperBuilderContext context) { MultiFields multiFields = multiFieldsBuilder.build(this, context); - return new MockFieldMapper(name(), fieldType, multiFields, copyTo.build()); + return new MockFieldMapper(name(), new MappedField(name(), fieldType), multiFields, copyTo.build()); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 498aa0d28a941..ad3a53af93ccb 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -79,7 +79,7 @@ import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import 
org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.Mapping; @@ -230,9 +230,9 @@ protected List getSearchPlugins() { protected A createAggregator( AggregationBuilder aggregationBuilder, IndexSearcher searcher, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - return createAggregator(aggregationBuilder, createAggregationContext(searcher, new MatchAllDocsQuery(), fieldTypes)); + return createAggregator(aggregationBuilder, createAggregationContext(searcher, new MatchAllDocsQuery(), mappedFields)); } protected A createAggregator(AggregationBuilder builder, AggregationContext context) throws IOException { @@ -253,7 +253,7 @@ protected A createAggregator(AggregationBuilder builder, * not responsible for releasing it. Instead, it is released automatically in * in {@link #cleanupReleasables()}. */ - protected AggregationContext createAggregationContext(IndexSearcher indexSearcher, Query query, MappedFieldType... fieldTypes) + protected AggregationContext createAggregationContext(IndexSearcher indexSearcher, Query query, MappedField... mappedFields) throws IOException { return createAggregationContext( indexSearcher, @@ -263,7 +263,7 @@ protected AggregationContext createAggregationContext(IndexSearcher indexSearche AggregationBuilder.DEFAULT_PREALLOCATION * 5, // We don't know how many bytes to preallocate so we grab a hand full DEFAULT_MAX_BUCKETS, false, - fieldTypes + mappedFields ); } @@ -281,19 +281,19 @@ protected AggregationContext createAggregationContext( long bytesToPreallocate, int maxBucket, boolean isInSortOrderExecutionRequired, - MappedFieldType... fieldTypes + MappedField... 
mappedFields ) throws IOException { MappingLookup mappingLookup = MappingLookup.fromMappers( Mapping.EMPTY, - Arrays.stream(fieldTypes).map(this::buildMockFieldMapper).collect(toList()), + Arrays.stream(mappedFields).map(this::buildMockFieldMapper).collect(toList()), objectMappers(), // Alias all fields to -alias to test aliases - Arrays.stream(fieldTypes) + Arrays.stream(mappedFields) .map(ft -> new FieldAliasMapper(ft.name() + "-alias", ft.name() + "-alias", ft.name())) .collect(toList()) ); - TriFunction, IndexFieldData> fieldDataBuilder = ( + TriFunction, IndexFieldData> fieldDataBuilder = ( fieldType, s, searchLookup) -> fieldType.fielddataBuilder(indexSettings.getIndex().getName(), searchLookup) @@ -353,8 +353,8 @@ public void onCache(ShardId shardId, Accountable accountable) {} * Build a {@link FieldMapper} to create the {@link MappingLookup} used for the aggs. * {@code protected} so subclasses can have it. */ - protected FieldMapper buildMockFieldMapper(MappedFieldType ft) { - return new MockFieldMapper(ft); + protected FieldMapper buildMockFieldMapper(MappedField mappedField) { + return new MockFieldMapper(mappedField); } /** @@ -430,9 +430,9 @@ protected A searchAndReduc IndexSearcher searcher, Query query, AggregationBuilder builder, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - return searchAndReduce(createIndexSettings(), searcher, query, builder, DEFAULT_MAX_BUCKETS, fieldTypes); + return searchAndReduce(createIndexSettings(), searcher, query, builder, DEFAULT_MAX_BUCKETS, mappedFields); } protected A searchAndReduce( @@ -440,9 +440,9 @@ protected A searchAndReduc IndexSearcher searcher, Query query, AggregationBuilder builder, - MappedFieldType... fieldTypes + MappedField... 
mappedFields ) throws IOException { - return searchAndReduce(indexSettings, searcher, query, builder, DEFAULT_MAX_BUCKETS, fieldTypes); + return searchAndReduce(indexSettings, searcher, query, builder, DEFAULT_MAX_BUCKETS, mappedFields); } protected A searchAndReduce( @@ -450,9 +450,9 @@ protected A searchAndReduc Query query, AggregationBuilder builder, int maxBucket, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - return searchAndReduce(createIndexSettings(), searcher, query, builder, maxBucket, fieldTypes); + return searchAndReduce(createIndexSettings(), searcher, query, builder, maxBucket, mappedFields); } /** @@ -469,9 +469,9 @@ protected A searchAndReduc Query query, AggregationBuilder builder, int maxBucket, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - return searchAndReduce(indexSettings, searcher, query, builder, maxBucket, randomBoolean(), fieldTypes); + return searchAndReduce(indexSettings, searcher, query, builder, maxBucket, randomBoolean(), mappedFields); } /** @@ -492,7 +492,7 @@ protected A searchAndReduc AggregationBuilder builder, int maxBucket, boolean splitLeavesIntoSeparateAggregators, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { // First run it to find circuit breaker leaks on the aggregator CircuitBreakerService crankyService = new CrankyCircuitBreakerService(); @@ -506,7 +506,7 @@ protected A searchAndReduc maxBucket, splitLeavesIntoSeparateAggregators, crankyService, - fieldTypes + mappedFields ); } catch (CircuitBreakingException e) { // expected @@ -524,7 +524,7 @@ protected A searchAndReduc maxBucket, splitLeavesIntoSeparateAggregators, breakerService, - fieldTypes + mappedFields ); } @@ -537,7 +537,7 @@ private A searchAndReduce( int maxBucket, boolean splitLeavesIntoSeparateAggregators, CircuitBreakerService breakerService, - MappedFieldType... fieldTypes + MappedField... 
mappedFields ) throws IOException { final IndexReaderContext ctx = searcher.getTopReaderContext(); final PipelineTree pipelines = builder.buildPipelineTree(); @@ -552,7 +552,7 @@ private A searchAndReduce( randomBoolean() ? 0 : builder.bytesToPreallocate(), maxBucket, builder.isInSortOrderExecutionRequired(), - fieldTypes + mappedFields ); C root = createAggregator(builder, context); @@ -651,7 +651,7 @@ protected void tes Query query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { boolean timeSeries = aggregationBuilder.isInSortOrderExecutionRequired(); try (Directory directory = newDirectory()) { @@ -670,7 +670,7 @@ protected void tes try (DirectoryReader unwrapped = DirectoryReader.open(directory); IndexReader indexReader = wrapDirectoryReader(unwrapped)) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - V agg = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldTypes); + V agg = searchAndReduce(indexSearcher, query, aggregationBuilder, mappedFields); verify.accept(agg); verifyOutputFieldNames(aggregationBuilder, agg); @@ -721,9 +721,9 @@ protected void debugTestCase( Query query, CheckedConsumer buildIndex, TriConsumer, Map>> verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - withIndex(buildIndex, searcher -> debugTestCase(builder, query, searcher, verify, fieldTypes)); + withIndex(buildIndex, searcher -> debugTestCase(builder, query, searcher, verify, mappedFields)); } /** @@ -737,7 +737,7 @@ protected void debugTestCase( Query query, IndexSearcher searcher, TriConsumer, Map>> verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { // Don't use searchAndReduce because we only want a single aggregator. 
CircuitBreakerService breakerService = new NoneCircuitBreakerService(); @@ -749,7 +749,7 @@ protected void debugTestCase( builder.bytesToPreallocate(), DEFAULT_MAX_BUCKETS, builder.isInSortOrderExecutionRequired(), - fieldTypes + mappedFields ); Aggregator aggregator = createAggregator(builder, context); aggregator.preCollection(); @@ -793,7 +793,7 @@ protected void withAggregator( Query query, CheckedConsumer buildIndex, CheckedBiConsumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -802,7 +802,7 @@ protected void withAggregator( try (DirectoryReader unwrapped = DirectoryReader.open(directory); IndexReader indexReader = wrapDirectoryReader(unwrapped)) { IndexSearcher searcher = newIndexSearcher(indexReader); - AggregationContext context = createAggregationContext(searcher, query, fieldTypes); + AggregationContext context = createAggregationContext(searcher, query, mappedFields); verify.accept(searcher, createAggregator(aggregationBuilder, context)); } } @@ -907,7 +907,7 @@ protected static IndexReader maybeWrapReaderEs(DirectoryReader reader) throws IO * This is used to test the matrix of supported/unsupported field types against the aggregator * and verify it works (or doesn't) as expected. * - * If this method is implemented, {@link AggregatorTestCase#createAggBuilderForTypeTest(MappedFieldType, String)} + * If this method is implemented, {@link AggregatorTestCase#createAggBuilderForTypeTest(MappedField, String)} * should be implemented as well. * * @return list of supported ValuesSourceTypes @@ -927,11 +927,11 @@ protected List getSupportedValuesSourceTypes() { * The list of supported types are provided by {@link AggregatorTestCase#getSupportedValuesSourceTypes()}, * which must also be implemented. 
* - * @param fieldType the type of the field that will be tested + * @param mappedField the mapped field that will be tested * @param fieldName the name of the field that will be test * @return an aggregation builder to test against the field */ - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { throw new UnsupportedOperationException( "If getSupportedValuesSourceTypes() is implemented, " + "createAggBuilderForTypeTest() must be implemented as well." ); @@ -987,23 +987,23 @@ public void testSupportedFieldTypes() throws IOException { Mapper.Builder builder = mappedType.getValue().parse(fieldName, source, new MockParserContext(indexSettings)); FieldMapper mapper = (FieldMapper) builder.build(MapperBuilderContext.ROOT); - MappedFieldType fieldType = mapper.fieldType(); + MappedField mappedField = mapper.field(); // Non-aggregatable fields are not testable (they will throw an error on all aggs anyway), so skip - if (fieldType.isAggregatable() == false) { + if (mappedField.isAggregatable() == false) { continue; } try (Directory directory = newDirectory()) { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); - writeTestDoc(fieldType, fieldName, indexWriter); + writeTestDoc(mappedField, fieldName, indexWriter); indexWriter.close(); try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); - AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, fieldName); + AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(mappedField, fieldName); - ValuesSourceType vst = fieldToVST(fieldType); + ValuesSourceType vst = fieldToVST(mappedField); // TODO in the future we can make this more explicit with expectThrows(), when the exceptions are standardized AssertionError failure = null; try { @@ 
-1011,7 +1011,7 @@ public void testSupportedFieldTypes() throws IOException { indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, - fieldType + mappedField ); // We should make sure if the builder says it supports sampling, that the internal aggregations returned override // finalizeSampling @@ -1020,22 +1020,22 @@ public void testSupportedFieldTypes() throws IOException { InternalAggregation sampledResult = internalAggregation.finalizeSampling(randomSamplingContext); assertThat(sampledResult.getClass(), equalTo(internalAggregation.getClass())); } - if (supportedVSTypes.contains(vst) == false || unsupportedMappedFieldTypes.contains(fieldType.typeName())) { + if (supportedVSTypes.contains(vst) == false || unsupportedMappedFieldTypes.contains(mappedField.typeName())) { failure = new AssertionError( "Aggregator [" + aggregationBuilder.getType() + "] should not support field type [" - + fieldType.typeName() + + mappedField.typeName() + "] but executing against the field did not throw an exception" ); } } catch (Exception | AssertionError e) { - if (supportedVSTypes.contains(vst) && unsupportedMappedFieldTypes.contains(fieldType.typeName()) == false) { + if (supportedVSTypes.contains(vst) && unsupportedMappedFieldTypes.contains(mappedField.typeName()) == false) { failure = new AssertionError( "Aggregator [" + aggregationBuilder.getType() + "] supports field type [" - + fieldType.typeName() + + mappedField.typeName() + "] but executing against the field threw an exception: [" + e.getMessage() + "]", @@ -1051,8 +1051,10 @@ public void testSupportedFieldTypes() throws IOException { } } - private ValuesSourceType fieldToVST(MappedFieldType fieldType) { - return fieldType.fielddataBuilder("", () -> { throw new UnsupportedOperationException(); }).build(null, null).getValuesSourceType(); + private ValuesSourceType fieldToVST(MappedField mappedField) { + return mappedField.fielddataBuilder("", () -> { throw new UnsupportedOperationException(); }) + .build(null, null) + 
.getValuesSourceType(); } /** @@ -1061,10 +1063,10 @@ private ValuesSourceType fieldToVST(MappedFieldType fieldType) { * Throws an exception if it encounters an unknown field type, to prevent new ones from sneaking in without * being tested. */ - private void writeTestDoc(MappedFieldType fieldType, String fieldName, RandomIndexWriter iw) throws IOException { + private void writeTestDoc(MappedField mappedField, String fieldName, RandomIndexWriter iw) throws IOException { - String typeName = fieldType.typeName(); - ValuesSourceType vst = fieldToVST(fieldType); + String typeName = mappedField.typeName(); + ValuesSourceType vst = fieldToVST(mappedField); Document doc = new Document(); String json; @@ -1202,46 +1204,46 @@ protected void afterClose() {} /** * Make a {@linkplain DateFieldMapper.DateFieldType} for a {@code date}. */ - protected DateFieldMapper.DateFieldType dateField(String name, DateFieldMapper.Resolution resolution) { - return new DateFieldMapper.DateFieldType(name, resolution); + protected MappedField dateField(String name, DateFieldMapper.Resolution resolution) { + return new MappedField(name, new DateFieldMapper.DateFieldType(resolution)); } /** * Make a {@linkplain NumberFieldMapper.NumberFieldType} for a {@code double}. */ - protected NumberFieldMapper.NumberFieldType doubleField(String name) { - return new NumberFieldMapper.NumberFieldType(name, NumberFieldMapper.NumberType.DOUBLE); + protected MappedField doubleField(String name) { + return new MappedField(name, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); } /** * Make a {@linkplain GeoPointFieldMapper.GeoPointFieldType} for a {@code geo_point}. 
*/ - protected GeoPointFieldMapper.GeoPointFieldType geoPointField(String name) { - return new GeoPointFieldMapper.GeoPointFieldType(name); + protected MappedField geoPointField(String name) { + return new MappedField(name, new GeoPointFieldMapper.GeoPointFieldType()); } /** * Make a {@linkplain DateFieldMapper.DateFieldType} for a {@code date}. */ - protected KeywordFieldMapper.KeywordFieldType keywordField(String name) { - return new KeywordFieldMapper.KeywordFieldType(name); + protected MappedField keywordField(String name) { + return new MappedField(name, new KeywordFieldMapper.KeywordFieldType()); } /** * Make a {@linkplain NumberFieldMapper.NumberFieldType} for a {@code long}. */ - protected NumberFieldMapper.NumberFieldType longField(String name) { - return new NumberFieldMapper.NumberFieldType(name, NumberFieldMapper.NumberType.LONG); + protected MappedField longField(String name) { + return new MappedField(name, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); } /** * Make a {@linkplain NumberFieldMapper.NumberFieldType} for a {@code range}. 
*/ - protected RangeFieldMapper.RangeFieldType rangeField(String name, RangeType rangeType) { + protected MappedField rangeField(String name, RangeType rangeType) { if (rangeType == RangeType.DATE) { - return new RangeFieldMapper.RangeFieldType(name, RangeFieldMapper.Defaults.DATE_FORMATTER); + return new MappedField(name, new RangeFieldMapper.RangeFieldType(RangeFieldMapper.Defaults.DATE_FORMATTER)); } - return new RangeFieldMapper.RangeFieldType(name, rangeType); + return new MappedField(name, new RangeFieldMapper.RangeFieldType(rangeType)); } private void assertRoundTrip(List result) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java index 5c798b80ca09c..2584fba2eef77 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java @@ -27,7 +27,7 @@ import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregator; @@ -88,7 +88,7 @@ public abstract class GeoGridAggregatorTestCase protected abstract Rectangle getTile(double lng, double lat, int precision); @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return 
createBuilder("foo").field(fieldName); } @@ -338,9 +338,9 @@ private void testCase( assertThat(aggregationBuilder.geoBoundingBox(), equalTo(geoBoundingBox)); } - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType(aggregationBuilder.field()); + MappedField mappedField = new MappedField(aggregationBuilder.field(), new GeoPointFieldMapper.GeoPointFieldType()); - Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(query, aggregator); aggregator.postCollection(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java index b0b8a4f1de116..53f93fd92d36f 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java @@ -114,7 +114,7 @@ public void testOrientationPersistence() throws Exception { // left orientation test IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName)); IndexService indexService = indicesService.indexService(resolveIndex(idxName)); - MappedFieldType fieldType = indexService.mapperService().fieldType("location"); + MappedFieldType fieldType = indexService.mapperService().mappedField("location").type(); assertThat(fieldType, instanceOf(AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType.class)); AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType gsfm = @@ -127,7 +127,7 @@ public void testOrientationPersistence() throws Exception { // right orientation test indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName + "2")); indexService = indicesService.indexService(resolveIndex((idxName + "2"))); - 
fieldType = indexService.mapperService().fieldType("location"); + fieldType = indexService.mapperService().mappedField("location").type(); assertThat(fieldType, instanceOf(AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType.class)); gsfm = (AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType) fieldType; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java index aebb2698b0870..0a8c470abf63d 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java @@ -38,7 +38,11 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa if ((indexFieldData instanceof IndexHistogramFieldData) == false) { throw new IllegalArgumentException( - "Expected histogram type on field [" + fieldContext.field() + "], but got [" + fieldContext.fieldType().typeName() + "]" + "Expected histogram type on field [" + + fieldContext.field() + + "], but got [" + + fieldContext.mappedField().typeName() + + "]" ); } return new HistogramValuesSource.Histogram.Fielddata((IndexHistogramFieldData) indexFieldData); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index b971a048a06d1..de2ab8496a12c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -27,6 +27,7 @@ import 
org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; @@ -98,7 +99,7 @@ protected Parameter[] getParameters() { public HistogramFieldMapper build(MapperBuilderContext context) { return new HistogramFieldMapper( name, - new HistogramFieldType(context.buildFullName(name), meta.getValue(), metric.getValue()), + new MappedField(context.buildFullName(name), new HistogramFieldType(meta.getValue(), metric.getValue())), multiFieldsBuilder.build(this, context), copyTo.build(), this @@ -117,14 +118,8 @@ public HistogramFieldMapper build(MapperBuilderContext context) { /** The metric type (gauge, counter, summary) if field is a time series metric */ private final TimeSeriesParams.MetricType metricType; - public HistogramFieldMapper( - String simpleName, - MappedFieldType mappedFieldType, - MultiFields multiFields, - CopyTo copyTo, - Builder builder - ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + public HistogramFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo); this.ignoreMalformed = builder.ignoreMalformed.getValue(); this.ignoreMalformedByDefault = builder.ignoreMalformed.getDefaultValue().value(); this.metricType = builder.metric.getValue(); @@ -153,8 +148,8 @@ public static class HistogramFieldType extends MappedFieldType { private final TimeSeriesParams.MetricType metricType; - public HistogramFieldType(String name, Map meta, TimeSeriesParams.MetricType metricType) { - super(name, false, false, true, TextSearchInfo.NONE, meta); + public HistogramFieldType(Map meta, TimeSeriesParams.MetricType 
metricType) { + super(false, false, true, TextSearchInfo.NONE, meta); this.metricType = metricType; } @@ -164,14 +159,14 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.identity(name(), context, format); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return SourceValueFetcher.identity(name, context, format); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return (cache, breakerService) -> new IndexHistogramFieldData(name(), AnalyticsValuesSourceType.HISTOGRAM) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return (cache, breakerService) -> new IndexHistogramFieldData(name, AnalyticsValuesSourceType.HISTOGRAM) { @Override public LeafHistogramFieldData load(LeafReaderContext context) { @@ -254,9 +249,9 @@ public BucketedSort newBucketedSort( } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { throw new IllegalArgumentException( - "[" + CONTENT_TYPE + "] field do not support searching, " + "use dedicated aggregations instead: [" + name() + "]" + "[" + CONTENT_TYPE + "] field do not support searching, " + "use dedicated aggregations instead: [" + name + "]" ); } @@ -377,7 +372,7 @@ public void parse(DocumentParserContext context) throws IOException { } BytesRef docValue = streamOutput.bytes().toBytesRef(); Field field = new BinaryDocValuesField(name(), docValue); - if (context.doc().getByKey(fieldType().name()) != null) { + if (context.doc().getByKey(name()) != null) { throw new IllegalArgumentException( "Field [" + name() @@ -386,18 +381,18 @@ public void parse(DocumentParserContext context) throws 
IOException { + "] doesn't not support indexing multiple values for the same field in the same document" ); } - context.doc().addWithKey(fieldType().name(), field); + context.doc().addWithKey(name(), field); } catch (Exception ex) { if (ignoreMalformed.value() == false) { - throw new MapperParsingException("failed to parse field [{}] of type [{}]", ex, fieldType().name(), fieldType().typeName()); + throw new MapperParsingException("failed to parse field [{}] of type [{}]", ex, name(), fieldType().typeName()); } if (subParser != null) { // close the subParser so we advance to the end of the object subParser.close(); } - context.addIgnoredField(fieldType().name()); + context.addIgnoredField(name()); } context.path().remove(); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java index 2bb9bc85d7b33..5ffe9d1482e89 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/histogram/HistoBackedHistogramAggregatorTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.index.mapper.CustomTermFreqField; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -44,7 +44,7 @@ public void testHistograms() throws Exception { HistogramAggregationBuilder aggBuilder = new 
HistogramAggregationBuilder("my_agg").field(FIELD_NAME).interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultField(FIELD_NAME)); assertEquals(9, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -78,7 +78,7 @@ public void testMinDocCount() throws Exception { HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME).interval(5).minDocCount(2); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultField(FIELD_NAME)); assertEquals(4, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); assertEquals(3, histogram.getBuckets().get(0).getDocCount()); @@ -107,7 +107,7 @@ public void testHistogramWithDocCountField() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultField(FIELD_NAME)); assertTrue(AggregationInspectionHelper.hasValue(histogram)); assertEquals(8, histogram.getBuckets().get(0).getDocCount()); } @@ -129,7 +129,7 @@ public void testRandomOffset() throws Exception { .minDocCount(1); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new 
IndexSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultField(FIELD_NAME)); assertEquals(3, histogram.getBuckets().size()); assertEquals(-10 + expectedOffset, histogram.getBuckets().get(0).getKey()); @@ -157,7 +157,7 @@ public void testExtendedBounds() throws Exception { .extendedBounds(-12, 13); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultField(FIELD_NAME)); assertEquals(6, histogram.getBuckets().size()); assertEquals(-15d, histogram.getBuckets().get(0).getKey()); assertEquals(0, histogram.getBuckets().get(0).getDocCount()); @@ -189,7 +189,7 @@ public void testHardBounds() throws Exception { .hardBounds(new DoubleBounds(0.0, 5.0)); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultField(FIELD_NAME)); assertEquals(1, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); assertEquals(4, histogram.getBuckets().get(0).getDocCount()); @@ -216,7 +216,7 @@ public void testSubAggs() throws Exception { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME)) + () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultField(FIELD_NAME)) ); assertEquals("Histogram 
aggregation on histogram fields does not support sub-aggregations", e.getMessage()); @@ -230,12 +230,12 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new HistogramAggregationBuilder("_name").field(fieldName); } - private MappedFieldType defaultFieldType(String fieldName) { - return new HistogramFieldMapper.HistogramFieldType(fieldName, Collections.emptyMap(), null); + private MappedField defaultField(String fieldName) { + return new MappedField(fieldName, new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java index a9077a51bc7a5..691d7d54b6243 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/bucket/range/HistoBackedRangeAggregatorTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.index.mapper.CustomTermFreqField; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -71,13 +71,13 @@ public void testPercentilesAccuracy() throws Exception { searcher, new MatchAllDocsQuery(), rawPercentiles, - defaultFieldType(RAW_FIELD_NAME) + 
defaultField(RAW_FIELD_NAME) ); Percentiles aggregatedPercentileResults = searchAndReduce( searcher, new MatchAllDocsQuery(), aggregatedPercentiles, - defaultFieldType(HISTO_FIELD_NAME) + defaultField(HISTO_FIELD_NAME) ); aggBuilder.addUnboundedTo(aggregatedPercentileResults.percentile(steps[0])); rawFieldAgg.addUnboundedTo(rawPercentileResults.percentile(steps[0])); @@ -96,13 +96,13 @@ public void testPercentilesAccuracy() throws Exception { searcher, new MatchAllDocsQuery(), aggBuilder, - defaultFieldType(HISTO_FIELD_NAME) + defaultField(HISTO_FIELD_NAME) ); InternalRange rawRange = searchAndReduce( searcher, new MatchAllDocsQuery(), rawFieldAgg, - defaultFieldType(RAW_FIELD_NAME) + defaultField(RAW_FIELD_NAME) ); for (int j = 0; j < rawRange.getBuckets().size(); j++) { absError += Math.abs(range.getBuckets().get(j).getDocCount() - rawRange.getBuckets().get(j).getDocCount()); @@ -188,13 +188,13 @@ private void testRanges(List ranges, String name) throws searcher, new MatchAllDocsQuery(), aggBuilder, - defaultFieldType(HISTO_FIELD_NAME) + defaultField(HISTO_FIELD_NAME) ); InternalRange rawRange = searchAndReduce( searcher, new MatchAllDocsQuery(), rawFieldAgg, - defaultFieldType(RAW_FIELD_NAME) + defaultField(RAW_FIELD_NAME) ); for (int j = 0; j < rawRange.getBuckets().size(); j++) { absError += Math.abs(range.getBuckets().get(j).getDocCount() - rawRange.getBuckets().get(j).getDocCount()); @@ -241,7 +241,7 @@ public void testOverlapping() throws Exception { searcher, new MatchAllDocsQuery(), aggBuilder, - defaultFieldType(HISTO_FIELD_NAME) + defaultField(HISTO_FIELD_NAME) ); assertTrue(AggregationInspectionHelper.hasValue(range)); assertEquals(7, range.getBuckets().size()); @@ -303,7 +303,7 @@ public void testNonOverlapping() throws Exception { searcher, new MatchAllDocsQuery(), aggBuilder, - defaultFieldType(HISTO_FIELD_NAME) + defaultField(HISTO_FIELD_NAME) ); assertTrue(AggregationInspectionHelper.hasValue(range)); assertEquals(4, range.getBuckets().size()); @@ 
-336,7 +336,7 @@ public void testSubAggs() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(HISTO_FIELD_NAME)) + () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultField(HISTO_FIELD_NAME)) ); assertEquals("Range aggregation on histogram fields does not support sub-aggregations", e.getMessage()); } @@ -367,15 +367,15 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new RangeAggregationBuilder("_name").field(fieldName); } - private MappedFieldType defaultFieldType(String fieldName) { + private MappedField defaultField(String fieldName) { if (fieldName.equals(HISTO_FIELD_NAME)) { - return new HistogramFieldMapper.HistogramFieldType(fieldName, Collections.emptyMap(), null); + return new MappedField(fieldName, new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); } else { - return new NumberFieldMapper.NumberFieldType(fieldName, NumberFieldMapper.NumberType.DOUBLE); + return new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); } } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java index 59d55af44f224..061f15f4ec19c 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java +++ 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentileRanksAggregatorTests.java @@ -16,7 +16,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -47,7 +47,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new PercentileRanksAggregationBuilder("hdr_percentiles", new double[] { 1.0 }).field(fieldName) .percentilesConfig(new PercentilesConfig.Hdr()); } @@ -90,7 +90,7 @@ public void testSimple() throws IOException { PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) .field("field") .method(PercentilesMethod.HDR); - MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap(), null); + MappedField fieldType = new MappedField("field", new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentilesAggregatorTests.java 
b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentilesAggregatorTests.java index 31c5ea233bebe..4cef97b505ee1 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentilesAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HDRPreAggregatedPercentilesAggregatorTests.java @@ -19,7 +19,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregator; @@ -51,7 +51,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new PercentilesAggregationBuilder("hdr_percentiles").field(fieldName).percentilesConfig(new PercentilesConfig.Hdr()); } @@ -149,8 +149,11 @@ private void testCase(Query query, CheckedConsumer getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new AvgAggregationBuilder("_name").field(fieldName); } - private MappedFieldType defaultFieldType() { - return new HistogramFieldMapper.HistogramFieldType(HistoBackedAvgAggregatorTests.FIELD_NAME, Collections.emptyMap(), null); + private MappedField defaultFieldType() { + return new MappedField( + 
HistoBackedAvgAggregatorTests.FIELD_NAME, + new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregatorTests.java index 6e94a70290e14..15ee005950103 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregatorTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -110,7 +110,7 @@ public void testQueryFiltering() throws IOException { } private void testCase(Query query, CheckedConsumer indexer, Consumer verify) throws IOException { - testCase(max("_name").field(FIELD_NAME), query, indexer, verify, defaultFieldType()); + testCase(max("_name").field(FIELD_NAME), query, indexer, verify, defaultField()); } @Override @@ -130,11 +130,14 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new MaxAggregationBuilder("_name").field(fieldName); } - private MappedFieldType defaultFieldType() { - return new 
HistogramFieldMapper.HistogramFieldType(HistoBackedMaxAggregatorTests.FIELD_NAME, Collections.emptyMap(), null); + private MappedField defaultField() { + return new MappedField( + HistoBackedMaxAggregatorTests.FIELD_NAME, + new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregatorTests.java index e21b012de38ca..fed8d436685bd 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMinAggregatorTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -110,7 +110,7 @@ public void testQueryFiltering() throws IOException { } private void testCase(Query query, CheckedConsumer indexer, Consumer verify) throws IOException { - testCase(min("_name").field(FIELD_NAME), query, indexer, verify, defaultFieldType()); + testCase(min("_name").field(FIELD_NAME), query, indexer, verify, defaultField()); } @Override @@ -130,11 +130,14 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new 
MinAggregationBuilder("_name").field(fieldName); } - private MappedFieldType defaultFieldType() { - return new HistogramFieldMapper.HistogramFieldType(HistoBackedMinAggregatorTests.FIELD_NAME, Collections.emptyMap(), null); + private MappedField defaultField() { + return new MappedField( + HistoBackedMinAggregatorTests.FIELD_NAME, + new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregatorTests.java index 53c9f7eab0d43..43af26d59c5f7 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedSumAggregatorTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -110,7 +110,7 @@ public void testQueryFiltering() throws IOException { } private void testCase(Query query, CheckedConsumer indexer, Consumer verify) throws IOException { - testCase(sum("_name").field(FIELD_NAME), query, indexer, verify, defaultFieldType()); + testCase(sum("_name").field(FIELD_NAME), query, indexer, verify, defaultField()); } @Override @@ -130,11 +130,14 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + 
protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new SumAggregationBuilder("_name").field(fieldName); } - private MappedFieldType defaultFieldType() { - return new HistogramFieldMapper.HistogramFieldType(HistoBackedSumAggregatorTests.FIELD_NAME, Collections.emptyMap(), null); + private MappedField defaultField() { + return new MappedField( + HistoBackedSumAggregatorTests.FIELD_NAME, + new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null) + ); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregatorTests.java index 111b4aefa0853..efff1b7bb9540 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedValueCountAggregatorTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -111,7 +111,7 @@ public void testQueryFiltering() throws IOException { private void testCase(Query query, CheckedConsumer indexer, Consumer verify) throws IOException { - testCase(count("_name").field(FIELD_NAME), query, indexer, verify, defaultFieldType()); + testCase(count("_name").field(FIELD_NAME), query, indexer, verify, defaultField()); } @Override @@ -135,11 +135,11 @@ protected List 
getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new ValueCountAggregationBuilder("_name").field(fieldName); } - private MappedFieldType defaultFieldType() { - return new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap(), null); + private MappedField defaultField() { + return new MappedField("field", new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java index e20674c51d805..1ffc4a77f6dd4 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java @@ -12,7 +12,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -45,7 +45,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String 
fieldName) { return new PercentileRanksAggregationBuilder("tdigest_percentiles", new double[] { 1.0 }).field(fieldName) .percentilesConfig(new PercentilesConfig.TDigest()); } @@ -70,10 +70,10 @@ public void testSimple() throws IOException { PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] { 0.1, 0.5, 12 }) .field("field") .method(PercentilesMethod.TDIGEST); - MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap(), null); + MappedField mappedField = new MappedField("field", new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); Iterator rankIterator = ranks.iterator(); Percentile rank = rankIterator.next(); assertEquals(0.1, rank.getValue(), 0d); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentilesAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentilesAggregatorTests.java index 33c702538f855..34e29f5ae0593 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentilesAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentilesAggregatorTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import 
org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregator; @@ -47,7 +47,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new PercentilesAggregationBuilder("tdigest_percentiles").field(fieldName).percentilesConfig(new PercentilesConfig.TDigest()); } @@ -130,8 +130,11 @@ private void testCase( PercentilesAggregationBuilder builder = new PercentilesAggregationBuilder("test").field("number") .method(PercentilesMethod.TDIGEST); - MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("number", Collections.emptyMap(), null); - Aggregator aggregator = createAggregator(builder, indexSearcher, fieldType); + MappedField mappedField = new MappedField( + "number", + new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null) + ); + Aggregator aggregator = createAggregator(builder, indexSearcher, mappedField); aggregator.preCollection(); indexSearcher.search(query, aggregator); aggregator.postCollection(); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java index 2f6b8d07867d4..733523839624c 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import 
org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.MockScriptEngine; @@ -61,7 +61,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new BoxplotAggregationBuilder("foo").field(fieldName); } @@ -168,7 +168,7 @@ public void testSomeMatchesNumericDocValues() throws IOException { public void testMissingField() throws IOException { BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number").missing(10L); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("other", 2))); @@ -183,13 +183,13 @@ public void testMissingField() throws IOException { assertEquals(10, boxplot.getQ1(), 0); assertEquals(10, boxplot.getQ2(), 0); assertEquals(10, boxplot.getQ3(), 0); - }, fieldType); + }, mappedField); } public void testUnmappedWithMissingField() throws IOException { BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("does_not_exist").missing(0L); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new 
NumericDocValuesField("number", 7))); @@ -200,13 +200,13 @@ public void testUnmappedWithMissingField() throws IOException { assertEquals(0, boxplot.getQ1(), 0); assertEquals(0, boxplot.getQ2(), 0); assertEquals(0, boxplot.getQ3(), 0); - }, fieldType); + }, mappedField); } public void testUnsupportedType() { BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("not_a_number"); - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("not_a_number"); + MappedField mappedField = new MappedField("not_a_number", new KeywordFieldMapper.KeywordFieldType()); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -215,7 +215,7 @@ public void testUnsupportedType() { new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedSetDocValuesField("string", new BytesRef("foo")))); }, (Consumer) boxplot -> { fail("Should have thrown exception"); }, - fieldType + mappedField ) ); assertEquals(e.getMessage(), "Field [not_a_number] of type [keyword] " + "is not supported for aggregation [boxplot]"); @@ -224,7 +224,7 @@ public void testUnsupportedType() { public void testBadMissingField() { BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number").missing("not_a_number"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 2))); @@ -233,14 +233,14 @@ public void testBadMissingField() { iw.addDocument(singleton(new NumericDocValuesField("number", 4))); iw.addDocument(singleton(new NumericDocValuesField("number", 5))); iw.addDocument(singleton(new NumericDocValuesField("number", 10))); - }, 
(Consumer) boxplot -> { fail("Should have thrown exception"); }, fieldType)); + }, (Consumer) boxplot -> { fail("Should have thrown exception"); }, mappedField)); } public void testUnmappedWithBadMissingField() { BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("does_not_exist") .missing("not_a_number"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); expectThrows(NumberFormatException.class, () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 2))); @@ -249,7 +249,7 @@ public void testUnmappedWithBadMissingField() { iw.addDocument(singleton(new NumericDocValuesField("number", 4))); iw.addDocument(singleton(new NumericDocValuesField("number", 5))); iw.addDocument(singleton(new NumericDocValuesField("number", 10))); - }, (Consumer) boxplot -> { fail("Should have thrown exception"); }, fieldType)); + }, (Consumer) boxplot -> { fail("Should have thrown exception"); }, mappedField)); } public void testEmptyBucket() throws IOException { @@ -258,7 +258,7 @@ public void testEmptyBucket() throws IOException { .minDocCount(0) .subAggregation(new BoxplotAggregationBuilder("boxplot").field("number")); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(histogram, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 1))); @@ -291,13 +291,13 @@ public void testEmptyBucket() throws IOException { assertEquals(21, boxplot.getQ1(), 0); assertEquals(22, boxplot.getQ2(), 0); assertEquals(23, boxplot.getQ3(), 0); - }, 
fieldType); + }, mappedField); } public void testFormatter() throws IOException { BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number").format("0000.0"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 1))); @@ -316,7 +316,7 @@ public void testFormatter() throws IOException { assertEquals("0001.8", boxplot.getQ1AsString()); assertEquals("0003.0", boxplot.getQ2AsString()); assertEquals("0004.2", boxplot.getQ3AsString()); - }, fieldType); + }, mappedField); } public void testGetProperty() throws IOException { @@ -324,7 +324,7 @@ public void testGetProperty() throws IOException { new BoxplotAggregationBuilder("boxplot").field("number") ); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(globalBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 1))); @@ -342,14 +342,14 @@ public void testGetProperty() throws IOException { assertThat(global.getProperty("boxplot.max"), equalTo(5.0)); assertThat(boxplot.getProperty("min"), equalTo(1.0)); assertThat(boxplot.getProperty("max"), equalTo(5.0)); - }, fieldType); + }, mappedField); } public void testValueScript() throws IOException { BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); - MappedFieldType fieldType = new 
NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); @@ -360,14 +360,14 @@ public void testValueScript() throws IOException { assertEquals(2, boxplot.getQ1(), 0); assertEquals(5, boxplot.getQ2(), 0); assertEquals(8, boxplot.getQ3(), 0); - }, fieldType); + }, mappedField); } public void testValueScriptUnmapped() throws IOException { BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("does_not_exist") .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); @@ -378,7 +378,7 @@ public void testValueScriptUnmapped() throws IOException { assertEquals(Double.NaN, boxplot.getQ1(), 0); assertEquals(Double.NaN, boxplot.getQ2(), 0); assertEquals(Double.NaN, boxplot.getQ3(), 0); - }, fieldType); + }, mappedField); } public void testValueScriptUnmappedMissing() throws IOException { @@ -386,7 +386,7 @@ public void testValueScriptUnmappedMissing() throws IOException { .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap())) .missing(1.0); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); 
testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); @@ -398,14 +398,14 @@ public void testValueScriptUnmappedMissing() throws IOException { assertEquals(1.0, boxplot.getQ1(), 0); assertEquals(1.0, boxplot.getQ2(), 0); assertEquals(1.0, boxplot.getQ3(), 0); - }, fieldType); + }, mappedField); } private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); BoxplotAggregationBuilder aggregationBuilder = new BoxplotAggregationBuilder("boxplot").field("number"); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + testCase(aggregationBuilder, query, buildIndex, verify, mappedField); } } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java index 6dd235ab6a2a8..b8b82b63a7f37 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import 
org.elasticsearch.search.aggregations.AggregationBuilder; @@ -153,8 +153,11 @@ private void executeTestCase( try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(HISTO_FIELD); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG); + MappedField fieldType = new MappedField(HISTO_FIELD, new DateFieldMapper.DateFieldType()); + MappedField valueFieldType = new MappedField( + "value_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); InternalAggregation histogram; histogram = searchAndReduce(indexSearcher, query, aggBuilder, fieldType, valueFieldType); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java index 5e6f86f7411c6..0d50cf06b7786 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapperTests.java @@ -318,7 +318,7 @@ public void testCannotBeUsedInMultifields() { public void testMetricType() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - HistogramFieldMapper.HistogramFieldType ft = (HistogramFieldMapper.HistogramFieldType) mapperService.fieldType("field"); + HistogramFieldMapper.HistogramFieldType ft = (HistogramFieldMapper.HistogramFieldType) mapperService.mappedField("field").type(); assertNull(ft.getMetricType()); assertMetricType("histogram", HistogramFieldMapper.HistogramFieldType::getMetricType); diff --git 
a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java index 0452e3b073294..74192075c9cb5 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesHDRAggregatorTests.java @@ -19,7 +19,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; @@ -66,11 +66,14 @@ protected void executeTestCase(int window, int shift, Query query, DateHistogram try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field()); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField(aggBuilder.field(), new DateFieldMapper.DateFieldType()); + MappedField valueMappedField = new MappedField( + "value_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) + ); InternalDateHistogram histogram; - histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, new MappedFieldType[] { fieldType, valueFieldType }); + histogram = 
searchAndReduce(indexSearcher, query, aggBuilder, 1000, mappedField, valueMappedField); for (int i = 0; i < histogram.getBuckets().size(); i++) { InternalDateHistogram.Bucket bucket = histogram.getBuckets().get(i); InternalHDRPercentiles values = bucket.getAggregations().get("MovingPercentiles"); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java index 060ff07905cf4..a18538f91fdc1 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesTDigestAggregatorTests.java @@ -18,7 +18,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; @@ -66,11 +66,14 @@ protected void executeTestCase(int window, int shift, Query query, DateHistogram try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field()); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.DOUBLE); + MappedField mappedField = new MappedField(aggBuilder.field(), new DateFieldMapper.DateFieldType()); + 
MappedField mappedValueField = new MappedField( + "value_field", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) + ); InternalDateHistogram histogram; - histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, new MappedFieldType[] { fieldType, valueFieldType }); + histogram = searchAndReduce(indexSearcher, query, aggBuilder, 1000, mappedField, mappedValueField); for (int i = 0; i < histogram.getBuckets().size(); i++) { InternalDateHistogram.Bucket bucket = histogram.getBuckets().get(i); InternalTDigestPercentiles values = bucket.getAggregations().get("MovingPercentiles"); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregatorTests.java index edd161d16ce32..42a9b77303e66 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregatorTests.java @@ -27,7 +27,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.MockScriptEngine; @@ -77,8 +77,8 @@ public class MultiTermsAggregatorTests extends AggregatorTestCase { public static final String KEYWORD_FIELD = "kVal"; @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - logger.info(fieldType); + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { + 
logger.info(mappedField); return new MultiTermsAggregationBuilder("my_terms").terms( List.of( new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName).build(), @@ -614,10 +614,10 @@ private void testCase( CheckedConsumer buildIndex, Consumer verify ) throws IOException { - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType intType = new NumberFieldMapper.NumberFieldType(INT_FIELD, NumberFieldMapper.NumberType.INTEGER); - MappedFieldType floatType = new NumberFieldMapper.NumberFieldType(FLOAT_FIELD, NumberFieldMapper.NumberType.FLOAT); - MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType(KEYWORD_FIELD); + MappedField dateType = dateFieldType(DATE_FIELD); + MappedField intType = new MappedField(INT_FIELD, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField floatType = new MappedField(FLOAT_FIELD, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.FLOAT)); + MappedField keywordType = new MappedField(KEYWORD_FIELD, new KeywordFieldMapper.KeywordFieldType()); MultiTermsAggregationBuilder builder = new MultiTermsAggregationBuilder("my_terms"); builder.terms(terms); if (builderSetup != null) { @@ -642,23 +642,25 @@ protected List getSearchPlugins() { return Collections.singletonList(new AnalyticsPlugin()); } - private DateFieldMapper.DateFieldType dateFieldType(String name) { - return new DateFieldMapper.DateFieldType( + private MappedField dateFieldType(String name) { + return new MappedField( name, - true, - false, - true, - DateFormatter.forPattern("strict_date"), - DateFieldMapper.Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + new DateFieldMapper.DateFieldType( + true, + false, + true, + DateFormatter.forPattern("strict_date"), + DateFieldMapper.Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); } private Iterable docWithDate(String date, IndexableField... 
fields) { List indexableFields = new ArrayList<>(); - long instant = dateFieldType(DATE_FIELD).parse(date); + long instant = ((DateFieldMapper.DateFieldType) dateFieldType(DATE_FIELD).type()).parse(date); indexableFields.add(new SortedNumericDocValuesField(DATE_FIELD, instant)); indexableFields.add(new LongPoint(DATE_FIELD, instant)); indexableFields.addAll(Arrays.asList(fields)); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java index 571a4b48343f0..7a11073ac9d31 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/normalize/NormalizeAggregatorTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -157,9 +157,12 @@ private void testCase(ValuesSourceAggregationBuilder aggBuilder, Consumer aggBuilder, Consumer { fail("Shouldn't be here"); }, dateType, numType) + }, h -> { fail("Shouldn't be here"); }, dateType, mappedField) ); assertEquals( "The rate aggregation can only be used inside a date histogram aggregation or " @@ -299,8 +299,8 @@ public void testNoWrapping() { } public void testCompositeAggregationWithNoDateHistogramValueSources() { - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType dateType = 
dateFieldType(DATE_FIELD); + MappedField mappedField = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField dateType = dateField(DATE_FIELD); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("day"); List> valuesSourceBuilders = randomBoolean() ? Collections.singletonList(new HistogramValuesSourceBuilder("histo").field("val")) @@ -318,7 +318,7 @@ public void testCompositeAggregationWithNoDateHistogramValueSources() { iw.addDocument(doc("2010-03-12T01:07:45", new NumericDocValuesField("val", 1))); iw.addDocument(doc("2010-04-01T03:43:34", new NumericDocValuesField("val", 3))); iw.addDocument(doc("2010-04-27T03:43:34", new NumericDocValuesField("val", 4))); - }, h -> fail("Shouldn't be here"), dateType, numType) + }, h -> fail("Shouldn't be here"), dateType, mappedField) ); assertEquals( ex.getMessage(), @@ -328,8 +328,8 @@ public void testCompositeAggregationWithNoDateHistogramValueSources() { } public void testDoubleWrapping() throws IOException { - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType dateType = dateFieldType(DATE_FIELD); + MappedField mappedField = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField dateType = dateField(DATE_FIELD); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").field("val"); if (randomBoolean()) { rateAggregationBuilder.rateMode("sum"); @@ -357,13 +357,13 @@ public void testDoubleWrapping() throws IOException { assertThat(dh2.getBuckets(), hasSize(2)); assertThat(((InternalRate) dh2.getBuckets().get(0).getAggregations().asList().get(0)).value(), closeTo(2.0, 0.000001)); assertThat(((InternalRate) dh2.getBuckets().get(1).getAggregations().asList().get(0)).value(), closeTo(7.0, 0.000001)); - }, dateType, numType); + }, dateType, 
mappedField); } public void testKeywordSandwich() throws IOException { - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType("term"); + MappedField numType = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField dateType = dateField(DATE_FIELD); + MappedField keywordType = new MappedField("term", new KeywordFieldMapper.KeywordFieldType()); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").field("val"); if (randomBoolean()) { rateAggregationBuilder.rateMode("sum"); @@ -421,9 +421,9 @@ public void testKeywordSandwich() throws IOException { } public void testWithComposite() throws IOException { - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType("term"); + MappedField numType = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField dateType = dateField(DATE_FIELD); + MappedField keywordType = new MappedField("term", new KeywordFieldMapper.KeywordFieldType()); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").field("val"); if (randomBoolean()) { rateAggregationBuilder.rateMode("sum"); @@ -501,9 +501,9 @@ public void testUnsupportedKeywordSandwich() throws IOException { histogram = randomFrom("second", "minute", "day", "week"); } - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType("term"); + 
MappedField numType = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField dateType = dateField(DATE_FIELD); + MappedField keywordType = new MappedField("term", new KeywordFieldMapper.KeywordFieldType()); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit(rate).field("val"); if (randomBoolean()) { rateAggregationBuilder.rateMode("sum"); @@ -573,9 +573,9 @@ public void testUnsupportedKeywordSandwich() throws IOException { } public void testKeywordSandwichWithSorting() throws IOException { - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType("term"); + MappedField numType = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField dateType = dateField(DATE_FIELD); + MappedField keywordType = new MappedField("term", new KeywordFieldMapper.KeywordFieldType()); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("week").field("val"); boolean useSum = randomBoolean(); if (useSum) { @@ -667,9 +667,9 @@ public void testScriptMonthToDay() throws IOException { } public void testFilter() throws IOException { - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType("term"); + MappedField numType = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField dateType = dateField(DATE_FIELD); + MappedField keywordType = new MappedField("term", new KeywordFieldMapper.KeywordFieldType()); RateAggregationBuilder rateAggregationBuilder = new 
RateAggregationBuilder("my_rate").rateUnit("month").field("val"); if (randomBoolean()) { rateAggregationBuilder.rateMode("sum"); @@ -692,8 +692,8 @@ public void testFilter() throws IOException { } public void testFormatter() throws IOException { - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType dateType = dateFieldType(DATE_FIELD); + MappedField numType = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField dateType = dateField(DATE_FIELD); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month") .field("val") .format("00.0/M"); @@ -719,8 +719,8 @@ public void testFormatter() throws IOException { } public void testHistogramFieldMonthToMonth() throws IOException { - MappedFieldType histType = new HistogramFieldMapper.HistogramFieldType("val", Collections.emptyMap(), null); - MappedFieldType dateType = dateFieldType(DATE_FIELD); + MappedField histType = new MappedField("val", new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); + MappedField dateType = dateField(DATE_FIELD); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").field("val"); if (randomBoolean()) { rateAggregationBuilder.rateMode("sum"); @@ -742,8 +742,8 @@ public void testHistogramFieldMonthToMonth() throws IOException { } public void testHistogramFieldMonthToYear() throws IOException { - MappedFieldType histType = new HistogramFieldMapper.HistogramFieldType("val", Collections.emptyMap(), null); - MappedFieldType dateType = dateFieldType(DATE_FIELD); + MappedField histType = new MappedField("val", new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); + MappedField dateType = dateField(DATE_FIELD); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").field("val"); if 
(randomBoolean()) { rateAggregationBuilder.rateMode("sum"); @@ -762,8 +762,8 @@ public void testHistogramFieldMonthToYear() throws IOException { } public void testHistogramFieldMonthToMonthValueCount() throws IOException { - MappedFieldType histType = new HistogramFieldMapper.HistogramFieldType("val", Collections.emptyMap(), null); - MappedFieldType dateType = dateFieldType(DATE_FIELD); + MappedField histType = new MappedField("val", new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); + MappedField dateType = dateField(DATE_FIELD); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month") .rateMode("value_count") .field("val"); @@ -784,8 +784,8 @@ public void testHistogramFieldMonthToMonthValueCount() throws IOException { } public void testHistogramFieldMonthToYearValueCount() throws IOException { - MappedFieldType histType = new HistogramFieldMapper.HistogramFieldType("val", Collections.emptyMap(), null); - MappedFieldType dateType = dateFieldType(DATE_FIELD); + MappedField histType = new MappedField("val", new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); + MappedField dateType = dateField(DATE_FIELD); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month") .rateMode("value_count") .field("val"); @@ -805,9 +805,9 @@ public void testHistogramFieldMonthToYearValueCount() throws IOException { } public void testFilterWithHistogramField() throws IOException { - MappedFieldType histType = new HistogramFieldMapper.HistogramFieldType("val", Collections.emptyMap(), null); - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType keywordType = new KeywordFieldMapper.KeywordFieldType("term"); + MappedField histType = new MappedField("val", new HistogramFieldMapper.HistogramFieldType(Collections.emptyMap(), null)); + MappedField dateType = dateField(DATE_FIELD); + MappedField keywordType = new MappedField("term", new 
KeywordFieldMapper.KeywordFieldType()); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").field("val"); AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( @@ -837,8 +837,8 @@ public void testFilterWithHistogramField() throws IOException { } public void testModeWithoutField() { - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); + MappedField dateType = dateField(DATE_FIELD); + MappedField numType = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate").rateUnit("month").rateMode("sum"); AbstractAggregationBuilder dateHistogramAggregationBuilder = randomValidMultiBucketAggBuilder( @@ -894,8 +894,8 @@ private void testCase( CheckedConsumer buildIndex, Consumer verify ) throws IOException { - MappedFieldType dateType = dateFieldType(DATE_FIELD); - MappedFieldType numType = new NumberFieldMapper.NumberFieldType("val", NumberFieldMapper.NumberType.INTEGER); + MappedField dateType = dateField(DATE_FIELD); + MappedField numType = new MappedField("val", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); RateAggregationBuilder rateAggregationBuilder = new RateAggregationBuilder("my_rate"); if (unit != null) { rateAggregationBuilder.rateUnit(unit); @@ -923,23 +923,25 @@ protected List getSearchPlugins() { return Collections.singletonList(new AnalyticsPlugin()); } - private DateFieldMapper.DateFieldType dateFieldType(String name) { - return new DateFieldMapper.DateFieldType( + private MappedField dateField(String name) { + return new MappedField( name, - true, - false, - true, - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - DateFieldMapper.Resolution.MILLISECONDS, - null, - null, - Collections.emptyMap() + 
new DateFieldMapper.DateFieldType( + true, + false, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + DateFieldMapper.Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ) ); } private Iterable doc(String date, IndexableField... fields) { List indexableFields = new ArrayList<>(); - long instant = dateFieldType(DATE_FIELD).parse(date); + long instant = ((DateFieldMapper.DateFieldType) dateField(DATE_FIELD).type()).parse(date); indexableFields.add(new SortedNumericDocValuesField(DATE_FIELD, instant)); indexableFields.add(new LongPoint(DATE_FIELD, instant)); indexableFields.addAll(Arrays.asList(fields)); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java index fc669d19e5a3c..92c890417773d 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.IpFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.plugins.SearchPlugin; @@ -116,16 +116,16 @@ public void testUnmappedWithMissingField() throws IOException { } public void testMissing() throws IOException { - final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); - fieldType.setFielddata(true); + final MappedField mappedField = new MappedField("text", new TextFieldMapper.TextFieldType()); + ((TextFieldMapper.TextFieldType) 
mappedField.type()).setFielddata(true); - final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(fieldType.name()) + final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(mappedField.name()) .missing("b"); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new TextField(fieldType.name(), "a", Field.Store.NO))); + iw.addDocument(singleton(new TextField(mappedField.name(), "a", Field.Store.NO))); iw.addDocument(emptySet()); - iw.addDocument(singleton(new TextField(fieldType.name(), "a", Field.Store.NO))); + iw.addDocument(singleton(new TextField(mappedField.name(), "a", Field.Store.NO))); iw.addDocument(emptySet()); }, stats -> { assertEquals(4, stats.getCount()); @@ -136,7 +136,7 @@ public void testMissing() throws IOException { assertEquals(0.5, stats.getDistribution().get("a"), 0); assertEquals(0.5, stats.getDistribution().get("b"), 0); assertEquals(1.0, stats.getEntropy(), 0); - }, fieldType); + }, mappedField); } public void testSingleValuedField() throws IOException { @@ -192,8 +192,8 @@ public void testQueryFiltering() throws IOException { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/47469") public void testSingleValuedFieldWithFormatter() throws IOException { - TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); - fieldType.setFielddata(true); + final MappedField mappedField = new MappedField("text", new TextFieldMapper.TextFieldType()); + ((TextFieldMapper.TextFieldType) mappedField.type()).setFielddata(true); StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field("text") .format("0000.00") @@ -209,17 +209,17 @@ public void testSingleValuedFieldWithFormatter() throws IOException { assertEquals("0005.00", stats.getMinLengthAsString()); assertEquals("0005.00", stats.getAvgLengthAsString()); assertEquals("0002.58", 
stats.getEntropyAsString()); - }, fieldType); + }, mappedField); } /** * Test a string_stats aggregation as a subaggregation of a terms aggregation */ public void testNestedAggregation() throws IOException { - MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType("value", NumberFieldMapper.NumberType.INTEGER); + MappedField numericField = new MappedField("value", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); - TextFieldMapper.TextFieldType textFieldType = new TextFieldMapper.TextFieldType("text"); - textFieldType.setFielddata(true); + final MappedField textField = new MappedField("text", new TextFieldMapper.TextFieldType()); + ((TextFieldMapper.TextFieldType) textField.type()).setFielddata(true); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.NUMERIC) .field("value") @@ -239,7 +239,7 @@ public void testNestedAggregation() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, numericFieldType, textFieldType); + TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, numericField, textField); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); @@ -273,15 +273,15 @@ public void testNestedAggregation() throws IOException { } public void testValueScriptSingleValuedField() throws IOException { - final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); - fieldType.setFielddata(true); + final MappedField mappedField = new MappedField("text", new TextFieldMapper.TextFieldType()); + ((TextFieldMapper.TextFieldType) mappedField.type()).setFielddata(true); - final StringStatsAggregationBuilder aggregationBuilder = new 
StringStatsAggregationBuilder("_name").field(fieldType.name()) + final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(mappedField.name()) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO))); - iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO))); + iw.addDocument(singleton(new TextField(mappedField.name(), "b", Field.Store.NO))); + iw.addDocument(singleton(new TextField(mappedField.name(), "b", Field.Store.NO))); }, stats -> { assertEquals(2, stats.getCount()); assertEquals(2, stats.getMaxLength()); @@ -291,22 +291,22 @@ public void testValueScriptSingleValuedField() throws IOException { assertEquals(0.5, stats.getDistribution().get("a"), 0); assertEquals(0.5, stats.getDistribution().get("b"), 0); assertEquals(1.0, stats.getEntropy(), 0); - }, fieldType); + }, mappedField); } public void testValueScriptMultiValuedField() throws IOException { - final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); - fieldType.setFielddata(true); + final MappedField mappedField = new MappedField("text", new TextFieldMapper.TextFieldType()); + ((TextFieldMapper.TextFieldType) mappedField.type()).setFielddata(true); - final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(fieldType.name()) + final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field(mappedField.name()) .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument( - Set.of(new TextField(fieldType.name(), "b", Field.Store.NO), new TextField(fieldType.name(), "c", Field.Store.NO)) + Set.of(new 
TextField(mappedField.name(), "b", Field.Store.NO), new TextField(mappedField.name(), "c", Field.Store.NO)) ); iw.addDocument( - Set.of(new TextField(fieldType.name(), "b", Field.Store.NO), new TextField(fieldType.name(), "c", Field.Store.NO)) + Set.of(new TextField(mappedField.name(), "b", Field.Store.NO), new TextField(mappedField.name(), "c", Field.Store.NO)) ); }, stats -> { assertEquals(4, stats.getCount()); @@ -318,20 +318,20 @@ public void testValueScriptMultiValuedField() throws IOException { assertEquals(0.25, stats.getDistribution().get("b"), 0); assertEquals(0.25, stats.getDistribution().get("c"), 0); assertEquals(1.5, stats.getEntropy(), 0); - }, fieldType); + }, mappedField); } public void testFieldScriptSingleValuedField() throws IOException { - final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); - fieldType.setFielddata(true); + final MappedField mappedField = new MappedField("text", new TextFieldMapper.TextFieldType()); + ((TextFieldMapper.TextFieldType) mappedField.type()).setFielddata(true); final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").script( - new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", fieldType.name())) + new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", mappedField.name())) ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { - iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO))); - iw.addDocument(singleton(new TextField(fieldType.name(), "b", Field.Store.NO))); + iw.addDocument(singleton(new TextField(mappedField.name(), "b", Field.Store.NO))); + iw.addDocument(singleton(new TextField(mappedField.name(), "b", Field.Store.NO))); }, stats -> { assertEquals(2, stats.getCount()); assertEquals(2, stats.getMaxLength()); @@ -341,23 +341,23 @@ public void testFieldScriptSingleValuedField() throws IOException { 
assertEquals(0.5, stats.getDistribution().get("a"), 0); assertEquals(0.5, stats.getDistribution().get("b"), 0); assertEquals(1.0, stats.getEntropy(), 0); - }, fieldType); + }, mappedField); } public void testFieldScriptMultiValuedField() throws IOException { - final TextFieldMapper.TextFieldType fieldType = new TextFieldMapper.TextFieldType("text"); - fieldType.setFielddata(true); + final MappedField mappedField = new MappedField("text", new TextFieldMapper.TextFieldType()); + ((TextFieldMapper.TextFieldType) mappedField.type()).setFielddata(true); final StringStatsAggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").script( - new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", fieldType.name())) + new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap("field", mappedField.name())) ); testAggregation(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument( - Set.of(new TextField(fieldType.name(), "b", Field.Store.NO), new TextField(fieldType.name(), "c", Field.Store.NO)) + Set.of(new TextField(mappedField.name(), "b", Field.Store.NO), new TextField(mappedField.name(), "c", Field.Store.NO)) ); iw.addDocument( - Set.of(new TextField(fieldType.name(), "b", Field.Store.NO), new TextField(fieldType.name(), "c", Field.Store.NO)) + Set.of(new TextField(mappedField.name(), "b", Field.Store.NO), new TextField(mappedField.name(), "c", Field.Store.NO)) ); }, stats -> { assertEquals(4, stats.getCount()); @@ -369,7 +369,7 @@ public void testFieldScriptMultiValuedField() throws IOException { assertEquals(0.25, stats.getDistribution().get("b"), 0); assertEquals(0.25, stats.getDistribution().get("c"), 0); assertEquals(1.5, stats.getEntropy(), 0); - }, fieldType); + }, mappedField); } private void testAggregation( @@ -377,11 +377,11 @@ private void testAggregation( CheckedConsumer buildIndex, Consumer verify ) throws IOException { - TextFieldMapper.TextFieldType 
fieldType = new TextFieldMapper.TextFieldType("text"); - fieldType.setFielddata(true); + final MappedField mappedField = new MappedField("text", new TextFieldMapper.TextFieldType()); + ((TextFieldMapper.TextFieldType) mappedField.type()).setFielddata(true); AggregationBuilder aggregationBuilder = new StringStatsAggregationBuilder("_name").field("text"); - testAggregation(aggregationBuilder, query, buildIndex, verify, fieldType); + testAggregation(aggregationBuilder, query, buildIndex, verify, mappedField); } private void testAggregation( @@ -389,13 +389,13 @@ private void testAggregation( Query query, CheckedConsumer buildIndex, Consumer verify, - MappedFieldType... fieldTypes + MappedField... mappedFields ) throws IOException { - testCase(aggregationBuilder, query, buildIndex, verify, fieldTypes); + testCase(aggregationBuilder, query, buildIndex, verify, mappedFields); } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new StringStatsAggregationBuilder("_name").field(fieldName); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java index 77c8fd7e27c8e..4649c8dfe0305 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import 
org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.index.mapper.TextFieldMapper; @@ -450,48 +450,48 @@ private Query boostFoo() { .build(); } - private MappedFieldType[] doubleFields() { - return new MappedFieldType[] { numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m") }; + private MappedField[] doubleFields() { + return new MappedField[] { numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m") }; } - private MappedFieldType[] longFields() { - return new MappedFieldType[] { numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.LONG, "m") }; + private MappedField[] longFields() { + return new MappedField[] { numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.LONG, "m") }; } - private MappedFieldType[] manyMetricsFields() { - return new MappedFieldType[] { + private MappedField[] manyMetricsFields() { + return new MappedField[] { numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m1"), numberFieldType(NumberType.LONG, "m2"), numberFieldType(NumberType.DOUBLE, "m3"), }; } - private MappedFieldType[] floatAndDoubleField() { - return new MappedFieldType[] { numberFieldType(NumberType.FLOAT, "s"), numberFieldType(NumberType.DOUBLE, "m") }; + private MappedField[] floatAndDoubleField() { + return new MappedField[] { numberFieldType(NumberType.FLOAT, "s"), numberFieldType(NumberType.DOUBLE, "m") }; } - private MappedFieldType[] longAndDoubleField() { - return new MappedFieldType[] { numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.DOUBLE, "m") }; + private MappedField[] longAndDoubleField() { + return new MappedField[] { numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.DOUBLE, "m") }; } - private MappedFieldType[] textAndDoubleField() { - return new MappedFieldType[] { textFieldType("s"), numberFieldType(NumberType.DOUBLE, "m") }; + private 
MappedField[] textAndDoubleField() { + return new MappedField[] { textFieldType("s"), numberFieldType(NumberType.DOUBLE, "m") }; } - private MappedFieldType[] geoPointAndDoubleField() { - return new MappedFieldType[] { geoPointFieldType("s"), numberFieldType(NumberType.DOUBLE, "m") }; + private MappedField[] geoPointAndDoubleField() { + return new MappedField[] { geoPointFieldType("s"), numberFieldType(NumberType.DOUBLE, "m") }; } - private MappedFieldType numberFieldType(NumberType numberType, String name) { - return new NumberFieldMapper.NumberFieldType(name, numberType); + private MappedField numberFieldType(NumberType numberType, String name) { + return new MappedField(name, new NumberFieldMapper.NumberFieldType(numberType)); } - private MappedFieldType textFieldType(String name) { - return new TextFieldMapper.TextFieldType(name); + private MappedField textFieldType(String name) { + return new MappedField(name, new TextFieldMapper.TextFieldType()); } - private MappedFieldType geoPointFieldType(String name) { - return new GeoPointFieldMapper.GeoPointFieldType(name); + private MappedField geoPointFieldType(String name) { + return new MappedField(name, new GeoPointFieldMapper.GeoPointFieldType()); } private IndexableField doubleField(String name, double value) { @@ -518,7 +518,7 @@ private InternalTopMetrics collect( TopMetricsAggregationBuilder builder, Query query, CheckedConsumer buildIndex, - MappedFieldType... fields + MappedField... fields ) throws IOException { InternalTopMetrics result = (InternalTopMetrics) collect((AggregationBuilder) builder, query, buildIndex, fields); List expectedFieldNames = builder.getMetricFields() @@ -533,7 +533,7 @@ private InternalAggregation collect( AggregationBuilder builder, Query query, CheckedConsumer buildIndex, - MappedFieldType... fields + MappedField... 
fields ) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java index 7e33f3c3ee7e5..4510141ddd160 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.SearchPlugin; @@ -65,8 +65,8 @@ public class TTestAggregatorTests extends AggregatorTestCase { public static final String TERM_FILTERING = "term_filtering"; @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { - if (fieldType instanceof NumberFieldMapper.NumberFieldType) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { + if (mappedField.type() instanceof NumberFieldMapper.NumberFieldType) { return new TTestAggregationBuilder("foo").a( new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) .setFilter(QueryBuilders.rangeQuery(fieldName).lt(10)) @@ -77,8 +77,8 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy .setFilter(QueryBuilders.rangeQuery(fieldName).gte(10)) .build() ); - } else if 
(fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE) - || fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) { + } else if (mappedField.typeName().equals(DateFieldMapper.CONTENT_TYPE) + || mappedField.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) { return new TTestAggregationBuilder("foo").a( new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) @@ -90,7 +90,7 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy .setFilter(QueryBuilders.rangeQuery(fieldName).gte(DateUtils.toInstant(10))) .build() ); - } else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) { + } else if (mappedField.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) { return new TTestAggregationBuilder("foo").a( new MultiValuesSourceFieldConfig.Builder().setFieldName(fieldName) .setFilter(QueryBuilders.rangeQuery(fieldName).lt("true")) @@ -201,7 +201,7 @@ public void testMultiplePairedValues() { public void testSameFieldAndNoFilters() { TTestType tTestType = randomFrom(TTestType.values()); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.INTEGER); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setMissing(100).build() ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("field").setMissing(100).build()).testType(tTestType); @@ -211,7 +211,7 @@ public void testSameFieldAndNoFilters() { () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField("field", 102))); iw.addDocument(singleton(new SortedNumericDocValuesField("field", 99))); - }, tTest -> fail("Should have thrown exception"), fieldType) + }, tTest -> fail("Should have thrown 
exception"), mappedField) ); assertEquals("The same field [field] is used for both population but no filters are specified.", ex.getMessage()); } @@ -267,8 +267,14 @@ public void testUnmappedWithMissingField() throws IOException { TTestType tTestType = randomFrom(TTestType.values()); boolean missA = randomBoolean(); boolean missB = missA == false || randomBoolean(); // at least one of the fields should be missing - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType(missA ? "not_a" : "a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(missB ? "not_b" : "b", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField( + missA ? "not_a" : "a", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); + MappedField field2 = new MappedField( + missB ? "not_b" : "b", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setMissing(100).build() ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").setMissing(100).build()).testType(tTestType); @@ -295,24 +301,24 @@ public void testUnmappedWithMissingField() throws IOException { } } } - }, fieldType1, fieldType2); + }, field1, field2); } public void testUnsupportedType() { TTestType tTestType = randomFrom(TTestType.values()); boolean wrongA = randomBoolean(); boolean wrongB = wrongA == false || randomBoolean(); // at least one of the fields should have unsupported type - MappedFieldType fieldType1; + MappedField field1; if (wrongA) { - fieldType1 = new KeywordFieldMapper.KeywordFieldType("a"); + field1 = new MappedField("a", new KeywordFieldMapper.KeywordFieldType()); } else { - fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); + field1 = new MappedField("a", new 
NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); } - MappedFieldType fieldType2; + MappedField field2; if (wrongB) { - fieldType2 = new KeywordFieldMapper.KeywordFieldType("b"); + field2 = new MappedField("b", new KeywordFieldMapper.KeywordFieldType()); } else { - fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); } TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() @@ -329,7 +335,7 @@ public void testUnsupportedType() { ) ); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); - }, tTest -> fail("Should have thrown exception"), fieldType1, fieldType2) + }, tTest -> fail("Should have thrown exception"), field1, field2) ); assertEquals("Expected numeric type on field [" + (wrongA ? 
"a" : "b") + "], but got [keyword]", ex.getMessage()); } @@ -338,12 +344,12 @@ public void testBadMissingField() { TTestType tTestType = randomFrom(TTestType.values()); boolean missA = randomBoolean(); boolean missB = missA == false || randomBoolean(); // at least one of the fields should be have bad missing - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MultiValuesSourceFieldConfig.Builder a = new MultiValuesSourceFieldConfig.Builder().setFieldName("a"); if (missA) { a.setMissing("bad_number"); } - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MultiValuesSourceFieldConfig.Builder b = new MultiValuesSourceFieldConfig.Builder().setFieldName("b"); if (missB) { b.setMissing("bad_number"); @@ -355,7 +361,7 @@ public void testBadMissingField() { () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("b", 89))); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); - }, tTest -> fail("Should have thrown exception"), fieldType1, fieldType2) + }, tTest -> fail("Should have thrown exception"), field1, field2) ); assertEquals("For input string: \"bad_number\"", ex.getMessage()); } @@ -364,14 +370,17 @@ public void testUnmappedWithBadMissingField() { TTestType tTestType = randomFrom(TTestType.values()); boolean missA = randomBoolean(); boolean missB = missA == false || randomBoolean(); // at least one of the fields should be have bad missing - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", 
NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MultiValuesSourceFieldConfig.Builder a = new MultiValuesSourceFieldConfig.Builder(); if (missA) { a.setFieldName("not_a").setMissing("bad_number"); } else { a.setFieldName("a"); } - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(missB ? "not_b" : "b", NumberFieldMapper.NumberType.INTEGER); + MappedField field2 = new MappedField( + missB ? "not_b" : "b", + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER) + ); MultiValuesSourceFieldConfig.Builder b = new MultiValuesSourceFieldConfig.Builder(); if (missB) { @@ -386,16 +395,16 @@ public void testUnmappedWithBadMissingField() { () -> testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> { iw.addDocument(asList(new SortedNumericDocValuesField("a", 102), new SortedNumericDocValuesField("b", 89))); iw.addDocument(asList(new SortedNumericDocValuesField("a", 99), new SortedNumericDocValuesField("b", 93))); - }, tTest -> fail("Should have thrown exception"), fieldType1, fieldType2) + }, tTest -> fail("Should have thrown exception"), field1, field2) ); assertEquals("For input string: \"bad_number\"", ex.getMessage()); } public void testEmptyBucket() throws IOException { TTestType tTestType = randomFrom(TTestType.values()); - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldTypePart = new NumberFieldMapper.NumberFieldType("part", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField 
fieldPart = new MappedField("part", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); HistogramAggregationBuilder histogram = new HistogramAggregationBuilder("histo").field("part") .interval(10) .minDocCount(0) @@ -446,14 +455,14 @@ public void testEmptyBucket() throws IOException { 0.000001 ); - }, fieldType1, fieldType2, fieldTypePart); + }, field1, field2, fieldPart); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/54365") public void testFormatter() throws IOException { TTestType tTestType = randomFrom(TTestType.values()); - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(tTestType).format("0.00%"); @@ -472,12 +481,12 @@ public void testFormatter() throws IOException { tTestType == TTestType.PAIRED ? "19.40%" : tTestType == TTestType.HOMOSCEDASTIC ? 
"5.88%" : "7.53%", tTest.getValueAsString() ); - }, fieldType1, fieldType2); + }, field1, field2); } public void testGetProperty() throws IOException { - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); GlobalAggregationBuilder globalBuilder = new GlobalAggregationBuilder("global").subAggregation( new TTestAggregationBuilder("t_test").a(new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build()) .b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()) @@ -495,14 +504,14 @@ public void testGetProperty() throws IOException { InternalTTest tTest = (InternalTTest) global.getAggregations().asMap().get("t_test"); assertEquals(tTest, global.getProperty("t_test")); assertEquals(0.1939778614, (Double) global.getProperty("t_test.value"), 0.000001); - }, fieldType1, fieldType2); + }, field1, field2); } public void testScript() throws IOException { boolean fieldInA = randomBoolean(); TTestType tTestType = randomFrom(TTestType.values()); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.INTEGER); + MappedField field = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); MultiValuesSourceFieldConfig a = new MultiValuesSourceFieldConfig.Builder().setFieldName("field").build(); MultiValuesSourceFieldConfig b = new MultiValuesSourceFieldConfig.Builder().setScript( @@ -520,13 +529,13 @@ public void testScript() throws IOException { (Consumer) tTest -> { assertEquals(tTestType == TTestType.PAIRED ? 
0 : 0.5733922538, tTest.getValue(), 0.000001); }, - fieldType + field ); } public void testPaired() throws IOException { - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(TTestType.PAIRED); @@ -541,12 +550,12 @@ public void testPaired() throws IOException { iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98))); iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102))); iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98))); - }, (Consumer) ttest -> { assertEquals(0.09571844217 * tails, ttest.getValue(), 0.00001); }, fieldType1, fieldType2); + }, (Consumer) ttest -> { assertEquals(0.09571844217 * tails, ttest.getValue(), 0.00001); }, field1, field2); } public void testHomoscedastic() throws IOException { - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TTestAggregationBuilder 
aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()).testType(TTestType.HOMOSCEDASTIC); @@ -561,12 +570,12 @@ public void testHomoscedastic() throws IOException { iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98))); iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102))); iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98))); - }, (Consumer) ttest -> { assertEquals(0.03928288693 * tails, ttest.getValue(), 0.00001); }, fieldType1, fieldType2); + }, (Consumer) ttest -> { assertEquals(0.03928288693 * tails, ttest.getValue(), 0.00001); }, field1, field2); } public void testHeteroscedastic() throws IOException { - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() ).b(new MultiValuesSourceFieldConfig.Builder().setFieldName("b").build()); @@ -584,13 +593,13 @@ public void testHeteroscedastic() throws IOException { iw.addDocument(asList(new NumericDocValuesField("a", 97), new NumericDocValuesField("b", 98))); iw.addDocument(asList(new NumericDocValuesField("a", 101), new NumericDocValuesField("b", 102))); iw.addDocument(asList(new NumericDocValuesField("a", 99), new NumericDocValuesField("b", 98))); - }, (Consumer) ttest -> { 
assertEquals(0.04538666214 * tails, ttest.getValue(), 0.00001); }, fieldType1, fieldType2); + }, (Consumer) ttest -> { assertEquals(0.04538666214 * tails, ttest.getValue(), 0.00001); }, field1, field2); } public void testFiltered() throws IOException { TTestType tTestType = randomFrom(TTestType.values()); - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("a").setFilter(QueryBuilders.termQuery("b", 1)).build() ) @@ -630,8 +639,8 @@ public void testFiltered() throws IOException { new MatchAllDocsQuery(), buildIndex, tTest -> fail("Should have thrown exception"), - fieldType1, - fieldType2 + field1, + field2 ) ); assertEquals("Paired t-test doesn't support filters", ex.getMessage()); @@ -642,7 +651,7 @@ public void testFiltered() throws IOException { } else { assertEquals(0.04538666214 * tails, ttest.getValue(), 0.00001); } - }, fieldType1, fieldType2); + }, field1, field2); } } @@ -650,8 +659,8 @@ public void testFilterByFilterOrScript() throws IOException { boolean fieldInA = randomBoolean(); TTestType tTestType = randomFrom(TTestType.HOMOSCEDASTIC, TTestType.HETEROSCEDASTIC); - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("term", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("field", new 
NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("term", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); boolean filterTermOne = randomBoolean(); @@ -673,7 +682,7 @@ public void testFilterByFilterOrScript() throws IOException { iw.addDocument(asList(new NumericDocValuesField("field", 4), new IntPoint("term", 2), new NumericDocValuesField("term", 2))); iw.addDocument(asList(new NumericDocValuesField("field", 5), new IntPoint("term", 2), new NumericDocValuesField("term", 2))); iw.addDocument(asList(new NumericDocValuesField("field", 6), new IntPoint("term", 2), new NumericDocValuesField("term", 2))); - }, (Consumer) tTest -> { assertEquals(0.02131164113, tTest.getValue(), 0.000001); }, fieldType1, fieldType2); + }, (Consumer) tTest -> { assertEquals(0.02131164113, tTest.getValue(), 0.000001); }, field1, field2); } private void testCase( @@ -682,8 +691,8 @@ private void testCase( CheckedConsumer buildIndex, Consumer verify ) throws IOException { - MappedFieldType fieldType1 = new NumberFieldMapper.NumberFieldType("a", NumberFieldMapper.NumberType.INTEGER); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("b", NumberFieldMapper.NumberType.INTEGER); + MappedField field1 = new MappedField("a", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); + MappedField field2 = new MappedField("b", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER)); TTestAggregationBuilder aggregationBuilder = new TTestAggregationBuilder("t_test").a( new MultiValuesSourceFieldConfig.Builder().setFieldName("a").build() @@ -691,7 +700,7 @@ private void testCase( if (type != TTestType.HETEROSCEDASTIC || randomBoolean()) { aggregationBuilder.testType(type); } - testCase(aggregationBuilder, query, buildIndex, verify, fieldType1, fieldType2); + testCase(aggregationBuilder, query, buildIndex, verify, field1, field2); } @Override diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java index e1f6ec33bc74c..134fa17074434 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldMapper.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.query.SearchExecutionContext; @@ -36,7 +37,7 @@ static final class DataTierFieldType extends ConstantFieldType { static final DataTierFieldType INSTANCE = new DataTierFieldType(); private DataTierFieldType() { - super(NAME, Collections.emptyMap()); + super(Collections.emptyMap()); } @Override @@ -63,7 +64,7 @@ protected boolean matches(String pattern, boolean caseInsensitive, SearchExecuti } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { String tierPreference = getTierPreference(context); if (tierPreference == null) { return new MatchNoDocsQuery(); @@ -72,9 +73,9 @@ public Query existsQuery(SearchExecutionContext context) { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new 
IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } String tierPreference = getTierPreference(context); @@ -100,7 +101,7 @@ private String getTierPreference(SearchExecutionContext context) { } public DataTierFieldMapper() { - super(DataTierFieldType.INSTANCE); + super(new MappedField(NAME, DataTierFieldType.INSTANCE)); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java index fdbc1bfed6156..23572607ff48e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java @@ -19,7 +19,7 @@ /** * A utility class for fields that need to support autocomplete via - * {@link MappedFieldType#getTerms(boolean, String, org.elasticsearch.index.query.SearchExecutionContext, String)} + * {@link MappedFieldType#getTerms(String, boolean, String, org.elasticsearch.index.query.SearchExecutionContext, String)} * but can't return a raw Lucene TermsEnum. 
*/ public class SimpleTermCountEnum extends TermsEnum { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java index bc6b1ad38ef87..be031eca212b5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java @@ -38,7 +38,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.Rewriteable; @@ -349,9 +349,9 @@ protected NodeTermsEnumResponse dataNodeOperation(NodeTermsEnumRequest request, null, Collections.emptyMap() ); - final MappedFieldType mappedFieldType = indexShard.mapperService().fieldType(request.field()); - if (mappedFieldType != null) { - TermsEnum terms = mappedFieldType.getTerms( + final MappedField mappedField = indexShard.mapperService().mappedField(request.field()); + if (mappedField != null) { + TermsEnum terms = mappedField.getTerms( request.caseInsensitive(), request.string() == null ? 
"" : request.string(), queryShardContext, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index 099d64f69ad7c..0f39d98b71e2a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -456,7 +456,7 @@ public IndexShard reindex(DirectoryReader reader, MappingMetadata mapping) throw if (liveDocs == null || liveDocs.get(i)) { rootFieldsVisitor.reset(); leafReader.document(i, rootFieldsVisitor); - rootFieldsVisitor.postProcess(targetShard.mapperService()::fieldType); + rootFieldsVisitor.postProcess(targetShard.mapperService()::mappedField); String id = rootFieldsVisitor.id(); BytesReference source = rootFieldsVisitor.source(); assert source != null : "_source is null but should have been filtered out at snapshot time"; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldTypeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldTypeTests.java index 9d712d2381384..fd3b38c7d3226 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldTypeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/cluster/routing/allocation/mapper/DataTierFieldTypeTests.java @@ -36,52 +36,52 @@ public class DataTierFieldTypeTests extends MapperServiceTestCase { public void testPrefixQuery() throws IOException { MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE; - assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("data_w", null, createContext())); - assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("noSuchRole", null, 
createContext())); + assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("_tier", "data_w", null, createContext())); + assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("_tier", "noSuchRole", null, createContext())); } public void testWildcardQuery() { MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE; - assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("data_w*", null, createContext())); - assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("data_warm", null, createContext())); - assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("Data_Warm", null, true, createContext())); - assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("Data_Warm", null, false, createContext())); - assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("noSuchRole", null, createContext())); - - assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("data_*", null, createContextWithoutSetting())); - assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("*", null, createContextWithoutSetting())); + assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("_tier", "data_w*", null, createContext())); + assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("_tier", "data_warm", null, createContext())); + assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("_tier", "Data_Warm", null, true, createContext())); + assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("_tier", "Data_Warm", null, false, createContext())); + assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("_tier", "noSuchRole", null, createContext())); + + assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("_tier", "data_*", null, createContextWithoutSetting())); + assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("_tier", "*", null, createContextWithoutSetting())); } public void testTermQuery() { MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE; - assertEquals(new MatchAllDocsQuery(), ft.termQuery("data_warm", createContext())); - assertEquals(new MatchNoDocsQuery(), 
ft.termQuery("data_hot", createContext())); - assertEquals(new MatchNoDocsQuery(), ft.termQuery("noSuchRole", createContext())); + assertEquals(new MatchAllDocsQuery(), ft.termQuery("_tier", "data_warm", createContext())); + assertEquals(new MatchNoDocsQuery(), ft.termQuery("_tier", "data_hot", createContext())); + assertEquals(new MatchNoDocsQuery(), ft.termQuery("_tier", "noSuchRole", createContext())); - assertEquals(new MatchNoDocsQuery(), ft.termQuery("data_warm", createContextWithoutSetting())); - assertEquals(new MatchNoDocsQuery(), ft.termQuery("", createContextWithoutSetting())); + assertEquals(new MatchNoDocsQuery(), ft.termQuery("_tier", "data_warm", createContextWithoutSetting())); + assertEquals(new MatchNoDocsQuery(), ft.termQuery("_tier", "", createContextWithoutSetting())); } public void testTermsQuery() { MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE; - assertEquals(new MatchAllDocsQuery(), ft.termsQuery(Arrays.asList("data_warm"), createContext())); - assertEquals(new MatchNoDocsQuery(), ft.termsQuery(Arrays.asList("data_cold", "data_frozen"), createContext())); + assertEquals(new MatchAllDocsQuery(), ft.termsQuery("_tier", Arrays.asList("data_warm"), createContext())); + assertEquals(new MatchNoDocsQuery(), ft.termsQuery("_tier", Arrays.asList("data_cold", "data_frozen"), createContext())); - assertEquals(new MatchNoDocsQuery(), ft.termsQuery(Arrays.asList("data_warm"), createContextWithoutSetting())); - assertEquals(new MatchNoDocsQuery(), ft.termsQuery(Arrays.asList(""), createContextWithoutSetting())); + assertEquals(new MatchNoDocsQuery(), ft.termsQuery("_tier", Arrays.asList("data_warm"), createContextWithoutSetting())); + assertEquals(new MatchNoDocsQuery(), ft.termsQuery("_tier", Arrays.asList(""), createContextWithoutSetting())); } public void testExistsQuery() { MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE; - assertEquals(new MatchAllDocsQuery(), ft.existsQuery(createContext())); - 
assertEquals(new MatchNoDocsQuery(), ft.existsQuery(createContextWithoutSetting())); + assertEquals(new MatchAllDocsQuery(), ft.existsQuery("_tier", createContext())); + assertEquals(new MatchNoDocsQuery(), ft.existsQuery("_tier", createContextWithoutSetting())); } public void testRegexpQuery() { MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE; QueryShardException e = expectThrows( QueryShardException.class, - () -> assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("ind.x", 0, 0, 10, null, createContext())) + () -> assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("_tier", "ind.x", 0, 0, 10, null, createContext())) ); assertThat(e.getMessage(), containsString("Can only use regexp queries on keyword and text fields")); } @@ -91,10 +91,10 @@ public void testFetchValue() throws IOException { SourceLookup lookup = new SourceLookup(); List ignoredValues = new ArrayList<>(); - ValueFetcher valueFetcher = ft.valueFetcher(createContext(), null); + ValueFetcher valueFetcher = ft.valueFetcher("_tier", createContext(), null); assertEquals(singletonList("data_warm"), valueFetcher.fetchValues(lookup, ignoredValues)); - ValueFetcher emptyValueFetcher = ft.valueFetcher(createContextWithoutSetting(), null); + ValueFetcher emptyValueFetcher = ft.valueFetcher("_tier", createContextWithoutSetting(), null); assertTrue(emptyValueFetcher.fetchValues(lookup, ignoredValues).isEmpty()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java index c576c66948373..3145d71e05ab0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java @@ -38,6 
+38,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MockFieldMapper; @@ -631,9 +632,9 @@ private void runTestOnIndices(int numberIndices, CheckedConsumer types = new ArrayList<>(); for (int i = 0; i < 11; i++) { // the tests use fields 1 to 10. // This field has a value. - types.add(new MockFieldMapper(new KeywordFieldMapper.KeywordFieldType("field-" + i))); + types.add(new MockFieldMapper(new MappedField("field-" + i, new KeywordFieldMapper.KeywordFieldType()))); // This field never has a value - types.add(new MockFieldMapper(new KeywordFieldMapper.KeywordFieldType("dne-" + i))); + types.add(new MockFieldMapper(new MappedField("dne-" + i, new KeywordFieldMapper.KeywordFieldType()))); } MappingLookup mappingLookup = MappingLookup.fromMappers(Mapping.EMPTY, types, emptyList(), emptyList()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index dc5e7475e1c7d..d69207320a1cb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import 
org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MockFieldMapper; @@ -72,7 +72,7 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT public void testDLS() throws Exception { ShardId shardId = new ShardId("_index", "_na_", 0); - MappingLookup mappingLookup = createMappingLookup(List.of(new KeywordFieldType("field"))); + MappingLookup mappingLookup = createMappingLookup(List.of(new MappedField("field", new KeywordFieldType()))); ScriptService scriptService = mock(ScriptService.class); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); @@ -207,7 +207,11 @@ protected IndicesAccessControl getIndicesAccessControl() { public void testDLSWithLimitedPermissions() throws Exception { ShardId shardId = new ShardId("_index", "_na_", 0); MappingLookup mappingLookup = createMappingLookup( - List.of(new KeywordFieldType("field"), new KeywordFieldType("f1"), new KeywordFieldType("f2")) + List.of( + new MappedField("field", new KeywordFieldType()), + new MappedField("f1", new KeywordFieldType()), + new MappedField("f2", new KeywordFieldType()) + ) ); ScriptService scriptService = mock(ScriptService.class); @@ -342,7 +346,7 @@ protected IndicesAccessControl getIndicesAccessControl() { directory.close(); } - private static MappingLookup createMappingLookup(List concreteFields) { + private static MappingLookup createMappingLookup(List concreteFields) { List mappers = concreteFields.stream().map(MockFieldMapper::new).collect(Collectors.toList()); return MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList(), emptyList()); } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java index 4e8651619c539..10f820e7f9642 100644 --- 
a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java @@ -267,7 +267,7 @@ protected MultiSearchResponse shardOperation(Request request, ShardId shardId) t if (context.isFieldMapped(field) == false) { throw new IllegalStateException("Field [" + field + "] exists in the index but not in mappings"); } - return context.getFieldType(field); + return context.getMappedField(field); }); final SearchHit hit = new SearchHit(scoreDoc.doc, visitor.id(), Map.of(), Map.of()); hit.sourceRef(filterSource(fetchSourceContext, visitor.source())); diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java index 9d2887b69b3ed..76d5c07da7b36 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.indices.IndicesService; @@ -189,7 +190,7 @@ public void testTimestampFieldTypeExposedByAllIndicesServices() throws Exception } for (final IndicesService indicesService : internalCluster().getInstances(IndicesService.class)) { - assertNull(indicesService.getTimestampFieldType(index)); + assertNull(indicesService.getTimestampField(index)); } assertAcked(client().execute(FreezeIndexAction.INSTANCE, new 
FreezeRequest("index")).actionGet()); @@ -197,9 +198,9 @@ public void testTimestampFieldTypeExposedByAllIndicesServices() throws Exception for (final IndicesService indicesService : internalCluster().getInstances(IndicesService.class)) { final PlainActionFuture timestampFieldTypeFuture = new PlainActionFuture<>(); assertBusy(() -> { - final DateFieldMapper.DateFieldType timestampFieldType = indicesService.getTimestampFieldType(index); - assertNotNull(timestampFieldType); - timestampFieldTypeFuture.onResponse(timestampFieldType); + final MappedField timestampField = indicesService.getTimestampField(index); + assertNotNull(timestampField); + timestampFieldTypeFuture.onResponse((DateFieldMapper.DateFieldType) timestampField.type()); }); assertTrue(timestampFieldTypeFuture.isDone()); assertThat(timestampFieldTypeFuture.get().dateTimeFormatter().locale().toString(), equalTo(locale)); @@ -209,7 +210,7 @@ public void testTimestampFieldTypeExposedByAllIndicesServices() throws Exception assertAcked(client().execute(FreezeIndexAction.INSTANCE, new FreezeRequest("index").setFreeze(false)).actionGet()); ensureGreen("index"); for (final IndicesService indicesService : internalCluster().getInstances(IndicesService.class)) { - assertNull(indicesService.getTimestampFieldType(index)); + assertNull(indicesService.getTimestampField(index)); } } diff --git a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReaderTests.java b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReaderTests.java index 783f0ba76089b..eb4e7005d323c 100644 --- a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReaderTests.java +++ b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReaderTests.java @@ -97,9 +97,10 @@ public void testIsWithinQuery() throws IOException { writer.addDocument(doc); 
try (DirectoryReader reader = DirectoryReader.open(writer)) { RewriteCachingDirectoryReader cachingDirectoryReader = new RewriteCachingDirectoryReader(dir, reader.leaves(), null); - DateFieldMapper.DateFieldType dateFieldType = new DateFieldMapper.DateFieldType("test"); + DateFieldMapper.DateFieldType dateFieldType = new DateFieldMapper.DateFieldType(); QueryRewriteContext context = new QueryRewriteContext(parserConfig(), writableRegistry(), null, () -> 0); MappedFieldType.Relation relation = dateFieldType.isFieldWithinQuery( + "test", cachingDirectoryReader, 0, 10, @@ -111,10 +112,30 @@ public void testIsWithinQuery() throws IOException { ); assertEquals(relation, MappedFieldType.Relation.WITHIN); - relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 3, 11, true, true, ZoneOffset.UTC, null, context); + relation = dateFieldType.isFieldWithinQuery( + "test", + cachingDirectoryReader, + 3, + 11, + true, + true, + ZoneOffset.UTC, + null, + context + ); assertEquals(relation, MappedFieldType.Relation.INTERSECTS); - relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 10, 11, false, true, ZoneOffset.UTC, null, context); + relation = dateFieldType.isFieldWithinQuery( + "test", + cachingDirectoryReader, + 10, + 11, + false, + true, + ZoneOffset.UTC, + null, + context + ); assertEquals(relation, MappedFieldType.Relation.DISJOINT); } } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java index dd3f6d694664b..8b57445a59a19 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java +++ 
b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java @@ -41,7 +41,7 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa "Expected aggregate_metric_double type on field [" + fieldContext.field() + "], but got [" - + fieldContext.fieldType().typeName() + + fieldContext.mappedField().typeName() + "]" ); } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index 13691e3e8e676..fcf8d7baaeb29 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -238,26 +238,30 @@ public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { metricMappers.put(m, fieldMapper); } - EnumMap metricFields = metricMappers.entrySet() + EnumMap metricFields = metricMappers.entrySet() .stream() .collect( Collectors.toMap( Map.Entry::getKey, - e -> e.getValue().fieldType(), + e -> e.getValue().field(), (l, r) -> { throw new IllegalArgumentException("Duplicate keys " + l + "and " + r + "."); }, () -> new 
EnumMap<>(Metric.class) ) ); AggregateDoubleMetricFieldType metricFieldType = new AggregateDoubleMetricFieldType( - context.buildFullName(name), meta.getValue(), timeSeriesMetric.getValue() ); metricFieldType.setMetricFields(metricFields); metricFieldType.setDefaultMetric(defaultMetric.getValue()); - return new AggregateDoubleMetricFieldMapper(name, metricFieldType, metricMappers, this); + return new AggregateDoubleMetricFieldMapper( + name, + new MappedField(context.buildFullName(name), metricFieldType), + metricMappers, + this + ); } } @@ -268,18 +272,18 @@ public AggregateDoubleMetricFieldMapper build(MapperBuilderContext context) { public static final class AggregateDoubleMetricFieldType extends SimpleMappedFieldType { - private EnumMap metricFields; + private EnumMap metricFields; private Metric defaultMetric; private final MetricType metricType; - public AggregateDoubleMetricFieldType(String name) { - this(name, Collections.emptyMap(), null); + public AggregateDoubleMetricFieldType() { + this(Collections.emptyMap(), null); } - public AggregateDoubleMetricFieldType(String name, Map meta, MetricType metricType) { - super(name, true, false, false, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + public AggregateDoubleMetricFieldType(Map meta, MetricType metricType) { + super(true, false, false, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.metricType = metricType; } @@ -287,7 +291,7 @@ public AggregateDoubleMetricFieldType(String name, Map meta, Met * Return a delegate field type for a given metric sub-field * @return a field type */ - private NumberFieldMapper.NumberFieldType delegateFieldType(Metric metric) { + private MappedField delegateField(Metric metric) { return metricFields.get(metric); } @@ -295,8 +299,8 @@ private NumberFieldMapper.NumberFieldType delegateFieldType(Metric metric) { * Return a delegate field type for the default metric sub-field * @return a field type */ - private NumberFieldMapper.NumberFieldType delegateFieldType() { - 
return delegateFieldType(defaultMetric); + private MappedField delegateField() { + return delegateField(defaultMetric); } @Override @@ -309,16 +313,16 @@ public String typeName() { return CONTENT_TYPE; } - private void setMetricFields(EnumMap metricFields) { + private void setMetricFields(EnumMap metricFields) { this.metricFields = metricFields; } - public void addMetricField(Metric m, NumberFieldMapper.NumberFieldType subfield) { + public void addMetricField(String name, Metric m, MappedField subfield) { if (metricFields == null) { metricFields = new EnumMap<>(AggregateDoubleMetricFieldMapper.Metric.class); } - if (name() == null) { + if (name == null) { throw new IllegalArgumentException("Field of type [" + typeName() + "] must have a name before adding a subfield"); } metricFields.put(m, subfield); @@ -333,51 +337,60 @@ Metric getDefaultMetric() { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return delegateFieldType().mayExistInIndex(context); // TODO how does searching actually work here? + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return delegateField().mayExistInIndex(context); // TODO how does searching actually work here? 
} @Override - public Query existsQuery(SearchExecutionContext context) { - return delegateFieldType().existsQuery(context); + public Query existsQuery(String name, SearchExecutionContext context) { + return delegateField().existsQuery(context); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { if (value == null) { throw new IllegalArgumentException("Cannot search for null."); } - return delegateFieldType().termQuery(value, context); + return delegateField().termQuery(value, context); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - return delegateFieldType().termsQuery(values, context); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + return delegateField().termsQuery(values, context); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - return delegateFieldType().rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, context); + return ((NumberFieldMapper.NumberFieldType) delegateField().type()).rangeQuery( + delegateField().name(), + lowerTerm, + upperTerm, + includeLower, + includeUpper, + context + ); } @Override public Object valueForDisplay(Object value) { - return delegateFieldType().valueForDisplay(value); + return delegateField().valueForDisplay(value); } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - return delegateFieldType().docValueFormat(format, timeZone); + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { + return delegateField().docValueFormat(format, timeZone); } @Override public Relation isFieldWithinQuery( + String name, IndexReader reader, Object from, Object to, @@ -387,13 +400,13 @@ public Relation isFieldWithinQuery( DateMathParser 
dateMathParser, QueryRewriteContext context ) throws IOException { - return delegateFieldType().isFieldWithinQuery(reader, from, to, includeLower, includeUpper, timeZone, dateMathParser, context); + return delegateField().isFieldWithinQuery(reader, from, to, includeLower, includeUpper, timeZone, dateMathParser, context); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { return (cache, breakerService) -> new IndexAggregateDoubleMetricFieldData( - name(), + name, AggregateMetricsValuesSourceType.AGGREGATE_METRIC ) { @Override @@ -473,7 +486,7 @@ public SortField sortField( XFieldComparatorSource.Nested nested, boolean reverse ) { - return new SortedNumericSortField(delegateFieldType().name(), SortField.Type.DOUBLE, reverse); + return new SortedNumericSortField(delegateField().name(), SortField.Type.DOUBLE, reverse); } @Override @@ -493,12 +506,12 @@ public BucketedSort newBucketedSort( } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new SourceValueFetcher(name(), context) { + return new SourceValueFetcher(name, context) { @Override @SuppressWarnings("unchecked") protected Object parseSourceValue(Object value) { @@ -536,11 +549,11 @@ public MetricType getMetricType() { private AggregateDoubleMetricFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, EnumMap metricFieldMappers, Builder builder ) { - super(simpleName, mappedFieldType, 
MultiFields.empty(), CopyTo.empty()); + super(simpleName, mappedField, MultiFields.empty(), CopyTo.empty()); this.ignoreMalformed = builder.ignoreMalformed.getValue(); this.ignoreMalformedByDefault = builder.ignoreMalformed.getDefaultValue(); this.metrics = builder.metrics.getValue(); @@ -597,7 +610,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio if (metrics.contains(metric) == false) { throw new IllegalArgumentException( - "Aggregate metric [" + metric + "] does not exist in the mapping of field [" + mappedFieldType.name() + "]" + "Aggregate metric [" + metric + "] does not exist in the mapping of field [" + mappedField.name() + "]" ); } @@ -607,7 +620,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, subParser); NumberFieldMapper delegateFieldMapper = metricFieldMappers.get(metric); // We don't accept arrays of metrics - if (context.doc().getField(delegateFieldMapper.fieldType().name()) != null) { + if (context.doc().getField(delegateFieldMapper.field().name()) != null) { throw new IllegalArgumentException( "Field [" + name() @@ -626,7 +639,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio Number n = context.doc().getField(delegateFieldMapper.name()).numericValue(); if (n.intValue() < 0) { throw new IllegalArgumentException( - "Aggregate metric [" + metric.name() + "] of field [" + mappedFieldType.name() + "] cannot be a negative number" + "Aggregate metric [" + metric.name() + "] of field [" + mappedField.name() + "] cannot be a negative number" ); } } @@ -637,7 +650,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio // Check if all required metrics have been parsed. 
if (metricsParsed.containsAll(metrics) == false) { throw new IllegalArgumentException( - "Aggregate metric field [" + mappedFieldType.name() + "] must contain all metrics " + metrics.toString() + "Aggregate metric field [" + mappedField.name() + "] must contain all metrics " + metrics.toString() ); } } catch (Exception e) { @@ -649,8 +662,8 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio // If ignoreMalformed == true, clear all parsed fields Set ignoreFieldNames = Sets.newHashSetWithExpectedSize(metricFieldMappers.size()); for (NumberFieldMapper m : metricFieldMappers.values()) { - context.addIgnoredField(m.fieldType().name()); - ignoreFieldNames.add(m.fieldType().name()); + context.addIgnoredField(m.field().name()); + ignoreFieldNames.add(m.field().name()); } // Parsing a metric sub-field is delegated to the delegate field mapper by calling method // delegateFieldMapper.parse(context). Unfortunately, this method adds the parsed sub-field diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java index bd81b6da87887..de2605cf5d24c 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedAvgAggregatorTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import 
org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -116,26 +116,26 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private MappedField createDefaultFieldType(String fieldName) { + AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(); for (Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); - NumberFieldMapper.NumberFieldType subfield = new NumberFieldMapper.NumberFieldType( + MappedField subfield = new MappedField( subfieldName, - NumberFieldMapper.NumberType.DOUBLE + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) ); - fieldType.addMetricField(m, subfield); + fieldType.addMetricField(fieldName, m, subfield); } fieldType.setDefaultMetric(Metric.sum); - return fieldType; + return new MappedField(fieldName, fieldType); } private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = createDefaultFieldType(FIELD_NAME); - AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, FIELD_NAME); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + MappedField mappedField = createDefaultFieldType(FIELD_NAME); + AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(mappedField, FIELD_NAME); + testCase(aggregationBuilder, query, buildIndex, verify, mappedField); } @Override @@ -144,7 +144,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder 
createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new AvgAggregationBuilder("avg_agg").field(fieldName); } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java index e3dc1236e38de..f437bca8a6107 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMaxAggregatorTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -116,26 +116,26 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private MappedField createDefaultFieldType(String fieldName) { + AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(); for (Metric m : List.of(Metric.min, Metric.max)) { String subfieldName = subfieldName(fieldName, m); - NumberFieldMapper.NumberFieldType subfield = new NumberFieldMapper.NumberFieldType( + MappedField subfield = new MappedField( subfieldName, 
- NumberFieldMapper.NumberType.DOUBLE + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) ); - fieldType.addMetricField(m, subfield); + fieldType.addMetricField(fieldName, m, subfield); } fieldType.setDefaultMetric(Metric.min); - return fieldType; + return new MappedField(fieldName, fieldType); } private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = createDefaultFieldType(FIELD_NAME); - AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, FIELD_NAME); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + MappedField mappedField = createDefaultFieldType(FIELD_NAME); + AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(mappedField, FIELD_NAME); + testCase(aggregationBuilder, query, buildIndex, verify, mappedField); } @Override @@ -144,7 +144,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new MaxAggregationBuilder("max_agg").field(fieldName); } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java index f4c08c1adba8b..d82de5de51156 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedMinAggregatorTests.java @@ -15,7 +15,7 @@ import 
org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -116,26 +116,26 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private MappedField createDefaultFieldType(String fieldName) { + AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(); for (Metric m : List.of(Metric.min, Metric.max)) { String subfieldName = subfieldName(fieldName, m); - NumberFieldMapper.NumberFieldType subfield = new NumberFieldMapper.NumberFieldType( + MappedField subfield = new MappedField( subfieldName, - NumberFieldMapper.NumberType.DOUBLE + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) ); - fieldType.addMetricField(m, subfield); + fieldType.addMetricField(fieldName, m, subfield); } fieldType.setDefaultMetric(Metric.min); - return fieldType; + return new MappedField(fieldName, fieldType); } private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = createDefaultFieldType(FIELD_NAME); - AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, FIELD_NAME); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + MappedField mappedField = createDefaultFieldType(FIELD_NAME); + AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(mappedField, FIELD_NAME); + testCase(aggregationBuilder, query, buildIndex, 
verify, mappedField); } @Override @@ -144,7 +144,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new MinAggregationBuilder("min_agg").field(fieldName); } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java index eef1690c01e59..db67a1947b339 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedSumAggregatorTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -116,26 +116,26 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private MappedField createDefaultFieldType(String fieldName) { + AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(); for 
(Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); - NumberFieldMapper.NumberFieldType subfield = new NumberFieldMapper.NumberFieldType( + MappedField subfield = new MappedField( subfieldName, - NumberFieldMapper.NumberType.DOUBLE + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) ); - fieldType.addMetricField(m, subfield); + fieldType.addMetricField(fieldName, m, subfield); } fieldType.setDefaultMetric(Metric.sum); - return fieldType; + return new MappedField(fieldName, fieldType); } private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = createDefaultFieldType(FIELD_NAME); - AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, FIELD_NAME); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + MappedField mappedField = createDefaultFieldType(FIELD_NAME); + AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(mappedField, FIELD_NAME); + testCase(aggregationBuilder, query, buildIndex, verify, mappedField); } @Override @@ -144,7 +144,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new SumAggregationBuilder("sum_agg").field(fieldName); } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java index f934ba767392f..065e24b874ac6 100644 --- 
a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/aggregations/metrics/AggregateMetricBackedValueCountAggregatorTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.core.CheckedConsumer; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -115,26 +115,26 @@ public void testQueryFiltering() throws IOException { * @param fieldName the name of the field * @return the created field type */ - private AggregateDoubleMetricFieldType createDefaultFieldType(String fieldName) { - AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(fieldName); + private MappedField createDefaultFieldType(String fieldName) { + AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(); for (Metric m : List.of(Metric.value_count, Metric.sum)) { String subfieldName = subfieldName(fieldName, m); - NumberFieldMapper.NumberFieldType subfield = new NumberFieldMapper.NumberFieldType( + MappedField subfield = new MappedField( subfieldName, - NumberFieldMapper.NumberType.DOUBLE + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) ); - fieldType.addMetricField(m, subfield); + fieldType.addMetricField(fieldName, m, subfield); } fieldType.setDefaultMetric(Metric.sum); - return fieldType; + return new MappedField(fieldName, fieldType); } private void testCase(Query query, CheckedConsumer buildIndex, Consumer verify) throws IOException { - MappedFieldType fieldType = createDefaultFieldType(FIELD_NAME); - 
AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, FIELD_NAME); - testCase(aggregationBuilder, query, buildIndex, verify, fieldType); + MappedField mappedField = createDefaultFieldType(FIELD_NAME); + AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(mappedField, FIELD_NAME); + testCase(aggregationBuilder, query, buildIndex, verify, mappedField); } @Override @@ -143,7 +143,7 @@ protected List getSearchPlugins() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new ValueCountAggregationBuilder("value_count_agg").field(fieldName); } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java index 904effcd6283e..c1677332e29cb 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.Query; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -516,20 +517,21 @@ public void testNoSubFieldsIterated() throws IOException { public void testFieldCaps() throws IOException { MapperService aggMetricMapperService = 
createMapperService(fieldMapping(this::minimalMapping)); - MappedFieldType fieldType = aggMetricMapperService.fieldType("field"); + MappedFieldType fieldType = aggMetricMapperService.mappedField("field").type(); assertThat(fieldType.familyTypeName(), equalTo("double")); assertTrue(fieldType.isSearchable()); - assertTrue(fieldType.isAggregatable()); + assertTrue(fieldType.isAggregatable("field")); } /* * Since all queries for aggregate_metric_double fields are delegated to their default_metric numeric * sub-field, we override this method so that testExistsQueryMinimalMapping() passes successfully. */ - protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { + protected void assertExistsQuery(MappedField mappedField, Query query, LuceneDocument fields) { assertThat(query, Matchers.instanceOf(FieldExistsQuery.class)); FieldExistsQuery fieldExistsQuery = (FieldExistsQuery) query; - String defaultMetric = ((AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType) fieldType).getDefaultMetric().name(); + String defaultMetric = ((AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType) mappedField.type()).getDefaultMetric() + .name(); assertEquals("field." 
+ defaultMetric, fieldExistsQuery.getField()); assertNoFieldNamesField(fields); } @@ -556,7 +558,7 @@ public void testMetricType() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType ft = - (AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType) mapperService.fieldType("field"); + (AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType) mapperService.mappedField("field").type(); assertNull(ft.getMetricType()); assertMetricType("gauge", AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType::getMetricType); diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java index 6ccb2cac33daf..4956f33aac08d 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldTypeTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.script.DocReader; @@ -44,58 +44,58 @@ public class AggregateDoubleMetricFieldTypeTests extends FieldTypeTestCase { - protected AggregateDoubleMetricFieldType createDefaultFieldType(String name, Map meta, Metric defaultMetric) { 
- AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(name, meta, null); + protected MappedField createDefaultFieldType(String name, Map meta, Metric defaultMetric) { + AggregateDoubleMetricFieldType fieldType = new AggregateDoubleMetricFieldType(meta, null); for (AggregateDoubleMetricFieldMapper.Metric m : List.of( AggregateDoubleMetricFieldMapper.Metric.min, AggregateDoubleMetricFieldMapper.Metric.max )) { - String subfieldName = subfieldName(fieldType.name(), m); - NumberFieldMapper.NumberFieldType subfield = new NumberFieldMapper.NumberFieldType( + String subfieldName = subfieldName(name, m); + MappedField subfield = new MappedField( subfieldName, - NumberFieldMapper.NumberType.DOUBLE + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE) ); - fieldType.addMetricField(m, subfield); + fieldType.addMetricField(name, m, subfield); } fieldType.setDefaultMetric(defaultMetric); - return fieldType; + return new MappedField(name, fieldType); } public void testTermQuery() { - final MappedFieldType fieldType = createDefaultFieldType("foo", Collections.emptyMap(), Metric.max); - Query query = fieldType.termQuery(55.2, MOCK_CONTEXT); + final MappedField mappedField = createDefaultFieldType("foo", Collections.emptyMap(), Metric.max); + Query query = mappedField.termQuery(55.2, MOCK_CONTEXT); assertThat(query, equalTo(DoublePoint.newRangeQuery("foo.max", 55.2, 55.2))); } public void testTermsQuery() { - final MappedFieldType fieldType = createDefaultFieldType("foo", Collections.emptyMap(), Metric.max); - Query query = fieldType.termsQuery(asList(55.2, 500.3), MOCK_CONTEXT); + final MappedField mappedField = createDefaultFieldType("foo", Collections.emptyMap(), Metric.max); + Query query = mappedField.termsQuery(asList(55.2, 500.3), MOCK_CONTEXT); assertThat(query, equalTo(DoublePoint.newSetQuery("foo.max", 55.2, 500.3))); } public void testRangeQuery() { - final MappedFieldType fieldType = createDefaultFieldType("foo", 
Collections.emptyMap(), Metric.max); - Query query = fieldType.rangeQuery(10.1, 100.1, true, true, null, null, null, MOCK_CONTEXT); + final MappedField mappedField = createDefaultFieldType("foo", Collections.emptyMap(), Metric.max); + Query query = mappedField.rangeQuery(10.1, 100.1, true, true, null, null, null, MOCK_CONTEXT); assertThat(query, instanceOf(IndexOrDocValuesQuery.class)); } public void testFetchSourceValueWithOneMetric() throws IOException { - final MappedFieldType fieldType = createDefaultFieldType("field", Collections.emptyMap(), Metric.min); + final MappedField mappedField = createDefaultFieldType("field", Collections.emptyMap(), Metric.min); final double defaultValue = 45.8; final Map metric = Collections.singletonMap("min", defaultValue); - assertEquals(List.of(defaultValue), fetchSourceValue(fieldType, metric)); + assertEquals(List.of(defaultValue), fetchSourceValue(mappedField, metric)); } public void testFetchSourceValueWithMultipleMetrics() throws IOException { - final MappedFieldType fieldType = createDefaultFieldType("field", Collections.emptyMap(), Metric.max); + final MappedField mappedField = createDefaultFieldType("field", Collections.emptyMap(), Metric.max); final double defaultValue = 45.8; final Map metric = Map.of("min", 14.2, "max", defaultValue); - assertEquals(List.of(defaultValue), fetchSourceValue(fieldType, metric)); + assertEquals(List.of(defaultValue), fetchSourceValue(mappedField, metric)); } /** Tests that aggregate_metric_double uses the default_metric subfield's doc-values as values in scripts */ public void testUsedInScript() throws IOException { - final MappedFieldType mappedFieldType = createDefaultFieldType("field", Collections.emptyMap(), Metric.max); + final MappedField mappedField = createDefaultFieldType("field", Collections.emptyMap(), Metric.max); try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument( List.of( @@ -117,10 +117,10 @@ public 
void testUsedInScript() throws IOException { ); try (DirectoryReader reader = iw.getReader()) { SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); - when(searchExecutionContext.getFieldType(anyString())).thenReturn(mappedFieldType); + when(searchExecutionContext.getMappedField(anyString())).thenReturn(mappedField); when(searchExecutionContext.allowExpensiveQueries()).thenReturn(true); SearchLookup lookup = new SearchLookup( - searchExecutionContext::getFieldType, + searchExecutionContext::getMappedField, (mft, lookupSupplier) -> mft.fielddataBuilder("test", lookupSupplier).build(null, null) ); when(searchExecutionContext.lookup()).thenReturn(lookup); diff --git a/x-pack/plugin/mapper-constant-keyword/src/internalClusterTest/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/internalClusterTest/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index e9077f3cb8a97..4aff059dd3731 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/internalClusterTest/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/internalClusterTest/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; @@ -51,7 +52,7 @@ protected Object getSampleValueForQuery() { } @Override - protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { + protected void assertExistsQuery(MappedField mappedField, 
Query query, LuceneDocument fields) { assertThat(query, instanceOf(MatchNoDocsQuery.class)); assertNoFieldNamesField(fields); } @@ -150,7 +151,7 @@ public void testNumericValue() throws IOException { b.field("type", "constant_keyword"); b.field("value", 74); })); - ConstantKeywordFieldType ft = (ConstantKeywordFieldType) mapperService.fieldType("field"); + ConstantKeywordFieldType ft = (ConstantKeywordFieldType) mapperService.mappedField("field").type(); assertEquals("74", ft.value()); } diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index f1f1bccba22c7..d7148319478a8 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -30,7 +30,7 @@ import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; @@ -95,7 +95,7 @@ protected Parameter[] getParameters() { public ConstantKeywordFieldMapper build(MapperBuilderContext context) { return new ConstantKeywordFieldMapper( name, - new ConstantKeywordFieldType(context.buildFullName(name), value.getValue(), meta.getValue()) + new MappedField(context.buildFullName(name), new ConstantKeywordFieldType(value.getValue(), meta.getValue())) ); } } @@ -106,13 +106,13 @@ public static final class 
ConstantKeywordFieldType extends ConstantFieldType { private final String value; - public ConstantKeywordFieldType(String name, String value, Map meta) { - super(name, meta); + public ConstantKeywordFieldType(String value, Map meta) { + super(meta); this.value = value; } - public ConstantKeywordFieldType(String name, String value) { - this(name, value, Collections.emptyMap()); + public ConstantKeywordFieldType(String value) { + this(value, Collections.emptyMap()); } /** Return the value that this field wraps. This may be {@code null} if the field is not configured yet. */ @@ -131,26 +131,32 @@ public String familyTypeName() { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { return new ConstantIndexFieldData.Builder( value, - name(), + name, CoreValuesSourceType.KEYWORD, (dv, n) -> new ConstantKeywordDocValuesField(FieldData.toString(dv), n) ); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } return value == null ? 
(lookup, ignoredValues) -> List.of() : (lookup, ignoredValues) -> List.of(value); } @Override - public TermsEnum getTerms(boolean caseInsensitive, String string, SearchExecutionContext queryShardContext, String searchAfter) { + public TermsEnum getTerms( + String name, + boolean caseInsensitive, + String string, + SearchExecutionContext queryShardContext, + String searchAfter + ) { if (value == null) { return TermsEnum.EMPTY; } @@ -178,12 +184,13 @@ protected boolean matches(String pattern, boolean caseInsensitive, SearchExecuti } @Override - public Query existsQuery(SearchExecutionContext context) { + public Query existsQuery(String name, SearchExecutionContext context) { return value != null ? new MatchAllDocsQuery() : new MatchNoDocsQuery(); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -209,6 +216,7 @@ public Query rangeQuery( @Override public Query fuzzyQuery( + String name, Object term, Fuzziness fuzziness, int prefixLength, @@ -245,6 +253,7 @@ public Query fuzzyQuery( @Override public Query regexpQuery( + String name, String regexp, int syntaxFlags, int matchFlags, @@ -267,8 +276,8 @@ public Query regexpQuery( } - ConstantKeywordFieldMapper(String simpleName, MappedFieldType mappedFieldType) { - super(simpleName, mappedFieldType, MultiFields.empty(), CopyTo.empty()); + ConstantKeywordFieldMapper(String simpleName, MappedField mappedField) { + super(simpleName, mappedField, MultiFields.empty(), CopyTo.empty()); } @Override @@ -286,8 +295,8 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } if (fieldType().value == null) { - ConstantKeywordFieldType newFieldType = new ConstantKeywordFieldType(fieldType().name(), value, fieldType().meta()); - Mapper update = new ConstantKeywordFieldMapper(simpleName(), newFieldType); + ConstantKeywordFieldType newFieldType = new ConstantKeywordFieldType(value, fieldType().meta()); + Mapper update = new 
ConstantKeywordFieldMapper(simpleName(), new MappedField(name(), newFieldType)); context.addDynamicMapper(update); } else if (Objects.equals(fieldType().value, value) == false) { throw new IllegalArgumentException( diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldTypeTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldTypeTests.java index c376b9c10ea12..675017e90c5da 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldTypeTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldTypeTests.java @@ -25,91 +25,91 @@ public class ConstantKeywordFieldTypeTests extends FieldTypeTestCase { public void testTermQuery() { - ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foo"); - assertEquals(new MatchAllDocsQuery(), ft.termQuery("foo", null)); - assertEquals(new MatchAllDocsQuery(), ft.termQueryCaseInsensitive("fOo", null)); - assertEquals(new MatchNoDocsQuery(), ft.termQuery("bar", null)); - assertEquals(new MatchNoDocsQuery(), ft.termQueryCaseInsensitive("bAr", null)); - ConstantKeywordFieldType bar = new ConstantKeywordFieldType("f", "bar"); - assertEquals(new MatchNoDocsQuery(), bar.termQuery("foo", null)); - assertEquals(new MatchNoDocsQuery(), bar.termQueryCaseInsensitive("fOo", null)); + ConstantKeywordFieldType ft = new ConstantKeywordFieldType("foo"); + assertEquals(new MatchAllDocsQuery(), ft.termQuery("f", "foo", null)); + assertEquals(new MatchAllDocsQuery(), ft.termQueryCaseInsensitive("f", "fOo", null)); + assertEquals(new MatchNoDocsQuery(), ft.termQuery("f", "bar", null)); + assertEquals(new MatchNoDocsQuery(), ft.termQueryCaseInsensitive("f", "bAr", null)); + ConstantKeywordFieldType bar = new ConstantKeywordFieldType("bar"); 
+ assertEquals(new MatchNoDocsQuery(), bar.termQuery("f", "foo", null)); + assertEquals(new MatchNoDocsQuery(), bar.termQueryCaseInsensitive("f", "fOo", null)); } public void testTermsQuery() { - ConstantKeywordFieldType bar = new ConstantKeywordFieldType("f", "bar"); - assertEquals(new MatchNoDocsQuery(), bar.termsQuery(Collections.singletonList("foo"), null)); - ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foo"); - assertEquals(new MatchAllDocsQuery(), ft.termsQuery(Collections.singletonList("foo"), null)); - assertEquals(new MatchAllDocsQuery(), ft.termsQuery(Arrays.asList("bar", "foo", "quux"), null)); - assertEquals(new MatchNoDocsQuery(), ft.termsQuery(Collections.emptyList(), null)); - assertEquals(new MatchNoDocsQuery(), ft.termsQuery(Collections.singletonList("bar"), null)); - assertEquals(new MatchNoDocsQuery(), ft.termsQuery(Arrays.asList("bar", "quux"), null)); + ConstantKeywordFieldType bar = new ConstantKeywordFieldType("bar"); + assertEquals(new MatchNoDocsQuery(), bar.termsQuery("f", Collections.singletonList("foo"), null)); + ConstantKeywordFieldType ft = new ConstantKeywordFieldType("foo"); + assertEquals(new MatchAllDocsQuery(), ft.termsQuery("f", Collections.singletonList("foo"), null)); + assertEquals(new MatchAllDocsQuery(), ft.termsQuery("f", Arrays.asList("bar", "foo", "quux"), null)); + assertEquals(new MatchNoDocsQuery(), ft.termsQuery("f", Collections.emptyList(), null)); + assertEquals(new MatchNoDocsQuery(), ft.termsQuery("f", Collections.singletonList("bar"), null)); + assertEquals(new MatchNoDocsQuery(), ft.termsQuery("f", Arrays.asList("bar", "quux"), null)); } public void testWildcardQuery() { - ConstantKeywordFieldType bar = new ConstantKeywordFieldType("f", "bar"); - assertEquals(new MatchNoDocsQuery(), bar.wildcardQuery("f*o", null, false, null)); - assertEquals(new MatchNoDocsQuery(), bar.wildcardQuery("F*o", null, true, null)); - ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foo"); - 
assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("f*o", null, false, null)); - assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("F*o", null, true, null)); - assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("b*r", null, false, null)); - assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("B*r", null, true, null)); + ConstantKeywordFieldType bar = new ConstantKeywordFieldType("bar"); + assertEquals(new MatchNoDocsQuery(), bar.wildcardQuery("f", "f*o", null, false, null)); + assertEquals(new MatchNoDocsQuery(), bar.wildcardQuery("f", "F*o", null, true, null)); + ConstantKeywordFieldType ft = new ConstantKeywordFieldType("foo"); + assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("f", "f*o", null, false, null)); + assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("f", "F*o", null, true, null)); + assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("f", "b*r", null, false, null)); + assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("f", "B*r", null, true, null)); } public void testPrefixQuery() { - ConstantKeywordFieldType bar = new ConstantKeywordFieldType("f", "bar"); - assertEquals(new MatchNoDocsQuery(), bar.prefixQuery("fo", null, false, null)); - assertEquals(new MatchNoDocsQuery(), bar.prefixQuery("fO", null, true, null)); - ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foo"); - assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("fo", null, false, null)); - assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("fO", null, true, null)); - assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("ba", null, false, null)); - assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("Ba", null, true, null)); + ConstantKeywordFieldType bar = new ConstantKeywordFieldType("bar"); + assertEquals(new MatchNoDocsQuery(), bar.prefixQuery("f", "fo", null, false, null)); + assertEquals(new MatchNoDocsQuery(), bar.prefixQuery("f", "fO", null, true, null)); + ConstantKeywordFieldType ft = new ConstantKeywordFieldType("foo"); + 
assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("f", "fo", null, false, null)); + assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("f", "fO", null, true, null)); + assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("f", "ba", null, false, null)); + assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("f", "Ba", null, true, null)); } public void testExistsQuery() { - ConstantKeywordFieldType none = new ConstantKeywordFieldType("f", null); - assertEquals(new MatchNoDocsQuery(), none.existsQuery(null)); - ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foo"); - assertEquals(new MatchAllDocsQuery(), ft.existsQuery(null)); + ConstantKeywordFieldType none = new ConstantKeywordFieldType(null); + assertEquals(new MatchNoDocsQuery(), none.existsQuery("f", null)); + ConstantKeywordFieldType ft = new ConstantKeywordFieldType("foo"); + assertEquals(new MatchAllDocsQuery(), ft.existsQuery("f", null)); } public void testRangeQuery() { - ConstantKeywordFieldType none = new ConstantKeywordFieldType("f", null); - assertEquals(new MatchNoDocsQuery(), none.rangeQuery(null, null, randomBoolean(), randomBoolean(), null, null, null, null)); - assertEquals(new MatchNoDocsQuery(), none.rangeQuery(null, "foo", randomBoolean(), randomBoolean(), null, null, null, null)); - assertEquals(new MatchNoDocsQuery(), none.rangeQuery("foo", null, randomBoolean(), randomBoolean(), null, null, null, null)); - ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foo"); - assertEquals(new MatchAllDocsQuery(), ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null, null, null, null)); - assertEquals(new MatchAllDocsQuery(), ft.rangeQuery("foo", null, true, randomBoolean(), null, null, null, null)); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("foo", null, false, randomBoolean(), null, null, null, null)); - assertEquals(new MatchAllDocsQuery(), ft.rangeQuery(null, "foo", randomBoolean(), true, null, null, null, null)); - assertEquals(new 
MatchNoDocsQuery(), ft.rangeQuery(null, "foo", randomBoolean(), false, null, null, null, null)); - assertEquals(new MatchAllDocsQuery(), ft.rangeQuery("abc", "xyz", randomBoolean(), randomBoolean(), null, null, null, null)); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("abc", "def", randomBoolean(), randomBoolean(), null, null, null, null)); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("mno", "xyz", randomBoolean(), randomBoolean(), null, null, null, null)); + ConstantKeywordFieldType none = new ConstantKeywordFieldType(null); + assertEquals(new MatchNoDocsQuery(), none.rangeQuery("f", null, null, randomBoolean(), randomBoolean(), null, null, null, null)); + assertEquals(new MatchNoDocsQuery(), none.rangeQuery("f", null, "foo", randomBoolean(), randomBoolean(), null, null, null, null)); + assertEquals(new MatchNoDocsQuery(), none.rangeQuery("f", "foo", null, randomBoolean(), randomBoolean(), null, null, null, null)); + ConstantKeywordFieldType ft = new ConstantKeywordFieldType("foo"); + assertEquals(new MatchAllDocsQuery(), ft.rangeQuery("f", null, null, randomBoolean(), randomBoolean(), null, null, null, null)); + assertEquals(new MatchAllDocsQuery(), ft.rangeQuery("f", "foo", null, true, randomBoolean(), null, null, null, null)); + assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("f", "foo", null, false, randomBoolean(), null, null, null, null)); + assertEquals(new MatchAllDocsQuery(), ft.rangeQuery("f", null, "foo", randomBoolean(), true, null, null, null, null)); + assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("f", null, "foo", randomBoolean(), false, null, null, null, null)); + assertEquals(new MatchAllDocsQuery(), ft.rangeQuery("f", "abc", "xyz", randomBoolean(), randomBoolean(), null, null, null, null)); + assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("f", "abc", "def", randomBoolean(), randomBoolean(), null, null, null, null)); + assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("f", "mno", "xyz", randomBoolean(), 
randomBoolean(), null, null, null, null)); } public void testFuzzyQuery() { - ConstantKeywordFieldType none = new ConstantKeywordFieldType("f", null); - assertEquals(new MatchNoDocsQuery(), none.fuzzyQuery("fooquux", Fuzziness.AUTO, 3, 50, randomBoolean(), null)); - ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foobar"); - assertEquals(new MatchAllDocsQuery(), ft.fuzzyQuery("foobaz", Fuzziness.AUTO, 3, 50, randomBoolean(), null)); - assertEquals(new MatchNoDocsQuery(), ft.fuzzyQuery("fooquux", Fuzziness.AUTO, 3, 50, randomBoolean(), null)); + ConstantKeywordFieldType none = new ConstantKeywordFieldType(null); + assertEquals(new MatchNoDocsQuery(), none.fuzzyQuery("f", "fooquux", Fuzziness.AUTO, 3, 50, randomBoolean(), null)); + ConstantKeywordFieldType ft = new ConstantKeywordFieldType("foobar"); + assertEquals(new MatchAllDocsQuery(), ft.fuzzyQuery("f", "foobaz", Fuzziness.AUTO, 3, 50, randomBoolean(), null)); + assertEquals(new MatchNoDocsQuery(), ft.fuzzyQuery("f", "fooquux", Fuzziness.AUTO, 3, 50, randomBoolean(), null)); } public void testRegexpQuery() { - ConstantKeywordFieldType none = new ConstantKeywordFieldType("f", null); - assertEquals(new MatchNoDocsQuery(), none.regexpQuery("f..o", RegExp.ALL, 0, 10, null, null)); - ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foo"); - assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("f.o", RegExp.ALL, 0, 10, null, null)); - assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("f..o", RegExp.ALL, 0, 10, null, null)); + ConstantKeywordFieldType none = new ConstantKeywordFieldType(null); + assertEquals(new MatchNoDocsQuery(), none.regexpQuery("f", "f..o", RegExp.ALL, 0, 10, null, null)); + ConstantKeywordFieldType ft = new ConstantKeywordFieldType("foo"); + assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("f", "f.o", RegExp.ALL, 0, 10, null, null)); + assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("f", "f..o", RegExp.ALL, 0, 10, null, null)); } public void 
testFetchValue() throws Exception { - MappedFieldType fieldType = new ConstantKeywordFieldMapper.ConstantKeywordFieldType("field", null); - ValueFetcher fetcher = fieldType.valueFetcher(null, null); + MappedFieldType fieldType = new ConstantKeywordFieldMapper.ConstantKeywordFieldType(null); + ValueFetcher fetcher = fieldType.valueFetcher("f", null, null); SourceLookup missingValueLookup = new SourceLookup(); SourceLookup nullValueLookup = new SourceLookup(); @@ -119,8 +119,8 @@ public void testFetchValue() throws Exception { assertTrue(fetcher.fetchValues(missingValueLookup, ignoredValues).isEmpty()); assertTrue(fetcher.fetchValues(nullValueLookup, ignoredValues).isEmpty()); - MappedFieldType valued = new ConstantKeywordFieldMapper.ConstantKeywordFieldType("field", "foo"); - fetcher = valued.valueFetcher(null, null); + MappedFieldType valued = new ConstantKeywordFieldMapper.ConstantKeywordFieldType("foo"); + fetcher = valued.valueFetcher("field", null, null); assertEquals(List.of("foo"), fetcher.fetchValues(missingValueLookup, ignoredValues)); assertEquals(List.of("foo"), fetcher.fetchValues(nullValueLookup, ignoredValues)); diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index ea1469bac9d40..d417890f03686 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -23,7 +23,7 @@ import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import 
org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MappingLookup; @@ -172,7 +172,6 @@ Number parsedNullValue() { @Override public UnsignedLongFieldMapper build(MapperBuilderContext context) { UnsignedLongFieldType fieldType = new UnsignedLongFieldType( - context.buildFullName(name), indexed.getValue(), stored.getValue(), hasDocValues.getValue(), @@ -181,7 +180,13 @@ public UnsignedLongFieldMapper build(MapperBuilderContext context) { dimension.getValue(), metric.getValue() ); - return new UnsignedLongFieldMapper(name, fieldType, multiFieldsBuilder.build(this, context), copyTo.build(), this); + return new UnsignedLongFieldMapper( + name, + new MappedField(context.buildFullName(name), fieldType), + multiFieldsBuilder.build(this, context), + copyTo.build(), + this + ); } } @@ -194,7 +199,6 @@ public static final class UnsignedLongFieldType extends SimpleMappedFieldType { private final MetricType metricType; public UnsignedLongFieldType( - String name, boolean indexed, boolean isStored, boolean hasDocValues, @@ -203,14 +207,14 @@ public UnsignedLongFieldType( boolean isDimension, MetricType metricType ) { - super(name, indexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); + super(indexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.nullValueFormatted = nullValueFormatted; this.isDimension = isDimension; this.metricType = metricType; } - public UnsignedLongFieldType(String name) { - this(name, true, false, true, null, Collections.emptyMap(), false, null); + public UnsignedLongFieldType() { + this(true, false, true, null, Collections.emptyMap(), false, null); } @Override @@ -219,23 +223,23 @@ public String typeName() { } @Override - public boolean mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(name()); + public boolean mayExistInIndex(String name, SearchExecutionContext 
context) { + return context.fieldExistsInIndex(name); } @Override - public Query termQuery(Object value, SearchExecutionContext context) { - failIfNotIndexed(); + public Query termQuery(String name, Object value, SearchExecutionContext context) { + failIfNotIndexed(name); Long longValue = parseTerm(value); if (longValue == null) { return new MatchNoDocsQuery(); } - return LongPoint.newExactQuery(name(), unsignedToSortableSignedLong(longValue)); + return LongPoint.newExactQuery(name, unsignedToSortableSignedLong(longValue)); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - failIfNotIndexed(); + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { + failIfNotIndexed(name); long[] lvalues = new long[values.size()]; int upTo = 0; for (Object value : values) { @@ -250,18 +254,19 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (upTo != lvalues.length) { lvalues = Arrays.copyOf(lvalues, upTo); } - return LongPoint.newSetQuery(name(), lvalues); + return LongPoint.newSetQuery(name, lvalues); } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - failIfNotIndexed(); + failIfNotIndexed(name); long l = Long.MIN_VALUE; long u = Long.MAX_VALUE; if (lowerTerm != null) { @@ -276,23 +281,23 @@ public Query rangeQuery( } if (l > u) return new MatchNoDocsQuery(); - Query query = LongPoint.newRangeQuery(name(), l, u); + Query query = LongPoint.newRangeQuery(name, l, u); if (hasDocValues()) { - Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u); + Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(name, l, u); query = new IndexOrDocValuesQuery(query, dvQuery); - if (context.indexSortedOnField(name())) { - query = new IndexSortSortedNumericDocValuesRangeQuery(name(), l, u, query); + if (context.indexSortedOnField(name)) { + 
query = new IndexSortSortedNumericDocValuesRangeQuery(name, l, u, query); } } return query; } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); return (cache, breakerService) -> { final IndexNumericFieldData signedLongValues = new SortedNumericIndexFieldData.Builder( - name(), + name, IndexNumericFieldData.NumericType.LONG, (dv, n) -> { throw new UnsupportedOperationException(); } ).build(cache, breakerService); @@ -301,12 +306,12 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new SourceValueFetcher(name(), context, nullValueFormatted) { + return new SourceValueFetcher(name, context, nullValueFormatted) { @Override protected Object parseSourceValue(Object value) { if (value.equals("")) { @@ -331,8 +336,8 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(String format, ZoneId timeZone) { - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, String format, ZoneId timeZone) { + checkNoTimeZone(name, timeZone); return DocValueFormat.UNSIGNED_LONG_SHIFTED; } @@ -478,14 +483,8 @@ public MetricType getMetricType() { private final boolean dimension; private final MetricType metricType; - private UnsignedLongFieldMapper( - String simpleName, - MappedFieldType mappedFieldType, - 
MultiFields multiFields, - CopyTo copyTo, - Builder builder - ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + private UnsignedLongFieldMapper(String simpleName, MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Builder builder) { + super(simpleName, mappedField, multiFields, copyTo); this.indexed = builder.indexed.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); this.stored = builder.stored.getValue(); @@ -533,7 +532,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } } catch (IllegalArgumentException e) { if (ignoreMalformed.value() && parser.currentToken().isValue()) { - context.addIgnoredField(mappedFieldType.name()); + context.addIgnoredField(mappedField.name()); return; } else { throw e; @@ -550,25 +549,25 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } if (dimension && numericValue != null) { - context.getDimensions().addUnsignedLong(fieldType().name(), numericValue); + context.getDimensions().addUnsignedLong(name(), numericValue); } List fields = new ArrayList<>(); if (indexed) { - fields.add(new LongPoint(fieldType().name(), numericValue)); + fields.add(new LongPoint(name(), numericValue)); } if (hasDocValues) { - fields.add(new SortedNumericDocValuesField(fieldType().name(), numericValue)); + fields.add(new SortedNumericDocValuesField(name(), numericValue)); } if (stored) { // for stored field, keeping original unsigned_long value in the String form String storedValued = isNullValue ? 
nullValue : Long.toUnsignedString(unsignedToSortableSignedLong(numericValue)); - fields.add(new StoredField(fieldType().name(), storedValued)); + fields.add(new StoredField(name(), storedValued)); } context.doc().addAll(fields); if (hasDocValues == false && (stored || indexed)) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java index 00f7ba6c72145..f6003959499a7 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; @@ -249,8 +250,8 @@ public void testExistsQueryDocValuesDisabled() throws IOException { public void testDimension() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - UnsignedLongFieldMapper.UnsignedLongFieldType ft = (UnsignedLongFieldMapper.UnsignedLongFieldType) mapperService.fieldType("field"); - assertFalse(ft.isDimension()); + MappedField mappedField = mapperService.mappedField("field"); + assertFalse(mappedField.isDimension()); assertDimension(true, UnsignedLongFieldMapper.UnsignedLongFieldType::isDimension); assertDimension(false, UnsignedLongFieldMapper.UnsignedLongFieldType::isDimension); @@ -305,8 
+306,8 @@ public void testDimensionMultiValuedField() throws IOException { public void testMetricType() throws IOException { // Test default setting MapperService mapperService = createMapperService(fieldMapping(b -> minimalMapping(b))); - UnsignedLongFieldMapper.UnsignedLongFieldType ft = (UnsignedLongFieldMapper.UnsignedLongFieldType) mapperService.fieldType("field"); - assertNull(ft.getMetricType()); + MappedField mappedField = mapperService.mappedField("field"); + assertNull(mappedField.getMetricType()); assertMetricType("gauge", UnsignedLongFieldMapper.UnsignedLongFieldType::getMetricType); assertMetricType("counter", UnsignedLongFieldMapper.UnsignedLongFieldType::getMetricType); diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java index cf47a4749e8ea..c6a38922370ff 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java @@ -10,7 +10,7 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.search.MatchNoDocsQuery; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.xpack.unsignedlong.UnsignedLongFieldMapper.UnsignedLongFieldType; @@ -26,74 +26,83 @@ public class UnsignedLongFieldTypeTests extends FieldTypeTestCase { public void testTermQuery() { - UnsignedLongFieldType ft = new UnsignedLongFieldType("my_unsigned_long"); + UnsignedLongFieldType ft = new UnsignedLongFieldType(); - assertEquals(LongPoint.newExactQuery("my_unsigned_long", -9223372036854775808L), 
ft.termQuery(0, null)); - assertEquals(LongPoint.newExactQuery("my_unsigned_long", 0L), ft.termQuery("9223372036854775808", null)); - assertEquals(LongPoint.newExactQuery("my_unsigned_long", 9223372036854775807L), ft.termQuery("18446744073709551615", null)); + assertEquals(LongPoint.newExactQuery("my_unsigned_long", -9223372036854775808L), ft.termQuery("my_unsigned_long", 0, null)); + assertEquals(LongPoint.newExactQuery("my_unsigned_long", 0L), ft.termQuery("my_unsigned_long", "9223372036854775808", null)); + assertEquals( + LongPoint.newExactQuery("my_unsigned_long", 9223372036854775807L), + ft.termQuery("my_unsigned_long", "18446744073709551615", null) + ); - assertEquals(new MatchNoDocsQuery(), ft.termQuery(-1L, null)); - assertEquals(new MatchNoDocsQuery(), ft.termQuery(10.5, null)); - assertEquals(new MatchNoDocsQuery(), ft.termQuery("18446744073709551616", null)); + assertEquals(new MatchNoDocsQuery(), ft.termQuery("my_unsigned_long", -1L, null)); + assertEquals(new MatchNoDocsQuery(), ft.termQuery("my_unsigned_long", 10.5, null)); + assertEquals(new MatchNoDocsQuery(), ft.termQuery("my_unsigned_long", "18446744073709551616", null)); - expectThrows(NumberFormatException.class, () -> ft.termQuery("18incorrectnumber", null)); + expectThrows(NumberFormatException.class, () -> ft.termQuery("my_unsigned_long", "18incorrectnumber", null)); } public void testTermsQuery() { - UnsignedLongFieldType ft = new UnsignedLongFieldType("my_unsigned_long"); + UnsignedLongFieldType ft = new UnsignedLongFieldType(); assertEquals( LongPoint.newSetQuery("my_unsigned_long", -9223372036854775808L, 0L, 9223372036854775807L), - ft.termsQuery(List.of("0", "9223372036854775808", "18446744073709551615"), null) + ft.termsQuery("my_unsigned_long", List.of("0", "9223372036854775808", "18446744073709551615"), null) ); - assertEquals(new MatchNoDocsQuery(), ft.termsQuery(List.of(-9223372036854775808L, -1L), null)); - assertEquals(new MatchNoDocsQuery(), ft.termsQuery(List.of("-0.5", "3.14", 
"18446744073709551616"), null)); + assertEquals(new MatchNoDocsQuery(), ft.termsQuery("my_unsigned_long", List.of(-9223372036854775808L, -1L), null)); + assertEquals(new MatchNoDocsQuery(), ft.termsQuery("my_unsigned_long", List.of("-0.5", "3.14", "18446744073709551616"), null)); - expectThrows(NumberFormatException.class, () -> ft.termsQuery(List.of("18incorrectnumber"), null)); + expectThrows(NumberFormatException.class, () -> ft.termsQuery("my_unsigned_long", List.of("18incorrectnumber"), null)); } public void testRangeQuery() { - UnsignedLongFieldType ft = new UnsignedLongFieldType( - "my_unsigned_long", - true, - false, - false, - null, - Collections.emptyMap(), - false, - null - ); + UnsignedLongFieldType ft = new UnsignedLongFieldType(true, false, false, null, Collections.emptyMap(), false, null); assertEquals( LongPoint.newRangeQuery("my_unsigned_long", -9223372036854775808L, -9223372036854775808L), - ft.rangeQuery(-1L, 0L, true, true, null) + ft.rangeQuery("my_unsigned_long", -1L, 0L, true, true, null) ); assertEquals( LongPoint.newRangeQuery("my_unsigned_long", -9223372036854775808L, -9223372036854775808L), - ft.rangeQuery(0.0, 0.5, true, true, null) + ft.rangeQuery("my_unsigned_long", 0.0, 0.5, true, true, null) ); assertEquals( LongPoint.newRangeQuery("my_unsigned_long", 0, 0), - ft.rangeQuery("9223372036854775807", "9223372036854775808", false, true, null) + ft.rangeQuery("my_unsigned_long", "9223372036854775807", "9223372036854775808", false, true, null) ); assertEquals( LongPoint.newRangeQuery("my_unsigned_long", -9223372036854775808L, 9223372036854775806L), - ft.rangeQuery(null, "18446744073709551614.5", true, true, null) + ft.rangeQuery("my_unsigned_long", null, "18446744073709551614.5", true, true, null) ); assertEquals( LongPoint.newRangeQuery("my_unsigned_long", 9223372036854775807L, 9223372036854775807L), - ft.rangeQuery("18446744073709551615", "18446744073709551616", true, true, null) + ft.rangeQuery("my_unsigned_long", "18446744073709551615", 
"18446744073709551616", true, true, null) ); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery(-1f, -0.5f, true, true, null)); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery(-1L, 0L, true, false, null)); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery(9223372036854775807L, 9223372036854775806L, true, true, null)); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("18446744073709551616", "18446744073709551616", true, true, null)); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("18446744073709551615", "18446744073709551616", false, true, null)); - assertEquals(new MatchNoDocsQuery(), ft.rangeQuery(9223372036854775807L, 9223372036854775806L, true, true, null)); + assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("my_unsigned_long", -1f, -0.5f, true, true, null)); + assertEquals(new MatchNoDocsQuery(), ft.rangeQuery("my_unsigned_long", -1L, 0L, true, false, null)); + assertEquals( + new MatchNoDocsQuery(), + ft.rangeQuery("my_unsigned_long", 9223372036854775807L, 9223372036854775806L, true, true, null) + ); + assertEquals( + new MatchNoDocsQuery(), + ft.rangeQuery("my_unsigned_long", "18446744073709551616", "18446744073709551616", true, true, null) + ); + assertEquals( + new MatchNoDocsQuery(), + ft.rangeQuery("my_unsigned_long", "18446744073709551615", "18446744073709551616", false, true, null) + ); + assertEquals( + new MatchNoDocsQuery(), + ft.rangeQuery("my_unsigned_long", 9223372036854775807L, 9223372036854775806L, true, true, null) + ); - expectThrows(NumberFormatException.class, () -> ft.rangeQuery("18incorrectnumber", "18incorrectnumber", true, true, null)); + expectThrows( + NumberFormatException.class, + () -> ft.rangeQuery("my_unsigned_long", "18incorrectnumber", "18incorrectnumber", true, true, null) + ); } public void testParseTermForTermQuery() { @@ -166,15 +175,15 @@ public void testParseUpperTermForRangeQuery() { } public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new 
UnsignedLongFieldMapper.Builder("field", false).build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new UnsignedLongFieldMapper.Builder("field", false).build(MapperBuilderContext.ROOT).field(); assertEquals(List.of(0L), fetchSourceValue(mapper, 0L)); assertEquals(List.of(9223372036854775807L), fetchSourceValue(mapper, 9223372036854775807L)); assertEquals(List.of(BIGINTEGER_2_64_MINUS_ONE), fetchSourceValue(mapper, "18446744073709551615")); assertEquals(List.of(), fetchSourceValue(mapper, "")); - MappedFieldType nullValueMapper = new UnsignedLongFieldMapper.Builder("field", false).nullValue("18446744073709551615") + MappedField nullValueMapper = new UnsignedLongFieldMapper.Builder("field", false).nullValue("18446744073709551615") .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); assertEquals(List.of(BIGINTEGER_2_64_MINUS_ONE), fetchSourceValue(nullValueMapper, "")); } } diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index ca1d722c6fc01..fb6ce90575565 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -36,7 +36,7 @@ import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; @@ -97,8 +97,8 @@ static class Builder extends FieldMapper.Builder { super(name); } - private 
VersionStringFieldType buildFieldType(MapperBuilderContext context, FieldType fieldtype) { - return new VersionStringFieldType(context.buildFullName(name), fieldtype, meta.getValue()); + private VersionStringFieldType buildFieldType(FieldType fieldtype) { + return new VersionStringFieldType(fieldtype, meta.getValue()); } @Override @@ -107,7 +107,7 @@ public VersionStringFieldMapper build(MapperBuilderContext context) { return new VersionStringFieldMapper( name, fieldtype, - buildFieldType(context, fieldtype), + new MappedField(context.buildFullName(name), buildFieldType(fieldtype)), multiFieldsBuilder.build(this, context), copyTo.build() ); @@ -123,8 +123,8 @@ protected Parameter[] getParameters() { public static final class VersionStringFieldType extends TermBasedFieldType { - private VersionStringFieldType(String name, FieldType fieldType, Map meta) { - super(name, true, false, true, new TextSearchInfo(fieldType, null, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER), meta); + private VersionStringFieldType(FieldType fieldType, Map meta) { + super(true, false, true, new TextSearchInfo(fieldType, null, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER), meta); } @Override @@ -133,23 +133,24 @@ public String typeName() { } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.toString(name(), context, format); + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { + return SourceValueFetcher.toString(name, context, format); } @Override - public Query existsQuery(SearchExecutionContext context) { - return new FieldExistsQuery(name()); + public Query existsQuery(String name, SearchExecutionContext context) { + return new FieldExistsQuery(name); } @Override public Query prefixQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context ) { - return wildcardQuery(value + "*", method, 
caseInsensitive, context); + return wildcardQuery(name, value + "*", method, caseInsensitive, context); } /** @@ -161,6 +162,7 @@ public Query prefixQuery( */ @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -173,7 +175,7 @@ public Query regexpQuery( "[regexp] queries cannot be executed when '" + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false." ); } - RegexpQuery query = new RegexpQuery(new Term(name(), new BytesRef(value)), syntaxFlags, matchFlags, maxDeterminizedStates) { + RegexpQuery query = new RegexpQuery(new Term(name, new BytesRef(value)), syntaxFlags, matchFlags, maxDeterminizedStates) { @Override protected TermsEnum getTermsEnum(Terms terms, AttributeSource atts) throws IOException { @@ -207,6 +209,7 @@ protected AcceptStatus accept(BytesRef term) throws IOException { */ @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -220,7 +223,7 @@ public Query fuzzyQuery( ); } return new FuzzyQuery( - new Term(name(), (BytesRef) value), + new Term(name, (BytesRef) value), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, @@ -248,6 +251,7 @@ protected AcceptStatus accept(BytesRef term) throws IOException { @Override public Query wildcardQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, @@ -259,13 +263,13 @@ public Query wildcardQuery( ); } - VersionFieldWildcardQuery query = new VersionFieldWildcardQuery(new Term(name(), value), caseInsensitive); + VersionFieldWildcardQuery query = new VersionFieldWildcardQuery(new Term(name, value), caseInsensitive); QueryParsers.setRewriteMethod(query, method); return query; } @Override - protected BytesRef indexedValueForSearch(Object value) { + protected BytesRef indexedValueForSearch(String name, Object value) { String valueAsString; if (value instanceof String) { valueAsString = (String) value; @@ -279,8 +283,8 @@ protected BytesRef 
indexedValueForSearch(Object value) { } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.KEYWORD, VersionStringDocValuesField::new); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + return new SortedSetOrdinalsIndexFieldData.Builder(name, CoreValuesSourceType.KEYWORD, VersionStringDocValuesField::new); } @Override @@ -292,23 +296,24 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { - checkNoFormat(format); - checkNoTimeZone(timeZone); + public DocValueFormat docValueFormat(String name, @Nullable String format, ZoneId timeZone) { + checkNoFormat(name, format); + checkNoTimeZone(name, timeZone); return VERSION_DOCVALUE; } @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, SearchExecutionContext context ) { - BytesRef lower = lowerTerm == null ? null : indexedValueForSearch(lowerTerm); - BytesRef upper = upperTerm == null ? null : indexedValueForSearch(upperTerm); - return new TermRangeQuery(name(), lower, upper, includeLower, includeUpper); + BytesRef lower = lowerTerm == null ? null : indexedValueForSearch(name, lowerTerm); + BytesRef upper = upperTerm == null ? 
null : indexedValueForSearch(name, upperTerm); + return new TermRangeQuery(name, lower, upper, includeLower, includeUpper); } } @@ -317,17 +322,17 @@ public Query rangeQuery( private VersionStringFieldMapper( String simpleName, FieldType fieldType, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + super(simpleName, mappedField, multiFields, copyTo); this.fieldType = fieldType; } @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), Lucene.KEYWORD_ANALYZER); + return Map.of(mappedField.name(), Lucene.KEYWORD_ANALYZER); } @Override @@ -356,8 +361,8 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio EncodedVersion encoding = encodeVersion(versionString); BytesRef encodedVersion = encoding.bytesRef; - context.doc().add(new Field(fieldType().name(), encodedVersion, fieldType)); - context.doc().add(new SortedSetDocValuesField(fieldType().name(), encodedVersion)); + context.doc().add(new Field(name(), encodedVersion, fieldType)); + context.doc().add(new SortedSetDocValuesField(name(), encodedVersion)); } @Override diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTypeTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTypeTests.java index 2a3abff72bd1e..07fd03f0bd215 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTypeTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTypeTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.versionfield; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import 
org.elasticsearch.index.mapper.MapperBuilderContext; import java.io.IOException; @@ -17,7 +17,7 @@ public class VersionStringFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new VersionStringFieldMapper.Builder("field").build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new VersionStringFieldMapper.Builder("field").build(MapperBuilderContext.ROOT).field(); assertEquals(List.of("value"), fetchSourceValue(mapper, "value")); assertEquals(List.of("42"), fetchSourceValue(mapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(mapper, true)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java index f0476f8ba7454..f2d2c6f191aee 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -41,7 +41,7 @@ public class CategorizeTextAggregator extends DeferableBucketAggregator { private final TermsAggregator.BucketCountThresholds bucketCountThresholds; private final SourceLookup sourceLookup; - private final MappedFieldType fieldType; + private final MappedField mappedField; private final CategorizationAnalyzer analyzer; private final String sourceFieldName; private 
ObjectArray categorizers; @@ -56,7 +56,7 @@ protected CategorizeTextAggregator( AggregationContext context, Aggregator parent, String sourceFieldName, - MappedFieldType fieldType, + MappedField mappedField, TermsAggregator.BucketCountThresholds bucketCountThresholds, int similarityThreshold, CategorizationAnalyzerConfig categorizationAnalyzerConfig, @@ -65,7 +65,7 @@ protected CategorizeTextAggregator( super(name, factories, context, parent, metadata); this.sourceLookup = context.lookup().source(); this.sourceFieldName = sourceFieldName; - this.fieldType = fieldType; + this.mappedField = mappedField; CategorizationAnalyzerConfig analyzerConfig = Optional.ofNullable(categorizationAnalyzerConfig) .orElse(CategorizationAnalyzerConfig.buildStandardCategorizationAnalyzer(List.of())); final String analyzerName = analyzerConfig.getAnalyzer(); @@ -159,13 +159,13 @@ private void collectFromSource(int doc, long owningBucketOrd, TokenListCategoriz sourceLookup.setSegmentAndDocument(aggCtx.getLeafReaderContext(), doc); Iterator itr = sourceLookup.extractRawValuesWithoutCaching(sourceFieldName).stream().map(obj -> { if (obj instanceof BytesRef) { - return fieldType.valueForDisplay(obj).toString(); + return mappedField.valueForDisplay(obj).toString(); } return (obj == null) ? 
null : obj.toString(); }).iterator(); while (itr.hasNext()) { String string = itr.next(); - try (TokenStream ts = analyzer.tokenStream(fieldType.name(), string)) { + try (TokenStream ts = analyzer.tokenStream(mappedField.name(), string)) { processTokenStream(owningBucketOrd, ts, string.length(), doc, categorizer); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorFactory.java index 2d67842001bd1..c95245bc83551 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorFactory.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.aggs.categorization; import org.elasticsearch.index.mapper.KeywordScriptFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.TextSearchInfo; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -26,7 +26,7 @@ public class CategorizeTextAggregatorFactory extends AggregatorFactory { - private final MappedFieldType fieldType; + private final MappedField mappedField; private final int similarityThreshold; private final CategorizationAnalyzerConfig categorizationAnalyzerConfig; private final TermsAggregator.BucketCountThresholds bucketCountThresholds; @@ -43,7 +43,7 @@ public CategorizeTextAggregatorFactory( Map metadata ) throws IOException { super(name, context, parent, subFactoriesBuilder, metadata); - this.fieldType = context.getFieldType(fieldName); + this.mappedField = context.getMappedField(fieldName); this.similarityThreshold = similarityThreshold; this.categorizationAnalyzerConfig = 
categorizationAnalyzerConfig; this.bucketCountThresholds = bucketCountThresholds; @@ -68,7 +68,7 @@ public InternalAggregation buildEmptyAggregation() { @Override protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { - if (fieldType == null) { + if (mappedField == null) { return createUnmapped(parent, metadata); } // Most of the text and keyword family of fields use a bespoke TextSearchInfo that doesn't match any @@ -78,16 +78,16 @@ protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound car // a new field type via a plugin that also creates a bespoke TextSearchInfo member - it will just get // converted to a string and then likely the analyzer won't create any tokens, so the categorizer // will see an empty token list.) - if (fieldType.getTextSearchInfo() == TextSearchInfo.NONE - || (fieldType.getTextSearchInfo() == TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS - && fieldType instanceof KeywordScriptFieldType == false)) { + if (mappedField.getTextSearchInfo() == TextSearchInfo.NONE + || (mappedField.getTextSearchInfo() == TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS + && mappedField.type() instanceof KeywordScriptFieldType == false)) { throw new IllegalArgumentException( "categorize_text agg [" + name + "] only works on text and keyword fields. 
Cannot aggregate field type [" - + fieldType.name() + + mappedField.name() + "] via [" - + fieldType.getClass().getSimpleName() + + mappedField.getClass().getSimpleName() + "]" ); } @@ -106,8 +106,8 @@ protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound car factories, context, parent, - fieldType.name(), - fieldType, + mappedField.name(), + mappedField, bucketCountThresholds, similarityThreshold, categorizationAnalyzerConfig, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorTests.java index ebd7b4ce4da61..6b07ebb6654ef 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.SearchPlugin; @@ -72,7 +73,7 @@ public void testCategorizationWithoutSubAggs() throws Exception { equalTo("Failed to shutdown error org.aaaa.bbbb.Cccc line caused by foo exception") ); }, - new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), + new MappedField(TEXT_FIELD_NAME, new TextFieldMapper.TextFieldType()), longField(NUMERIC_FIELD_NAME) ); } @@ -104,7 +105,7 @@ public void testCategorizationWithSubAggs() throws Exception { assertThat(((Min) result.getBuckets().get(1).getAggregations().get("min")).value(), equalTo(0.0)); assertThat(((Avg) result.getBuckets().get(1).getAggregations().get("avg")).getValue(), equalTo(2.0)); }, - new 
TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), + new MappedField(TEXT_FIELD_NAME, new TextFieldMapper.TextFieldType()), longField(NUMERIC_FIELD_NAME) ); } @@ -153,7 +154,7 @@ public void testCategorizationWithMultiBucketSubAggs() throws Exception { assertThat(((Avg) histo.getBuckets().get(0).getAggregations().get("avg")).getValue(), equalTo(0.0)); assertThat(((Avg) histo.getBuckets().get(2).getAggregations().get("avg")).getValue(), equalTo(4.0)); }, - new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), + new MappedField(TEXT_FIELD_NAME, new TextFieldMapper.TextFieldType()), longField(NUMERIC_FIELD_NAME) ); } @@ -218,7 +219,7 @@ public void testCategorizationAsSubAgg() throws Exception { assertThat(((Max) categorizationAggregation.getBuckets().get(1).getAggregations().get("max")).value(), equalTo(4.0)); assertThat(((Min) categorizationAggregation.getBuckets().get(1).getAggregations().get("min")).value(), equalTo(4.0)); assertThat(((Avg) categorizationAggregation.getBuckets().get(1).getAggregations().get("avg")).getValue(), equalTo(4.0)); - }, new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), longField(NUMERIC_FIELD_NAME)); + }, new MappedField(TEXT_FIELD_NAME, new TextFieldMapper.TextFieldType()), longField(NUMERIC_FIELD_NAME)); } public void testCategorizationWithSubAggsManyDocs() throws Exception { @@ -265,7 +266,7 @@ public void testCategorizationWithSubAggsManyDocs() throws Exception { assertThat(((Avg) histo.getBuckets().get(0).getAggregations().get("avg")).getValue(), equalTo(0.0)); assertThat(((Avg) histo.getBuckets().get(2).getAggregations().get("avg")).getValue(), equalTo(4.0)); }, - new TextFieldMapper.TextFieldType(TEXT_FIELD_NAME), + new MappedField(TEXT_FIELD_NAME, new TextFieldMapper.TextFieldType()), longField(NUMERIC_FIELD_NAME) ); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/FieldValueFetcher.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/FieldValueFetcher.java index 
d299e4a1d01ad..0ff34eec6805a 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/FieldValueFetcher.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/FieldValueFetcher.java @@ -11,7 +11,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.FormattedDocValues; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; @@ -35,15 +35,15 @@ class FieldValueFetcher { ); private final String name; - private final MappedFieldType fieldType; + private final MappedField mappedField; private final DocValueFormat format; private final IndexFieldData fieldData; private final Function valueFunc; - protected FieldValueFetcher(String name, MappedFieldType fieldType, IndexFieldData fieldData, Function valueFunc) { + protected FieldValueFetcher(String name, MappedField mappedField, IndexFieldData fieldData, Function valueFunc) { this.name = name; - this.fieldType = fieldType; - this.format = fieldType.docValueFormat(null, null); + this.mappedField = mappedField; + this.format = mappedField.docValueFormat(null, null); this.fieldData = fieldData; this.valueFunc = valueFunc; } @@ -52,8 +52,8 @@ public String name() { return name; } - public MappedFieldType fieldType() { - return fieldType; + public MappedField mappedField() { + return mappedField; } public DocValueFormat format() { @@ -105,12 +105,12 @@ Object format(Object value) { static List build(SearchExecutionContext context, String[] fields) { List fetchers = new ArrayList<>(fields.length); for (String field : fields) { - MappedFieldType fieldType = context.getFieldType(field); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(field); + if (mappedField == null) { throw new 
IllegalArgumentException("Unknown field: [" + field + "]"); } - IndexFieldData fieldData = context.getForField(fieldType); - fetchers.add(new FieldValueFetcher(field, fieldType, fieldData, getValidator(field))); + IndexFieldData fieldData = context.getForField(mappedField); + fetchers.add(new FieldValueFetcher(field, mappedField, fieldData, getValidator(field))); } return Collections.unmodifiableList(fetchers); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/MetricFieldProducer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/MetricFieldProducer.java index 80d2de7d6401c..8e443b46b58be 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/MetricFieldProducer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/MetricFieldProducer.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.rollup.v2; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; @@ -270,13 +270,13 @@ public Object value() { static Map buildMetricFieldProducers(SearchExecutionContext context, String[] metricFields) { final Map fields = new LinkedHashMap<>(); for (String field : metricFields) { - MappedFieldType fieldType = context.getFieldType(field); - assert fieldType.getMetricType() != null; + MappedField mappedField = context.getMappedField(field); + assert mappedField.getMetricType() != null; - MetricFieldProducer producer = switch (fieldType.getMetricType()) { + MetricFieldProducer producer = switch (mappedField.getMetricType()) { case gauge -> new GaugeMetricFieldProducer(field); case counter -> new CounterMetricFieldProducer(field); - default -> throw new IllegalArgumentException("Unsupported metric type [" + fieldType.getMetricType() + "]"); + default -> throw new 
IllegalArgumentException("Unsupported metric type [" + mappedField.getMetricType() + "]"); }; fields.put(field, producer); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java index 68e09e470cdce..0ab5e09aa5171 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/RollupShardIndexer.java @@ -31,7 +31,7 @@ import org.elasticsearch.index.fielddata.FormattedDocValues; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DocCountFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.IndexShard; @@ -77,7 +77,7 @@ class RollupShardIndexer { private final Engine.Searcher searcher; private final SearchExecutionContext searchExecutionContext; - private final MappedFieldType timestampField; + private final MappedField timestampField; private final DocValueFormat timestampFormat; private final Rounding.Prepared rounding; @@ -116,7 +116,7 @@ class RollupShardIndexer { null, Collections.emptyMap() ); - this.timestampField = searchExecutionContext.getFieldType(DataStreamTimestampFieldMapper.DEFAULT_PATH); + this.timestampField = searchExecutionContext.getMappedField(DataStreamTimestampFieldMapper.DEFAULT_PATH); this.timestampFormat = timestampField.docValueFormat(null, null); this.rounding = config.createRounding(); this.metricFieldFetchers = FieldValueFetcher.build(searchExecutionContext, metricFields); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java 
b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index ef6e4d030ef61..b8c5a8ec3afcf 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordField; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -429,7 +429,7 @@ public void testUnsupported() throws IOException { GeoBoundsAggregationBuilder geo1 = new GeoBoundsAggregationBuilder("foo").field("bar"); GeoBoundsAggregationBuilder geo2 = new GeoBoundsAggregationBuilder("foo").field("bar"); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField("field", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); List responses = doQueries(new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); @@ -439,7 +439,7 @@ public void testUnsupported() throws IOException { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); iw.addDocument(singleton(new NumericDocValuesField("number", 2))); iw.addDocument(singleton(new NumericDocValuesField("number", 3))); - }, geo2, new MappedFieldType[] { fieldType }, new MappedFieldType[] { fieldType }); + }, geo2, new MappedField[] { mappedField }, new MappedField[] { mappedField }); Exception e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.unrollAgg(responses.get(1), 
null, null, 0)); assertThat( @@ -452,7 +452,7 @@ public void testUnsupported() throws IOException { public void testUnsupportedMultiBucket() throws IOException { - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("foo"); + MappedField mappedField = new MappedField("foo", new KeywordFieldMapper.KeywordFieldType()); QueryBuilder filter = QueryBuilders.boolQuery() .must(QueryBuilders.termQuery("field", "foo")) .should(QueryBuilders.termQuery("field", "bar")); @@ -466,7 +466,7 @@ public void testUnsupportedMultiBucket() throws IOException { iw.addDocument(timestampedValueRollupDoc(100, 1)); iw.addDocument(timestampedValueRollupDoc(200, 2)); iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, builder, new MappedFieldType[] { fieldType }, new MappedFieldType[] { fieldType }); + }, builder, new MappedField[] { mappedField }, new MappedField[] { mappedField }); Exception e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0)); assertThat( @@ -486,7 +486,7 @@ public void testMismatch() throws IOException { FilterAggregationBuilder filterBuilder = new FilterAggregationBuilder("filter", new TermQueryBuilder("foo", "bar")); filterBuilder.subAggregation(histoBuilder); - MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); + MappedField mappedField = new MappedField("number", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); List responses = doQueries(new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); @@ -496,7 +496,7 @@ public void testMismatch() throws IOException { iw.addDocument(singleton(new NumericDocValuesField("number", 7))); iw.addDocument(singleton(new NumericDocValuesField("number", 2))); iw.addDocument(singleton(new NumericDocValuesField("number", 3))); - }, filterBuilder, new MappedFieldType[] { fieldType }, new MappedFieldType[] { fieldType }); + }, 
filterBuilder, new MappedField[] { mappedField }, new MappedField[] { mappedField }); // TODO SearchResponse.Clusters is not public, using null for now. Should fix upstream. MultiSearchResponse.Item unrolledItem = new MultiSearchResponse.Item( @@ -564,12 +564,12 @@ public void testDateHisto() throws IOException { new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) ); - DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); - DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); + MappedField nrFTtimestamp = new MappedField(nonRollupHisto.field(), new DateFieldMapper.DateFieldType()); + MappedField rFTtimestamp = new MappedField(rollupHisto.field(), new DateFieldMapper.DateFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -580,7 +580,7 @@ public void testDateHisto() throws IOException { iw.addDocument(timestampedValueRollupDoc(100, 1)); iw.addDocument(timestampedValueRollupDoc(200, 2)); iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { nrFTtimestamp }, new MappedField[] { rFTtimestamp, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); @@ -601,12 +601,12 @@ public void testDateHistoWithGap() throws IOException { new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD) ); - DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); - DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); + MappedField nrFTtimestamp = new MappedField(nonRollupHisto.field(), new DateFieldMapper.DateFieldType()); + MappedField rFTtimestamp = new MappedField(rollupHisto.field(), new DateFieldMapper.DateFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -617,7 +617,7 @@ public void testDateHistoWithGap() throws IOException { iw.addDocument(timestampedValueRollupDoc(100, 1)); iw.addDocument(timestampedValueRollupDoc(200, 2)); iw.addDocument(timestampedValueRollupDoc(400, 3)); - }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { nrFTtimestamp }, new MappedField[] { rFTtimestamp, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); @@ -680,16 +680,16 @@ public void testNonMatchingPartition() throws IOException { new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD) ); - DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); - DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); + MappedField nrFTtimestamp = new MappedField(nonRollupHisto.field(), new DateFieldMapper.DateFieldType()); + MappedField rFTtimestamp = new MappedField(rollupHisto.field(), new DateFieldMapper.DateFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); - KeywordFieldMapper.KeywordFieldType nrKeywordFT = new KeywordFieldMapper.KeywordFieldType("partition"); - KeywordFieldMapper.KeywordFieldType rKeywordFT = new KeywordFieldMapper.KeywordFieldType("partition"); + MappedField nrKeywordFT = new MappedField("partition", new KeywordFieldMapper.KeywordFieldType()); + MappedField rKeywordFT = new MappedField("partition", new KeywordFieldMapper.KeywordFieldType()); // Note: term query for "a" List results = new ArrayList<>(2); @@ -718,7 +718,7 @@ public void testNonMatchingPartition() throws IOException { doc.add(new TextField("partition", "b", Field.Store.NO)); iw.addDocument(doc); - }, nonRollupHisto, new MappedFieldType[] { nrFTtimestamp, nrKeywordFT })); + }, nonRollupHisto, new MappedField[] { nrFTtimestamp, nrKeywordFT })); // Note: term query for "a" results.add(doQuery(new TermQuery(new Term("partition.terms." + RollupField.VALUE, "a")), iw -> { @@ -753,7 +753,7 @@ public void testNonMatchingPartition() throws IOException { doc.add(new TextField("partition.terms." + RollupField.VALUE, "b", Field.Store.NO)); doc.add(new SortedNumericDocValuesField("partition.terms." 
+ RollupField.COUNT_FIELD, 1)); iw.addDocument(doc); - }, rollupHisto, new MappedFieldType[] { rFTtimestamp, rFTvalue, rKeywordFT })); + }, rollupHisto, new MappedField[] { rFTtimestamp, rFTvalue, rKeywordFT })); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(results.get(1), null, null, 0); assertThat(((InternalDateHistogram) unrolled).getBuckets().size(), equalTo(2)); @@ -792,12 +792,12 @@ public void testDateHistoOverlappingAggTrees() throws IOException { new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) ); - DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); - DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); + MappedField nrFTtimestamp = new MappedField(nonRollupHisto.field(), new DateFieldMapper.DateFieldType()); + MappedField rFTtimestamp = new MappedField(rollupHisto.field(), new DateFieldMapper.DateFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "timestamp.date_histogram." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -808,7 +808,7 @@ public void testDateHistoOverlappingAggTrees() throws IOException { iw.addDocument(timestampedValueRollupDoc(100, 1)); iw.addDocument(timestampedValueRollupDoc(200, 2)); iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { nrFTtimestamp }, new MappedField[] { rFTtimestamp, rFTvalue }); List currentTree = doQueries(new MatchAllDocsQuery(), iw -> { iw.addDocument(timestampedValueDoc(100, 1)); @@ -816,7 +816,7 @@ public void testDateHistoOverlappingAggTrees() throws IOException { }, nonRollupHisto, iw -> { iw.addDocument(timestampedValueRollupDoc(100, 1)); iw.addDocument(timestampedValueRollupDoc(200, 2)); - }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { nrFTtimestamp }, new MappedField[] { rFTtimestamp, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, currentTree.get(1), 0); @@ -841,12 +841,12 @@ public void testDateHistoOverlappingMergeRealIntoZero() throws IOException { new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD) ); - DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); - DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); + MappedField nrFTtimestamp = new MappedField(nonRollupHisto.field(), new DateFieldMapper.DateFieldType()); + MappedField rFTtimestamp = new MappedField(rollupHisto.field(), new DateFieldMapper.DateFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -857,7 +857,7 @@ public void testDateHistoOverlappingMergeRealIntoZero() throws IOException { iw.addDocument(timestampedValueRollupDoc(100, 1)); iw.addDocument(timestampedValueRollupDoc(200, 2)); iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { nrFTtimestamp }, new MappedField[] { rFTtimestamp, rFTvalue }); InternalAggregation currentTree = doQuery(new MatchAllDocsQuery(), iw -> { Document doc = new Document(); @@ -872,7 +872,7 @@ public void testDateHistoOverlappingMergeRealIntoZero() throws IOException { doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." + RollupField.INTERVAL, 1)); iw.addDocument(doc2); - }, rollupHisto, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { rFTtimestamp, rFTvalue }); // In this test we merge real buckets into zero count buckets (e.g. 
empty list of buckets after unrolling) InternalAggregation unrolledCurrentTree = RollupResponseTranslator.unrollAgg(currentTree, null, null, 0); @@ -900,19 +900,19 @@ public void testDateHistoOverlappingMergeZeroIntoReal() throws IOException { new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) ); - DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); - DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); + MappedField nrFTtimestamp = new MappedField(nonRollupHisto.field(), new DateFieldMapper.DateFieldType()); + MappedField rFTtimestamp = new MappedField(rollupHisto.field(), new DateFieldMapper.DateFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "timestamp.date_histogram." + RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); InternalAggregation currentTree = doQuery(new MatchAllDocsQuery(), iw -> { iw.addDocument(timestampedValueRollupDoc(100, 1)); iw.addDocument(timestampedValueRollupDoc(200, 2)); iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { rFTtimestamp, rFTvalue }); InternalAggregation responses = doQuery(new MatchAllDocsQuery(), iw -> { Document doc = new Document(); @@ -927,7 +927,7 @@ public void testDateHistoOverlappingMergeZeroIntoReal() throws IOException { doc2.add(new SortedNumericDocValuesField("timestamp.date_histogram." 
+ RollupField.INTERVAL, 1)); iw.addDocument(doc2); - }, rollupHisto, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { rFTtimestamp, rFTvalue }); // In this test, we merge zero_count buckets into existing buckets to ensure the metrics remain InternalAggregation unrolledCurrentTree = RollupResponseTranslator.unrollAgg(currentTree, null, null, 0); @@ -942,8 +942,11 @@ public void testAvg() throws IOException { SumAggregationBuilder rollup = new SumAggregationBuilder("avg").field("foo.avg." + RollupField.VALUE); - MappedFieldType nrFTvalue = new NumberFieldMapper.NumberFieldType("foo", NumberFieldMapper.NumberType.LONG); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType("foo.avg." + RollupField.VALUE, NumberFieldMapper.NumberType.LONG); + MappedField nrFTvalue = new MappedField("foo", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField rFTvalue = new MappedField( + "foo.avg." + RollupField.VALUE, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { iw.addDocument(timestampedValueDoc(100, 1)); @@ -953,8 +956,8 @@ public void testAvg() throws IOException { nonRollup, iw -> { iw.addDocument(timestampedValueRollupDoc(100, 6)); }, rollup, - new MappedFieldType[] { nrFTvalue }, - new MappedFieldType[] { rFTvalue } + new MappedField[] { nrFTvalue }, + new MappedField[] { rFTvalue } ); // NOTE: we manually set the count to 3 here, which is somewhat cheating. 
Will have to rely on @@ -990,8 +993,8 @@ public void testMetric() throws IOException { rollupValue = 0; } - MappedFieldType nrFTvalue = new NumberFieldMapper.NumberFieldType("foo", NumberFieldMapper.NumberType.LONG); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType(fieldName, NumberFieldMapper.NumberType.LONG); + MappedField nrFTvalue = new MappedField("foo", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField rFTvalue = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); List responses = doQueries(new MatchAllDocsQuery(), iw -> { iw.addDocument(timestampedValueDoc(100, 1)); @@ -1001,8 +1004,8 @@ public void testMetric() throws IOException { nonRollup, iw -> { iw.addDocument(timestampedValueRollupDoc(100, rollupValue)); }, rollup, - new MappedFieldType[] { nrFTvalue }, - new MappedFieldType[] { rFTvalue } + new MappedField[] { nrFTvalue }, + new MappedField[] { rFTvalue } ); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 1); @@ -1015,9 +1018,9 @@ public void testUnsupportedMetric() throws IOException { String fieldName = "foo.max." 
+ RollupField.VALUE; AggregationBuilder rollup = new CardinalityAggregationBuilder("test_metric").userValueTypeHint(ValueType.LONG).field(fieldName); - MappedFieldType nrFTvalue = new NumberFieldMapper.NumberFieldType("foo", NumberFieldMapper.NumberType.LONG); + MappedField nrFTvalue = new MappedField("foo", new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType(fieldName, NumberFieldMapper.NumberType.LONG); + MappedField rFTvalue = new MappedField(fieldName, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); List responses = doQueries(new MatchAllDocsQuery(), iw -> { iw.addDocument(timestampedValueDoc(100, 1)); @@ -1027,8 +1030,8 @@ public void testUnsupportedMetric() throws IOException { nonRollup, iw -> { iw.addDocument(timestampedValueRollupDoc(100, 3)); }, rollup, - new MappedFieldType[] { nrFTvalue }, - new MappedFieldType[] { rFTvalue } + new MappedField[] { nrFTvalue }, + new MappedField[] { rFTvalue } ); RuntimeException e = expectThrows( @@ -1053,13 +1056,13 @@ public void testStringTerms() throws IOException { new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD).field("stringfield.terms." + RollupField.COUNT_FIELD) ); - KeywordFieldMapper.KeywordFieldType nrFTterm = new KeywordFieldMapper.KeywordFieldType(nonRollupTerms.field()); + MappedField nrFTterm = new MappedField(nonRollupTerms.field(), new KeywordFieldMapper.KeywordFieldType()); - KeywordFieldMapper.KeywordFieldType rFTterm = new KeywordFieldMapper.KeywordFieldType(rollupTerms.field()); + MappedField rFTterm = new MappedField(rollupTerms.field(), new KeywordFieldMapper.KeywordFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "stringfield.terms." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -1070,8 +1073,8 @@ public void testStringTerms() throws IOException { nonRollupTerms, iw -> { iw.addDocument(stringValueRollupDoc("abc", 3)); }, rollupTerms, - new MappedFieldType[] { nrFTterm }, - new MappedFieldType[] { rFTterm, rFTvalue } + new MappedField[] { nrFTterm }, + new MappedField[] { rFTterm, rFTvalue } ); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); @@ -1089,12 +1092,13 @@ public void testStringTermsNullValue() throws IOException { new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD).field("stringfield.terms." + RollupField.COUNT_FIELD) ); - KeywordFieldMapper.KeywordFieldType nrFTterm = new KeywordFieldMapper.KeywordFieldType(nonRollupTerms.field()); - KeywordFieldMapper.KeywordFieldType rFTterm = new KeywordFieldMapper.KeywordFieldType(rollupTerms.field()); + MappedField nrFTterm = new MappedField(nonRollupTerms.field(), new KeywordFieldMapper.KeywordFieldType()); + + MappedField rFTterm = new MappedField(rollupTerms.field(), new KeywordFieldMapper.KeywordFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "stringfield.terms." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -1110,8 +1114,8 @@ public void testStringTermsNullValue() throws IOException { nonRollupTerms, iw -> { iw.addDocument(stringValueRollupDoc("abc", 3)); }, rollupTerms, - new MappedFieldType[] { nrFTterm }, - new MappedFieldType[] { rFTterm, rFTvalue } + new MappedField[] { nrFTterm }, + new MappedField[] { rFTterm, rFTvalue } ); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); @@ -1130,12 +1134,19 @@ public void testLongTerms() throws IOException { new SumAggregationBuilder("terms." + RollupField.COUNT_FIELD).field("longfield.terms." + RollupField.COUNT_FIELD) ); - MappedFieldType nrFTterm = new NumberFieldMapper.NumberFieldType(nonRollupTerms.field(), NumberFieldMapper.NumberType.LONG); - MappedFieldType rFTterm = new NumberFieldMapper.NumberFieldType(rollupTerms.field(), NumberFieldMapper.NumberType.LONG); + MappedField nrFTterm = new MappedField( + nonRollupTerms.field(), + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTterm = new MappedField( + rollupTerms.field(), + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + + MappedField rFTvalue = new MappedField( "longfield.terms." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -1146,8 +1157,8 @@ public void testLongTerms() throws IOException { nonRollupTerms, iw -> { iw.addDocument(longValueRollupDoc(19L, 3)); }, rollupTerms, - new MappedFieldType[] { nrFTterm }, - new MappedFieldType[] { rFTterm, rFTvalue } + new MappedField[] { nrFTterm }, + new MappedField[] { rFTterm, rFTvalue } ); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); @@ -1164,12 +1175,16 @@ public void testHisto() throws IOException { new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("bar.histogram." + RollupField.COUNT_FIELD) ); - MappedFieldType nrFTbar = new NumberFieldMapper.NumberFieldType(nonRollupHisto.field(), NumberFieldMapper.NumberType.LONG); - MappedFieldType rFTbar = new NumberFieldMapper.NumberFieldType(rollupHisto.field(), NumberFieldMapper.NumberType.LONG); + MappedField nrFTbar = new MappedField( + nonRollupHisto.field(), + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); + + MappedField rFTbar = new MappedField(rollupHisto.field(), new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "bar.histogram." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -1180,7 +1195,7 @@ public void testHisto() throws IOException { iw.addDocument(timestampedValueRollupDoc(100, 1)); iw.addDocument(timestampedValueRollupDoc(200, 2)); iw.addDocument(timestampedValueRollupDoc(300, 3)); - }, rollupHisto, new MappedFieldType[] { nrFTbar }, new MappedFieldType[] { rFTbar, rFTvalue }); + }, rollupHisto, new MappedField[] { nrFTbar }, new MappedField[] { rFTbar, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); @@ -1199,13 +1214,13 @@ public void testOverlappingBuckets() throws IOException { new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD).field("timestamp.date_histogram." + RollupField.COUNT_FIELD) ); - DateFieldMapper.DateFieldType nrFTtimestamp = new DateFieldMapper.DateFieldType(nonRollupHisto.field()); + MappedField nrFTtimestamp = new MappedField(nonRollupHisto.field(), new DateFieldMapper.DateFieldType()); - DateFieldMapper.DateFieldType rFTtimestamp = new DateFieldMapper.DateFieldType(rollupHisto.field()); + MappedField rFTtimestamp = new MappedField(rollupHisto.field(), new DateFieldMapper.DateFieldType()); - MappedFieldType rFTvalue = new NumberFieldMapper.NumberFieldType( + MappedField rFTvalue = new MappedField( "timestamp.date_histogram." 
+ RollupField.COUNT_FIELD, - NumberFieldMapper.NumberType.LONG + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) ); List responses = doQueries(new MatchAllDocsQuery(), iw -> { @@ -1217,7 +1232,7 @@ public void testOverlappingBuckets() throws IOException { iw.addDocument(timestampedValueRollupDoc(200, 200)); iw.addDocument(timestampedValueRollupDoc(300, 300)); iw.addDocument(timestampedValueRollupDoc(400, 4)); // <-- Only one that should show up in rollup - }, rollupHisto, new MappedFieldType[] { nrFTtimestamp }, new MappedFieldType[] { rFTtimestamp, rFTvalue }); + }, rollupHisto, new MappedField[] { nrFTtimestamp }, new MappedField[] { rFTtimestamp, rFTvalue }); InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), responses.get(0), null, 0); assertThat(((InternalDateHistogram) unrolled).getBuckets().size(), equalTo(1)); @@ -1287,8 +1302,8 @@ private List doQueries( AggregationBuilder nonRollupAggBuilder, CheckedConsumer buildRollupIndex, AggregationBuilder rollupAggBuilder, - MappedFieldType[] nonRollupFieldType, - MappedFieldType[] rollupFieldType + MappedField[] nonRollupFieldType, + MappedField[] rollupFieldType ) throws IOException { List results = new ArrayList<>(2); @@ -1302,7 +1317,7 @@ private InternalAggregation doQuery( Query query, CheckedConsumer buildIndex, AggregationBuilder aggBuilder, - MappedFieldType[] fieldType + MappedField[] mappedFields ) throws IOException { Directory directory = newDirectory(); @@ -1313,7 +1328,7 @@ private InternalAggregation doQuery( IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - Aggregator aggregator = createAggregator(aggBuilder, indexSearcher, fieldType); + Aggregator aggregator = createAggregator(aggBuilder, indexSearcher, mappedFields); try { aggregator.preCollection(); indexSearcher.search(query, aggregator); diff --git 
a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index 71c6a3cc991ba..ab33e1e715c04 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -94,8 +94,8 @@ public void testMissingFields() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); + MappedField timestampFieldType = new MappedField(timestampField, new DateFieldMapper.DateFieldType()); + MappedField valueFieldType = new MappedField(valueField, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); // Setup the composite agg DateHistogramGroupConfig dateHistoGroupConfig = new DateHistogramGroupConfig.CalendarInterval( @@ -154,8 +154,8 @@ public void testCorrectFields() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField); - MappedFieldType valueFieldType = new 
NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); + MappedField timestampFieldType = new MappedField(timestampField, new DateFieldMapper.DateFieldType()); + MappedField valueFieldType = new MappedField(valueField, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); // Setup the composite agg // TODO swap this over to DateHistoConfig.Builder once DateInterval is in @@ -213,7 +213,7 @@ public void testNumericTerms() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); + MappedField valueFieldType = new MappedField(valueField, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); // Setup the composite agg TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("the_terms." + TermsAggregationBuilder.NAME).field(valueField); @@ -270,8 +270,8 @@ public void testEmptyCounts() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); + MappedField timestampFieldType = new MappedField(timestampField, new DateFieldMapper.DateFieldType()); + MappedField valueFieldType = new MappedField(valueField, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); // Setup the composite agg DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder( @@ -461,8 +461,11 @@ public void testMissingBuckets() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - MappedFieldType 
valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); - MappedFieldType metricFieldType = new NumberFieldMapper.NumberFieldType(metricField, NumberFieldMapper.NumberType.LONG); + MappedField valueFieldType = new MappedField(valueField, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); + MappedField metricFieldType = new MappedField( + metricField, + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG) + ); // Setup the composite agg TermsGroupConfig termsGroupConfig = new TermsGroupConfig(valueField); @@ -529,8 +532,8 @@ public void testTimezone() throws IOException { IndexReader indexReader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newIndexSearcher(indexReader); - DateFieldMapper.DateFieldType timestampFieldType = new DateFieldMapper.DateFieldType(timestampField); - MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(valueField, NumberFieldMapper.NumberType.LONG); + MappedField timestampFieldType = new MappedField(timestampField, new DateFieldMapper.DateFieldType()); + MappedField valueFieldType = new MappedField(valueField, new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); // Setup the composite agg DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder( diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 8e2714f8bb9e1..d4be29f36eae3 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; import 
org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; @@ -699,7 +699,7 @@ private void executeTestCase( long now, Consumer> rollupConsumer ) throws Exception { - Map fieldTypeLookup = createFieldTypes(config); + Map fieldTypeLookup = createMappedFields(config); Directory dir = index(docs, fieldTypeLookup); IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = new IndexSearcher(reader); @@ -712,7 +712,7 @@ private void executeTestCase( threadPool, job, searcher, - fieldTypeLookup.values().toArray(new MappedFieldType[0]), + fieldTypeLookup.values().toArray(new MappedField[0]), fieldTypeLookup.get(dateHistoField) ); rollupConsumer.accept(action.triggerAndWaitForCompletion(now)); @@ -722,56 +722,54 @@ private void executeTestCase( } /** - * Creates {@link MappedFieldType} from the provided job. + * Creates {@link MappedField} from the provided job. * For simplicity all numbers are considered as longs. 
* * @return A map containing all created field types accessible by their names */ - private Map createFieldTypes(RollupJobConfig job) { - Map fieldTypes = new HashMap<>(); + private Map createMappedFields(RollupJobConfig job) { + Map mappedFields = new HashMap<>(); DateFormatter formatter = DateFormatter.forPattern(randomDateFormatterPattern()).withLocale(Locale.ROOT); - MappedFieldType fieldType = new DateFieldMapper.DateFieldType(job.getGroupConfig().getDateHistogram().getField(), formatter); - fieldTypes.put(fieldType.name(), fieldType); + MappedField mappedField = new MappedField( + job.getGroupConfig().getDateHistogram().getField(), + new DateFieldMapper.DateFieldType(formatter) + ); + mappedFields.put(mappedField.name(), mappedField); if (job.getGroupConfig().getHistogram() != null) { for (String field : job.getGroupConfig().getHistogram().getFields()) { - MappedFieldType ft = new NumberFieldMapper.Builder( - field, - NumberType.LONG, - ScriptCompiler.NONE, - false, - false, - Version.CURRENT - ).build(MapperBuilderContext.ROOT).fieldType(); - fieldTypes.put(ft.name(), ft); + MappedField ft = new NumberFieldMapper.Builder(field, NumberType.LONG, ScriptCompiler.NONE, false, false, Version.CURRENT) + .build(MapperBuilderContext.ROOT) + .field(); + mappedFields.put(ft.name(), ft); } } if (job.getGroupConfig().getTerms() != null) { for (String field : job.getGroupConfig().getTerms().getFields()) { - MappedFieldType ft = new KeywordFieldMapper.Builder(field, Version.CURRENT).build(MapperBuilderContext.ROOT).fieldType(); - fieldTypes.put(ft.name(), ft); + MappedField ft = new KeywordFieldMapper.Builder(field, Version.CURRENT).build(MapperBuilderContext.ROOT).field(); + mappedFields.put(ft.name(), ft); } } if (job.getMetricsConfig() != null) { for (MetricConfig metric : job.getMetricsConfig()) { - MappedFieldType ft = new NumberFieldMapper.Builder( + MappedField ft = new NumberFieldMapper.Builder( metric.getField(), NumberType.LONG, ScriptCompiler.NONE, false, 
false, Version.CURRENT - ).build(MapperBuilderContext.ROOT).fieldType(); - fieldTypes.put(ft.name(), ft); + ).build(MapperBuilderContext.ROOT).field(); + mappedFields.put(ft.name(), ft); } } - return fieldTypes; + return mappedFields; } @SuppressWarnings("unchecked") - private Directory index(List> docs, Map fieldTypeLookup) throws IOException { + private Directory index(List> docs, Map mappedFieldLookup) throws IOException { Directory directory = LuceneTestCase.newDirectory(); IndexWriterConfig config = LuceneTestCase.newIndexWriterConfig(LuceneTestCase.random(), new MockAnalyzer(LuceneTestCase.random())); try (RandomIndexWriter indexWriter = new RandomIndexWriter(LuceneTestCase.random(), directory, config)) { @@ -781,7 +779,7 @@ private Directory index(List> docs, Map entry : doc.entrySet()) { final String name = entry.getKey(); final Object value = entry.getValue(); - MappedFieldType ft = fieldTypeLookup.get(name); + MappedField mappedField = mappedFieldLookup.get(name); Collection values; if (value instanceof Collection) { values = (Collection) value; @@ -789,15 +787,16 @@ private Directory index(List> docs, Map> docs, Map documents = new ArrayList<>(); private final CountDownLatch latch = new CountDownLatch(1); private Exception exc; @@ -820,12 +819,12 @@ class SyncRollupIndexer extends RollupIndexer { ThreadPool threadPool, RollupJob job, IndexSearcher searcher, - MappedFieldType[] fieldTypes, - MappedFieldType timestampField + MappedField[] mappedFields, + MappedField timestampField ) { super(threadPool, job, new AtomicReference<>(IndexerState.STARTED), null); this.searcher = searcher; - this.fieldTypes = fieldTypes; + this.mappedFields = mappedFields; this.timestampField = timestampField; } @@ -880,7 +879,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener CompositeAggregation result = null; try { - result = searchAndReduce(searcher, query, aggBuilder, fieldTypes); + result = searchAndReduce(searcher, query, aggBuilder, mappedFields); } 
catch (IOException e) { listener.onFailure(e); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/v2/MetricFieldProducerTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/v2/MetricFieldProducerTests.java index e0f97315b7082..867c77487aa28 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/v2/MetricFieldProducerTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/v2/MetricFieldProducerTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.rollup.v2; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.index.query.SearchExecutionContext; @@ -152,34 +152,38 @@ public void testGaugeMetricFieldProducer() { } public void testBuildMetricProducers() { - final Map provideMappedFieldType = Map.of( + final Map provideMappedFieldType = Map.of( "gauge_field", - new NumberFieldMapper.NumberFieldType( + new MappedField( "gauge_field", - NumberFieldMapper.NumberType.DOUBLE, - true, - true, - true, - true, - null, - emptyMap(), - null, - false, - TimeSeriesParams.MetricType.gauge + new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.DOUBLE, + true, + true, + true, + true, + null, + emptyMap(), + null, + false, + TimeSeriesParams.MetricType.gauge + ) ), "counter_field", - new NumberFieldMapper.NumberFieldType( + new MappedField( "counter_field", - NumberFieldMapper.NumberType.DOUBLE, - true, - true, - true, - true, - null, - emptyMap(), - null, - false, - TimeSeriesParams.MetricType.counter + new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.DOUBLE, + true, + true, + true, + true, + null, + emptyMap(), + null, + false, + TimeSeriesParams.MetricType.counter + ) ) ); @@ -206,7 +210,7 @@ public 
void testBuildMetricProducers() { emptyMap() ) { @Override - public MappedFieldType getFieldType(String name) { + public MappedField getMappedField(String name) { return provideMappedFieldType.get(name); } }; diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java index d6cc37bcf13a1..f50ac439e1829 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilder.java @@ -21,7 +21,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; @@ -347,7 +347,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { - MappedFieldType idField = context.getFieldType(IdFieldMapper.NAME); + MappedField idField = context.getMappedField(IdFieldMapper.NAME); if (idField == null) { return new MatchNoDocsQuery("No mappings"); } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index 4368efb280238..b5a35b3c452f2 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.indices.IndicesService; @@ -142,8 +143,8 @@ public void testSearchableSnapshotShardsAreSkippedWithoutQueryingAnyNodeWhenThey final IndexMetadata indexMetadata = getIndexMetadata(searchableSnapshotIndexOutsideSearchRange); assertThat(indexMetadata.getTimestampRange(), equalTo(IndexLongFieldRange.NO_SHARDS)); - DateFieldMapper.DateFieldType timestampFieldType = indicesService.getTimestampFieldType(indexMetadata.getIndex()); - assertThat(timestampFieldType, nullValue()); + MappedField timestampField = indicesService.getTimestampField(indexMetadata.getIndex()); + assertThat(timestampField, nullValue()); final boolean includeIndexCoveringSearchRangeInSearchRequest = randomBoolean(); List indicesToSearch = new ArrayList<>(); @@ -182,9 +183,9 @@ public void testSearchableSnapshotShardsAreSkippedWithoutQueryingAnyNodeWhenThey final IndexMetadata updatedIndexMetadata = getIndexMetadata(searchableSnapshotIndexOutsideSearchRange); final IndexLongFieldRange updatedTimestampMillisRange = updatedIndexMetadata.getTimestampRange(); - final DateFieldMapper.DateFieldType dateFieldType = indicesService.getTimestampFieldType(updatedIndexMetadata.getIndex()); - assertThat(dateFieldType, notNullValue()); - final DateFieldMapper.Resolution resolution = dateFieldType.resolution(); + final MappedField dateField = 
indicesService.getTimestampField(updatedIndexMetadata.getIndex()); + assertThat(dateField, notNullValue()); + final DateFieldMapper.Resolution resolution = ((DateFieldMapper.DateFieldType) dateField.type()).resolution(); assertThat(updatedTimestampMillisRange.isComplete(), equalTo(true)); if (indexDataWithTimestamp) { assertThat(updatedTimestampMillisRange, not(sameInstance(IndexLongFieldRange.EMPTY))); @@ -277,8 +278,8 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() final IndexMetadata indexMetadata = getIndexMetadata(searchableSnapshotIndexOutsideSearchRange); assertThat(indexMetadata.getTimestampRange(), equalTo(IndexLongFieldRange.NO_SHARDS)); - DateFieldMapper.DateFieldType timestampFieldType = indicesService.getTimestampFieldType(indexMetadata.getIndex()); - assertThat(timestampFieldType, nullValue()); + MappedField timestampField = indicesService.getTimestampField(indexMetadata.getIndex()); + assertThat(timestampField, nullValue()); SearchRequest request = new SearchRequest().indices(indexOutsideSearchRange, searchableSnapshotIndexOutsideSearchRange) .source( @@ -307,9 +308,9 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() final IndexMetadata updatedIndexMetadata = getIndexMetadata(searchableSnapshotIndexOutsideSearchRange); final IndexLongFieldRange updatedTimestampMillisRange = updatedIndexMetadata.getTimestampRange(); - final DateFieldMapper.DateFieldType dateFieldType = indicesService.getTimestampFieldType(updatedIndexMetadata.getIndex()); - assertThat(dateFieldType, notNullValue()); - final DateFieldMapper.Resolution resolution = dateFieldType.resolution(); + final MappedField dateField = indicesService.getTimestampField(updatedIndexMetadata.getIndex()); + assertThat(dateField, notNullValue()); + final DateFieldMapper.Resolution resolution = ((DateFieldMapper.DateFieldType) dateField.type()).resolution(); assertThat(updatedTimestampMillisRange.isComplete(), equalTo(true)); 
assertThat(updatedTimestampMillisRange, not(sameInstance(IndexLongFieldRange.EMPTY))); assertThat(updatedTimestampMillisRange.getMin(), greaterThanOrEqualTo(resolution.convert(Instant.parse("2020-11-26T00:00:00Z")))); @@ -388,8 +389,8 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo final IndexMetadata indexMetadata = getIndexMetadata(searchableSnapshotIndexWithinSearchRange); assertThat(indexMetadata.getTimestampRange(), equalTo(IndexLongFieldRange.NO_SHARDS)); - DateFieldMapper.DateFieldType timestampFieldType = indicesService.getTimestampFieldType(indexMetadata.getIndex()); - assertThat(timestampFieldType, nullValue()); + MappedField timestampField = indicesService.getTimestampField(indexMetadata.getIndex()); + assertThat(timestampField, nullValue()); SearchRequest request = new SearchRequest().indices(searchableSnapshotIndexWithinSearchRange) .source( @@ -411,9 +412,9 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo final IndexMetadata updatedIndexMetadata = getIndexMetadata(searchableSnapshotIndexWithinSearchRange); final IndexLongFieldRange updatedTimestampMillisRange = updatedIndexMetadata.getTimestampRange(); - final DateFieldMapper.DateFieldType dateFieldType = indicesService.getTimestampFieldType(updatedIndexMetadata.getIndex()); - assertThat(dateFieldType, notNullValue()); - final DateFieldMapper.Resolution resolution = dateFieldType.resolution(); + final MappedField dateField = indicesService.getTimestampField(updatedIndexMetadata.getIndex()); + assertThat(dateField, notNullValue()); + final DateFieldMapper.Resolution resolution = ((DateFieldMapper.DateFieldType) dateField.type()).resolution(); assertThat(updatedTimestampMillisRange.isComplete(), equalTo(true)); assertThat(updatedTimestampMillisRange, not(sameInstance(IndexLongFieldRange.EMPTY))); assertThat(updatedTimestampMillisRange.getMin(), greaterThanOrEqualTo(resolution.convert(Instant.parse("2020-11-28T00:00:00Z")))); diff 
--git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 4789170e21cb4..5d29b9ce60f59 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -32,7 +32,7 @@ import org.elasticsearch.index.mapper.GeoShapeIndexer; import org.elasticsearch.index.mapper.GeoShapeParser; import org.elasticsearch.index.mapper.GeoShapeQueryable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; @@ -142,7 +142,6 @@ public GeoShapeWithDocValuesFieldMapper build(MapperBuilderContext context) { ); GeoShapeParser parser = new GeoShapeParser(geometryParser, orientation.get().value()); GeoShapeWithDocValuesFieldType ft = new GeoShapeWithDocValuesFieldType( - context.buildFullName(name), indexed.get(), hasDocValues.get(), orientation.get().value(), @@ -150,12 +149,13 @@ public GeoShapeWithDocValuesFieldMapper build(MapperBuilderContext context) { geoFormatterFactory, meta.get() ); + String fullName = context.buildFullName(name); return new GeoShapeWithDocValuesFieldMapper( name, - ft, + new MappedField(fullName, ft), multiFieldsBuilder.build(this, context), copyTo.build(), - new GeoShapeIndexer(orientation.get().value(), ft.name()), + new GeoShapeIndexer(orientation.get().value(), fullName), parser, this ); @@ -168,7 +168,6 @@ public static final class GeoShapeWithDocValuesFieldType extends AbstractShapeGe private final GeoFormatterFactory geoFormatterFactory; public 
GeoShapeWithDocValuesFieldType( - String name, boolean indexed, boolean hasDocValues, Orientation orientation, @@ -176,13 +175,14 @@ public GeoShapeWithDocValuesFieldType( GeoFormatterFactory geoFormatterFactory, Map meta ) { - super(name, indexed, false, hasDocValues, parser, orientation, meta); + super(indexed, false, hasDocValues, parser, orientation, meta); this.geoFormatterFactory = geoFormatterFactory; } - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); - return new AbstractLatLonShapeIndexFieldData.Builder(name(), GeoShapeValuesSourceType.instance(), GeoShapeDocValuesField::new); + @Override + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); + return new AbstractLatLonShapeIndexFieldData.Builder(name, GeoShapeValuesSourceType.instance(), GeoShapeDocValuesField::new); } @Override @@ -191,7 +191,13 @@ public String typeName() { } @Override - public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... geometries) { + public Query geoShapeQuery( + String name, + SearchExecutionContext context, + String fieldName, + ShapeRelation relation, + LatLonGeometry... 
geometries + ) { // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0) if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(Version.V_7_5_0)) { throw new QueryShardException( @@ -264,7 +270,7 @@ public Mapper.Builder parse(String name, Map node, MappingParser public GeoShapeWithDocValuesFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, GeoShapeIndexer indexer, @@ -273,7 +279,7 @@ public GeoShapeWithDocValuesFieldMapper( ) { super( simpleName, - mappedFieldType, + mappedField, builder.ignoreMalformed.get(), builder.coerce.get(), builder.ignoreZValue.get(), @@ -296,7 +302,7 @@ protected void index(DocumentParserContext context, Geometry geometry) throws IO context.doc().addAll(fields); } if (fieldType().hasDocValues()) { - String name = fieldType().name(); + String name = name(); BinaryGeoShapeDocValuesField docValuesField = (BinaryGeoShapeDocValuesField) context.doc().getByKey(name); if (docValuesField == null) { docValuesField = new BinaryGeoShapeDocValuesField(name); @@ -304,7 +310,7 @@ protected void index(DocumentParserContext context, Geometry geometry) throws IO } docValuesField.add(fields, geometry); } else if (fieldType().isIndexed()) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java index 5ce045d6f83d1..fb41a6b98996a 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java @@ -22,7 +22,7 @@ import org.elasticsearch.index.mapper.DocumentParserContext; import 
org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -105,15 +105,15 @@ public FieldMapper build(MapperBuilderContext context) { ignoreZValue.get().value(), ignoreMalformed.get().value() ); - PointFieldType ft = new PointFieldType( - context.buildFullName(name), - indexed.get(), - stored.get(), - hasDocValues.get(), + PointFieldType ft = new PointFieldType(indexed.get(), stored.get(), hasDocValues.get(), parser, meta.get()); + return new PointFieldMapper( + name, + new MappedField(context.buildFullName(name), ft), + multiFieldsBuilder.build(this, context), + copyTo.build(), parser, - meta.get() + this ); - return new PointFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), parser, this); } } @@ -124,7 +124,7 @@ public FieldMapper build(MapperBuilderContext context) { public PointFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, CartesianPointParser parser, @@ -132,7 +132,7 @@ public PointFieldMapper( ) { super( simpleName, - mappedFieldType, + mappedField, multiFields, builder.ignoreMalformed.get(), builder.ignoreZValue.get(), @@ -146,15 +146,15 @@ public PointFieldMapper( @Override protected void index(DocumentParserContext context, CartesianPoint point) throws IOException { if (fieldType().isIndexed()) { - context.doc().add(new XYPointField(fieldType().name(), (float) point.getX(), (float) point.getY())); + context.doc().add(new XYPointField(name(), (float) point.getX(), (float) point.getY())); } if (fieldType().hasDocValues()) { - context.doc().add(new XYDocValuesField(fieldType().name(), (float) point.getX(), (float) 
point.getY())); + context.doc().add(new XYDocValuesField(name(), (float) point.getX(), (float) point.getY())); } else if (fieldType().isStored() || fieldType().isIndexed()) { - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } if (fieldType().isStored()) { - context.doc().add(new StoredField(fieldType().name(), point.toString())); + context.doc().add(new StoredField(name(), point.toString())); } } @@ -165,7 +165,7 @@ protected String contentType() { @Override public PointFieldType fieldType() { - return (PointFieldType) mappedFieldType; + return (PointFieldType) mappedField.type(); } @Override @@ -178,14 +178,13 @@ public static class PointFieldType extends AbstractGeometryFieldType meta ) { - super(name, indexed, stored, hasDocValues, parser, meta); + super(indexed, stored, hasDocValues, parser, meta); this.queryProcessor = new ShapeQueryPointProcessor(); } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index d7c319d9432eb..18063bd477bf7 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -20,7 +20,7 @@ import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.spatial.index.query.ShapeQueryProcessor; @@ -92,14 +92,15 @@ public ShapeFieldMapper build(MapperBuilderContext context) { ignoreZValue.get().value() ); Parser parser 
= new ShapeParser(geometryParser); - ShapeFieldType ft = new ShapeFieldType( - context.buildFullName(name), - indexed.get(), - orientation.get().value(), + ShapeFieldType ft = new ShapeFieldType(indexed.get(), orientation.get().value(), parser, meta.get()); + return new ShapeFieldMapper( + name, + new MappedField(context.buildFullName(name), ft), + multiFieldsBuilder.build(this, context), + copyTo.build(), parser, - meta.get() + this ); - return new ShapeFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), parser, this); } } @@ -111,8 +112,8 @@ public static final class ShapeFieldType extends AbstractShapeGeometryFieldType< private final ShapeQueryProcessor queryProcessor; - public ShapeFieldType(String name, boolean indexed, Orientation orientation, Parser parser, Map meta) { - super(name, indexed, false, false, parser, orientation, meta); + public ShapeFieldType(boolean indexed, Orientation orientation, Parser parser, Map meta) { + super(indexed, false, false, parser, orientation, meta); this.queryProcessor = new ShapeQueryProcessor(); } @@ -137,7 +138,7 @@ protected Function, List> getFormatter(String format) { public ShapeFieldMapper( String simpleName, - MappedFieldType mappedFieldType, + MappedField mappedField, MultiFields multiFields, CopyTo copyTo, Parser parser, @@ -145,7 +146,7 @@ public ShapeFieldMapper( ) { super( simpleName, - mappedFieldType, + mappedField, builder.ignoreMalformed.get(), builder.coerce.get(), builder.ignoreZValue.get(), @@ -155,7 +156,7 @@ public ShapeFieldMapper( parser ); this.builder = builder; - this.indexer = new ShapeIndexer(mappedFieldType.name()); + this.indexer = new ShapeIndexer(mappedField.name()); } @Override @@ -164,7 +165,7 @@ protected void index(DocumentParserContext context, Geometry geometry) throws IO return; } context.doc().addAll(indexer.indexShape(geometry)); - context.addToFieldNames(fieldType().name()); + context.addToFieldNames(name()); } @Override diff --git 
a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilder.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilder.java index 663ffa5c8be48..1a09297380483 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilder.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilder.java @@ -22,7 +22,7 @@ import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoPointScriptFieldType; import org.elasticsearch.index.mapper.GeoShapeQueryable; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; @@ -51,13 +51,19 @@ public enum Grid { private static final String name = "geohash"; @Override - protected Query toQuery(SearchExecutionContext context, String fieldName, MappedFieldType fieldType, String id) { - if (fieldType instanceof GeoShapeQueryable geoShapeQueryable) { - return geoShapeQueryable.geoShapeQuery(context, fieldName, ShapeRelation.INTERSECTS, getQueryHash(id)); + protected Query toQuery(SearchExecutionContext context, String fieldName, MappedField mappedField, String id) { + if (mappedField.type()instanceof GeoShapeQueryable geoShapeQueryable) { + return geoShapeQueryable.geoShapeQuery( + mappedField.name(), + context, + fieldName, + ShapeRelation.INTERSECTS, + getQueryHash(id) + ); } throw new QueryShardException( context, - "Field [" + fieldName + "] is of unsupported type [" + fieldType.typeName() + "] for [" + NAME + "] query" + "Field [" + fieldName + "] is of unsupported type [" + mappedField.typeName() + "] for [" + NAME + "] query" ); } @@ -76,13 +82,19 @@ protected void validate(String gridId) { 
private static final String name = "geotile"; @Override - protected Query toQuery(SearchExecutionContext context, String fieldName, MappedFieldType fieldType, String id) { - if (fieldType instanceof GeoShapeQueryable geoShapeQueryable) { - return geoShapeQueryable.geoShapeQuery(context, fieldName, ShapeRelation.INTERSECTS, getQueryTile(id)); + protected Query toQuery(SearchExecutionContext context, String fieldName, MappedField mappedField, String id) { + if (mappedField.type()instanceof GeoShapeQueryable geoShapeQueryable) { + return geoShapeQueryable.geoShapeQuery( + mappedField.name(), + context, + fieldName, + ShapeRelation.INTERSECTS, + getQueryTile(id) + ); } throw new QueryShardException( context, - "Field [" + fieldName + "] is of unsupported type [" + fieldType.typeName() + "] for [" + NAME + "] query" + "Field [" + fieldName + "] is of unsupported type [" + mappedField.typeName() + "] for [" + NAME + "] query" ); } @@ -101,16 +113,16 @@ protected void validate(String gridId) { private static final String name = "geohex"; @Override - protected Query toQuery(SearchExecutionContext context, String fieldName, MappedFieldType fieldType, String id) { + protected Query toQuery(SearchExecutionContext context, String fieldName, MappedField mappedField, String id) { H3LatLonGeometry geometry = new H3LatLonGeometry(id); - if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType pointFieldType) { - return pointFieldType.geoShapeQuery(context, fieldName, ShapeRelation.INTERSECTS, geometry); - } else if (fieldType instanceof GeoPointScriptFieldType scriptType) { - return scriptType.geoShapeQuery(context, fieldName, ShapeRelation.INTERSECTS, geometry); + if (mappedField.type()instanceof GeoPointFieldMapper.GeoPointFieldType pointFieldType) { + return pointFieldType.geoShapeQuery(mappedField.name(), context, fieldName, ShapeRelation.INTERSECTS, geometry); + } else if (mappedField.type()instanceof GeoPointScriptFieldType scriptType) { + return 
scriptType.geoShapeQuery(mappedField.name(), context, fieldName, ShapeRelation.INTERSECTS, geometry); } throw new QueryShardException( context, - "Field [" + fieldName + "] is of unsupported type [" + fieldType.typeName() + "] for [" + NAME + "] query" + "Field [" + fieldName + "] is of unsupported type [" + mappedField.typeName() + "] for [" + NAME + "] query" ); } @@ -133,7 +145,7 @@ protected void validate(String gridId) { } }; - protected abstract Query toQuery(SearchExecutionContext context, String fieldName, MappedFieldType fieldType, String id); + protected abstract Query toQuery(SearchExecutionContext context, String fieldName, MappedField mappedField, String id); protected abstract String getName(); @@ -263,15 +275,15 @@ public boolean ignoreUnmapped() { @Override public Query doToQuery(SearchExecutionContext context) { - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType == null) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null) { if (ignoreUnmapped) { return new MatchNoDocsQuery(); } else { throw new QueryShardException(context, "failed to find geo field [" + fieldName + "]"); } } - return grid.toQuery(context, fieldName, fieldType, gridId); + return grid.toQuery(context, fieldName, mappedField, gridId); } @Override diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java index e50c15ea12c07..1d9622e824c05 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.geometry.Geometry; +import 
org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.AbstractGeometryQueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; @@ -87,15 +88,15 @@ protected ShapeQueryBuilder newShapeQueryBuilder(String fieldName, Supplier { SearchExecutionContext context; - MappedFieldType fieldType; + MappedField mappedField; String fieldName; ShapeRelation relation; ShapeVisitor(SearchExecutionContext context, String fieldName, ShapeRelation relation) { this.context = context; - this.fieldType = context.getFieldType(fieldName); + this.mappedField = context.getMappedField(fieldName); this.fieldName = fieldName; this.relation = relation; } @@ -77,7 +79,7 @@ private class ShapeVisitor implements GeometryVisitor { public Query visit(Circle circle) { XYCircle xyCircle = ShapeUtils.toLuceneXYCircle(circle); Query query = XYPointField.newDistanceQuery(fieldName, xyCircle.getX(), xyCircle.getY(), xyCircle.getRadius()); - if (fieldType.hasDocValues()) { + if (mappedField.hasDocValues()) { Query dvQuery = XYDocValuesField.newSlowDistanceQuery(fieldName, xyCircle.getX(), xyCircle.getY(), xyCircle.getRadius()); query = new IndexOrDocValuesQuery(query, dvQuery); } @@ -126,7 +128,7 @@ public Query visit(MultiPolygon multiPolygon) { lucenePolygons[i] = ShapeUtils.toLuceneXYPolygon(multiPolygon.get(i)); } Query query = XYPointField.newPolygonQuery(fieldName, lucenePolygons); - if (fieldType.hasDocValues()) { + if (mappedField.hasDocValues()) { Query dvQuery = XYDocValuesField.newSlowPolygonQuery(fieldName, lucenePolygons); query = new IndexOrDocValuesQuery(query, dvQuery); } @@ -143,7 +145,7 @@ public Query visit(Point point) { public Query visit(Polygon polygon) { org.apache.lucene.geo.XYPolygon lucenePolygon = ShapeUtils.toLuceneXYPolygon(polygon); Query query = XYPointField.newPolygonQuery(fieldName, lucenePolygon); - if (fieldType.hasDocValues()) { + if (mappedField.hasDocValues()) { Query 
dvQuery = XYDocValuesField.newSlowPolygonQuery(fieldName, lucenePolygon); query = new IndexOrDocValuesQuery(query, dvQuery); } @@ -154,7 +156,7 @@ public Query visit(Polygon polygon) { public Query visit(Rectangle r) { XYRectangle xyRectangle = ShapeUtils.toLuceneXYRectangle(r); Query query = XYPointField.newBoxQuery(fieldName, xyRectangle.minX, xyRectangle.maxX, xyRectangle.minY, xyRectangle.maxY); - if (fieldType.hasDocValues()) { + if (mappedField.hasDocValues()) { Query dvQuery = XYDocValuesField.newSlowBoxQuery( fieldName, xyRectangle.minX, diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java index 5bd7d83da7820..19ac1fc434463 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java @@ -24,7 +24,7 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.Rectangle; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.spatial.common.ShapeUtils; @@ -48,11 +48,13 @@ public Query shapeQuery(Geometry shape, String fieldName, ShapeRelation relation } private void validateIsShapeFieldType(String fieldName, SearchExecutionContext context) { - MappedFieldType fieldType = context.getFieldType(fieldName); - if (fieldType instanceof ShapeFieldMapper.ShapeFieldType == false) { + MappedField mappedField = context.getMappedField(fieldName); + if (mappedField == null || mappedField.type() instanceof ShapeFieldMapper.ShapeFieldType == false) { throw new QueryShardException( 
context, - "Expected " + ShapeFieldMapper.CONTENT_TYPE + " field type for Field [" + fieldName + "] but found " + fieldType.typeName() + "Expected " + ShapeFieldMapper.CONTENT_TYPE + " field type for Field [" + fieldName + "] but found " + (mappedField == null + ? "absent field" + : mappedField.typeName()) ); } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java index 139f462575349..4ea79907a8ca3 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java @@ -54,7 +54,7 @@ public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFa "Expected geo_point or geo_shape type on field [" + fieldContext.field() + "], but got [" - + fieldContext.fieldType().typeName() + + fieldContext.mappedField().typeName() + "]" ); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianFieldMapperTests.java index 9aed0477d81b1..c99647e68b863 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianFieldMapperTests.java @@ -9,7 +9,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperParsingException; import 
org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; @@ -37,7 +37,7 @@ protected Collection getPlugins() { } @Override - protected void assertSearchable(MappedFieldType fieldType) {} + protected void assertSearchable(MappedField mappedField) {} @Override protected void minimalMapping(XContentBuilder b) throws IOException { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 7085da9b8b1b7..bdc29fa5262de 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -418,7 +419,7 @@ public String toXContentString(GeoShapeWithDocValuesFieldMapper mapper) throws I } @Override - protected void assertSearchable(MappedFieldType fieldType) { + protected void assertSearchable(MappedField mappedField) { } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java index 22033b2108ad6..c9521aa0c3af9 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java +++ 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.xpack.vectortile.SpatialGeometryFormatterExtension; import org.elasticsearch.xpack.vectortile.feature.FeatureFactory; @@ -32,13 +32,9 @@ public void testFetchSourceValue() throws IOException { final GeoFormatterFactory geoFormatterFactory = new GeoFormatterFactory<>( new SpatialGeometryFormatterExtension().getGeometryFormatterFactories() ); - final MappedFieldType mapper = new GeoShapeWithDocValuesFieldMapper.Builder( - "field", - Version.CURRENT, - false, - false, - geoFormatterFactory - ).build(MapperBuilderContext.ROOT).fieldType(); + final MappedField mapper = new GeoShapeWithDocValuesFieldMapper.Builder("field", Version.CURRENT, false, false, geoFormatterFactory) + .build(MapperBuilderContext.ROOT) + .field(); Map jsonLineString = Map.of("type", "LineString", "coordinates", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); @@ -102,13 +98,9 @@ private void fetchVectorTile(Geometry geometry) throws IOException { final GeoFormatterFactory geoFormatterFactory = new GeoFormatterFactory<>( new SpatialGeometryFormatterExtension().getGeometryFormatterFactories() ); - final MappedFieldType mapper = new GeoShapeWithDocValuesFieldMapper.Builder( - "field", - Version.CURRENT, - false, - false, - geoFormatterFactory - ).build(MapperBuilderContext.ROOT).fieldType(); + final MappedField mapper = new GeoShapeWithDocValuesFieldMapper.Builder("field", Version.CURRENT, false, false, geoFormatterFactory) + 
.build(MapperBuilderContext.ROOT) + .field(); final int z = randomIntBetween(1, 10); int x = randomIntBetween(0, (1 << z) - 1); int y = randomIntBetween(0, (1 << z) - 1); @@ -154,13 +146,9 @@ public void testFetchSourceValueDateLine() throws IOException { final GeoFormatterFactory geoFormatterFactory = new GeoFormatterFactory<>( new SpatialGeometryFormatterExtension().getGeometryFormatterFactories() ); - final MappedFieldType mapper = new GeoShapeWithDocValuesFieldMapper.Builder( - "field", - Version.CURRENT, - false, - false, - geoFormatterFactory - ).build(MapperBuilderContext.ROOT).fieldType(); + final MappedField mapper = new GeoShapeWithDocValuesFieldMapper.Builder("field", Version.CURRENT, false, false, geoFormatterFactory) + .build(MapperBuilderContext.ROOT) + .field(); // Test a polygon crossing the dateline Object sourceValue = "POLYGON((170 -10, -170 -10, -170 10, 170 10, 170 -10))"; String polygonDateLine = "MULTIPOLYGON (((180.0 -10.0, 180.0 10.0, 170.0 10.0, 170.0 -10.0, 180.0 -10.0))," diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java index 1467a10850188..fed11067dda9a 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.spatial.index.mapper; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import java.io.IOException; @@ -18,7 +18,7 @@ public class PointFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new 
PointFieldMapper.Builder("field", false).build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new PointFieldMapper.Builder("field", false).build(MapperBuilderContext.ROOT).field(); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(42.0, 27.1)); String wktPoint = "POINT (42.0 27.1)"; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java index efa127a5aa3b4..655ad5373fd95 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.spatial.index.mapper; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import java.io.IOException; @@ -18,7 +18,7 @@ public class ShapeFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new ShapeFieldMapper.Builder("field", false, true).build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new ShapeFieldMapper.Builder("field", false, true).build(MapperBuilderContext.ROOT).field(); Map jsonLineString = Map.of("type", "LineString", "coordinates", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.3, 15.0)); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilderTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilderTests.java index cd174d4c6fb0a..97e8113ac9f3b 100644 --- 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilderTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoGridQueryBuilderTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.geometry.utils.Geohash; import org.elasticsearch.h3.H3; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; @@ -114,10 +114,10 @@ public void testExceptionOnMissingTypes() { @Override protected void doAssertLuceneQuery(GeoGridQueryBuilder queryBuilder, Query query, SearchExecutionContext context) { - final MappedFieldType fieldType = context.getFieldType(queryBuilder.fieldName()); - if (fieldType == null) { + final MappedField mappedField = context.getMappedField(queryBuilder.fieldName()); + if (mappedField == null) { assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery); - } else if (fieldType.hasDocValues()) { + } else if (mappedField.hasDocValues()) { assertEquals(IndexOrDocValuesQuery.class, query.getClass()); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeWithDocValuesQueryBuilderTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeWithDocValuesQueryBuilderTests.java index 48f1c4401d3be..574e574189783 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeWithDocValuesQueryBuilderTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeWithDocValuesQueryBuilderTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.geo.GeometryTestUtils; import 
org.elasticsearch.geometry.Geometry; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -69,8 +69,8 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder() { protected void doAssertLuceneQuery(GeoShapeQueryBuilder queryBuilder, Query query, SearchExecutionContext context) { assertThat(true, equalTo(query instanceof ConstantScoreQuery)); Query geoShapeQuery = ((ConstantScoreQuery) query).getQuery(); - MappedFieldType fieldType = context.getFieldType("test"); - boolean IndexOrDocValuesQuery = fieldType.hasDocValues(); + MappedField mappedField = context.getMappedField("test"); + boolean IndexOrDocValuesQuery = mappedField.hasDocValues(); assertThat(IndexOrDocValuesQuery, equalTo(geoShapeQuery instanceof IndexOrDocValuesQuery)); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java index fb6d33c9335c5..986752d196cda 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.ingest.IngestDocument; @@ -217,7 +218,6 @@ public void testGeoShapeQueryAcrossDateline() throws IOException { Geometry geometry = SpatialUtils.createRegularGeoShapePolygon(circle, 
numSides); GeoShapeWithDocValuesFieldType shapeType = new GeoShapeWithDocValuesFieldType( - fieldName, true, false, Orientation.RIGHT, @@ -225,11 +225,13 @@ public void testGeoShapeQueryAcrossDateline() throws IOException { null, Collections.emptyMap() ); + MappedField mappedField = new MappedField(fieldName, shapeType); SearchExecutionContext mockedContext = mock(SearchExecutionContext.class); - when(mockedContext.getFieldType(any())).thenReturn(shapeType); - Query sameShapeQuery = shapeType.geoShapeQuery(mockedContext, fieldName, ShapeRelation.INTERSECTS, geometry); + when(mockedContext.getMappedField(any())).thenReturn(mappedField); + Query sameShapeQuery = shapeType.geoShapeQuery(fieldName, mockedContext, fieldName, ShapeRelation.INTERSECTS, geometry); Query pointOnDatelineQuery = shapeType.geoShapeQuery( + fieldName, mockedContext, fieldName, ShapeRelation.INTERSECTS, @@ -258,11 +260,12 @@ public void testShapeQuery() throws IOException { int numSides = randomIntBetween(4, 1000); Geometry geometry = SpatialUtils.createRegularShapePolygon(circle, numSides); - MappedFieldType shapeType = new ShapeFieldType(fieldName, true, Orientation.RIGHT, null, Collections.emptyMap()); + MappedFieldType shapeType = new ShapeFieldType(true, Orientation.RIGHT, null, Collections.emptyMap()); + MappedField mappedField = new MappedField(fieldName, shapeType); ShapeQueryProcessor processor = new ShapeQueryProcessor(); SearchExecutionContext mockedContext = mock(SearchExecutionContext.class); - when(mockedContext.getFieldType(any())).thenReturn(shapeType); + when(mockedContext.getMappedField(any())).thenReturn(mappedField); Query sameShapeQuery = processor.shapeQuery(geometry, fieldName, ShapeRelation.INTERSECTS, mockedContext); Query centerPointQuery = processor.shapeQuery( new Point(circle.getLon(), circle.getLat()), diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java index ad8d47781a4aa..0f167b2fac110 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -373,18 +373,14 @@ private void testCase( IndexSearcher indexSearcher = newIndexSearcher(indexReader); try { - MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("value_field"); - MappedFieldType groupFieldType = new KeywordFieldMapper.KeywordFieldType("group_id", false, true, Collections.emptyMap()); - MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType("sort_field", fieldNumberType); - - Terms terms = searchAndReduce( - indexSearcher, - new MatchAllDocsQuery(), - aggregationBuilder, - fieldType, - fieldType2, - groupFieldType + MappedField field = new MappedField("value_field", new GeoPointFieldMapper.GeoPointFieldType()); + MappedField groupField = new MappedField( + "group_id", + new KeywordFieldMapper.KeywordFieldType(false, true, Collections.emptyMap()) ); + MappedField field2 = new MappedField("sort_field", new NumberFieldMapper.NumberFieldType(fieldNumberType)); + + Terms terms = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, field, field2, groupField); verify.accept(terms); } finally { indexReader.close(); diff --git 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregatorTests.java index 37f10782b17d7..1672919d918dc 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregatorTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.h3.CellBoundary; import org.elasticsearch.h3.H3; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; @@ -115,7 +115,7 @@ protected Rectangle getTile(double lng, double lat, int precision) { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return createBuilder("foo").field(fieldName); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoShapeGeoGridTestCase.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoShapeGeoGridTestCase.java index be9b6a60058fc..15660d4270266 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoShapeGeoGridTestCase.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoShapeGeoGridTestCase.java @@ -25,6 +25,7 @@ 
import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -107,7 +108,7 @@ protected List getSupportedValuesSourceTypes() { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return createBuilder("foo").field(fieldName); } @@ -275,17 +276,9 @@ private void testCase( assertThat(aggregationBuilder.geoBoundingBox(), equalTo(geoBoundingBox)); } - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( - FIELD_NAME, - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() - ); + MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()); - Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, new MappedField(FIELD_NAME, fieldType)); aggregator.preCollection(); indexSearcher.search(query, aggregator); aggregator.postCollection(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java index 8db7c84a976be..4c31336b570e1 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java +++ 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.geometry.Point; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -55,18 +55,13 @@ public void testEmpty() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "field", - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) ); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -88,18 +83,13 @@ public void testUnmappedFieldWithDocs() throws Exception { GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("non_existent").wrapLongitude(false); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "field", - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() 
+ new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) ); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -117,14 +107,9 @@ public void testMissing() throws Exception { doc.add(new NumericDocValuesField("not_field", 1000L)); w.addDocument(doc); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "field", - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) ); Point point = GeometryTestUtils.randomPoint(false); @@ -138,7 +123,7 @@ public void testMissing() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertThat(bounds.top, equalTo(lat)); assertThat(bounds.bottom, equalTo(lat)); assertThat(bounds.posLeft, equalTo(lon >= 0 ? 
lon : Double.POSITIVE_INFINITY)); @@ -155,14 +140,9 @@ public void testInvalidMissing() throws Exception { doc.add(new NumericDocValuesField("not_field", 1000L)); w.addDocument(doc); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "field", - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) ); GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field") @@ -172,7 +152,7 @@ public void testInvalidMissing() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType) + () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField) ); assertThat(exception.getMessage(), startsWith("Unknown geometry type")); } @@ -220,18 +200,13 @@ public void testRandomShapes() throws Exception { } GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "field", - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) ); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE)); assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE)); assertThat(bounds.posLeft, 
closeTo(posLeft, GEOHASH_TOLERANCE)); @@ -244,7 +219,7 @@ public void testRandomShapes() throws Exception { } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new GeoBoundsAggregationBuilder("foo").field(fieldName); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java index 1efd37f802aec..83420456c5f85 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -59,18 +59,13 @@ public void testEmpty() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("field"); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "field", - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, 
Collections.emptyMap()) ); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -87,20 +82,18 @@ public void testUnmapped() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "another_field", - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) ); - InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertNull(result.centroid()); - fieldType = new GeoShapeWithDocValuesFieldType("field", true, true, Orientation.RIGHT, null, null, Collections.emptyMap()); - result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + mappedField = new MappedField( + "field", + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) + ); + result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -121,16 +114,11 @@ public void testUnmappedWithMissing() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "another_field", - 
true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) ); - InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertThat(result.centroid(), equalTo(expectedCentroid)); assertTrue(AggregationInspectionHelper.hasValue(result)); } @@ -195,19 +183,14 @@ public void testSingleValuedField() throws Exception { } private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) throws IOException { - MappedFieldType fieldType = new GeoShapeWithDocValuesFieldType( + MappedField mappedField = new MappedField( "field", - true, - true, - Orientation.RIGHT, - null, - null, - Collections.emptyMap() + new GeoShapeWithDocValuesFieldType(true, true, Orientation.RIGHT, null, null, Collections.emptyMap()) ); GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg").field("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, mappedField); assertEquals("my_agg", result.getName()); GeoPoint centroid = result.centroid(); @@ -219,7 +202,7 @@ private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) thro } @Override - protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + protected AggregationBuilder createAggBuilderForTypeTest(MappedField mappedField, String fieldName) { return new GeoCentroidAggregationBuilder("foo").field(fieldName); } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java 
b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 425ff07a0418e..732e3949166b5 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -58,6 +58,7 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.SourceValueFetcher; @@ -230,7 +231,10 @@ Builder nullValue(String nullValue) { public WildcardFieldMapper build(MapperBuilderContext context) { return new WildcardFieldMapper( name, - new WildcardFieldType(context.buildFullName(name), nullValue.get(), ignoreAbove.get(), indexVersionCreated, meta.get()), + new MappedField( + context.buildFullName(name), + new WildcardFieldType(nullValue.get(), ignoreAbove.get(), indexVersionCreated, meta.get()) + ), ignoreAbove.get(), multiFieldsBuilder.build(this, context), copyTo.build(), @@ -254,8 +258,8 @@ public static final class WildcardFieldType extends MappedFieldType { private final int ignoreAbove; private final NamedAnalyzer analyzer; - private WildcardFieldType(String name, String nullValue, int ignoreAbove, Version version, Map meta) { - super(name, true, false, true, Defaults.TEXT_SEARCH_INFO, meta); + private WildcardFieldType(String nullValue, int ignoreAbove, Version version, Map meta) { + super(true, false, true, Defaults.TEXT_SEARCH_INFO, meta); if (version.onOrAfter(Version.V_7_10_0)) { this.analyzer = WILDCARD_ANALYZER_7_10; } else { @@ -266,17 +270,28 @@ private WildcardFieldType(String name, String nullValue, int ignoreAbove, Versio } @Override - public boolean 
mayExistInIndex(SearchExecutionContext context) { - return context.fieldExistsInIndex(name()); + public boolean mayExistInIndex(String name, SearchExecutionContext context) { + return context.fieldExistsInIndex(name); } @Override - public Query normalizedWildcardQuery(String value, MultiTermQuery.RewriteMethod method, SearchExecutionContext context) { - return wildcardQuery(value, method, false, context); + public Query normalizedWildcardQuery( + String name, + String value, + MultiTermQuery.RewriteMethod method, + SearchExecutionContext context + ) { + return wildcardQuery(name, value, method, false, context); } @Override - public Query wildcardQuery(String wildcardPattern, RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context) { + public Query wildcardQuery( + String name, + String wildcardPattern, + RewriteMethod method, + boolean caseInsensitive, + SearchExecutionContext context + ) { String ngramIndexPattern = addLineEndChars(wildcardPattern); // Break search term into tokens @@ -290,14 +305,14 @@ public Query wildcardQuery(String wildcardPattern, RewriteMethod method, boolean switch (c) { case WildcardQuery.WILDCARD_STRING: if (sequence.length() > 0) { - getNgramTokens(tokens, sequence.toString()); + getNgramTokens(name, tokens, sequence.toString()); sequence = new StringBuilder(); } numWildcardStrings++; break; case WildcardQuery.WILDCARD_CHAR: if (sequence.length() > 0) { - getNgramTokens(tokens, sequence.toString()); + getNgramTokens(name, tokens, sequence.toString()); sequence = new StringBuilder(); } numWildcardChars++; @@ -320,7 +335,7 @@ public Query wildcardQuery(String wildcardPattern, RewriteMethod method, boolean } if (sequence.length() > 0) { - getNgramTokens(tokens, sequence.toString()); + getNgramTokens(name, tokens, sequence.toString()); } BooleanQuery.Builder rewritten = new BooleanQuery.Builder(); @@ -329,26 +344,27 @@ public Query wildcardQuery(String wildcardPattern, RewriteMethod method, boolean if (clauseCount >= 
MAX_CLAUSES_IN_APPROXIMATION_QUERY) { break; } - addClause(string, rewritten, Occur.MUST); + addClause(name, string, rewritten, Occur.MUST); clauseCount++; } Automaton automaton = caseInsensitive - ? AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term(name(), wildcardPattern), Integer.MAX_VALUE) - : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern)); + ? AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term(name, wildcardPattern), Integer.MAX_VALUE) + : WildcardQuery.toAutomaton(new Term(name, wildcardPattern)); if (clauseCount > 0) { // We can accelerate execution with the ngram query BooleanQuery approxQuery = rewritten.build(); - return new BinaryDvConfirmedAutomatonQuery(approxQuery, name(), wildcardPattern, automaton); + return new BinaryDvConfirmedAutomatonQuery(approxQuery, name, wildcardPattern, automaton); } else if (numWildcardChars == 0 || numWildcardStrings > 0) { // We have no concrete characters and we're not a pure length query e.g. ??? - return new FieldExistsQuery(name()); + return new FieldExistsQuery(name); } - return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), name(), wildcardPattern, automaton); + return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), name, wildcardPattern, automaton); } @Override public Query regexpQuery( + String name, String value, int syntaxFlags, int matchFlags, @@ -366,19 +382,19 @@ public Query regexpQuery( a = Operations.determinize(a, maxDeterminizedStates); a = MinimizationOperations.minimize(a, maxDeterminizedStates); if (Operations.isTotal(a)) { // Will match all - return existsQuery(context); + return existsQuery(name, context); } RegExp ngramRegex = new RegExp(addLineEndChars(value), syntaxFlags, matchFlags); Query approxBooleanQuery = toApproximationQuery(ngramRegex); - Query approxNgramQuery = rewriteBoolToNgramQuery(approxBooleanQuery); + Query approxNgramQuery = rewriteBoolToNgramQuery(name, approxBooleanQuery); RegExp regex = new RegExp(value, 
syntaxFlags, matchFlags); Automaton automaton = regex.toAutomaton(maxDeterminizedStates); // We can accelerate execution with the ngram query - return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name(), value, automaton); + return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name, value, automaton); } // Convert a regular expression to a simplified query consisting of BooleanQuery and TermQuery objects @@ -523,7 +539,7 @@ private static String toLowerCase(String string) { } // Takes a BooleanQuery + TermQuery tree representing query logic and rewrites using ngrams of appropriate size. - private Query rewriteBoolToNgramQuery(Query approxQuery) { + private Query rewriteBoolToNgramQuery(String name, Query approxQuery) { // TODO optimise more intelligently so we: // 1) favour full-length term queries eg abc over short eg a* when pruning too many clauses. // 2) make MAX_CLAUSES_IN_APPROXIMATION_QUERY a global cap rather than per-boolean clause. @@ -534,7 +550,7 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { BooleanQuery.Builder rewritten = new BooleanQuery.Builder(); int clauseCount = 0; for (BooleanClause clause : bq) { - Query q = rewriteBoolToNgramQuery(clause.getQuery()); + Query q = rewriteBoolToNgramQuery(name, clause.getQuery()); if (q != null) { if (clause.getOccur().equals(Occur.FILTER)) { // Can't drop "should" clauses because it can elevate a sibling optional item @@ -560,10 +576,10 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { // Break term into tokens Set tokens = new LinkedHashSet<>(); - getNgramTokens(tokens, s); + getNgramTokens(name, tokens, s); BooleanQuery.Builder rewritten = new BooleanQuery.Builder(); for (String string : tokens) { - addClause(string, rewritten, Occur.FILTER); + addClause(name, string, rewritten, Occur.FILTER); } return rewritten.build(); } @@ -573,14 +589,14 @@ private Query rewriteBoolToNgramQuery(Query approxQuery) { throw new IllegalStateException("Invalid query type found 
parsing regex query:" + approxQuery); } - protected void getNgramTokens(Set tokens, String fragment) { + protected void getNgramTokens(String name, Set tokens, String fragment) { if (fragment.equals(TOKEN_START_STRING) || fragment.equals(TOKEN_END_STRING)) { // If a regex is a form of match-all e.g. ".*" we only produce the token start/end markers as search // terms which can be ignored. return; } // Break fragment into multiple Ngrams - TokenStream tokenizer = analyzer.tokenStream(name(), fragment); + TokenStream tokenizer = analyzer.tokenStream(name, fragment); CharTermAttribute termAtt = tokenizer.addAttribute(CharTermAttribute.class); int foundTokens = 0; try { @@ -606,7 +622,7 @@ protected void getNgramTokens(Set tokens, String fragment) { } } - private void addClause(String token, BooleanQuery.Builder bqBuilder, Occur occur) { + private void addClause(String name, String token, BooleanQuery.Builder bqBuilder, Occur occur) { assert token.codePointCount(0, token.length()) <= NGRAM_SIZE; int tokenSize = token.codePointCount(0, token.length()); if (tokenSize < 2 || token.equals(WildcardFieldMapper.TOKEN_END_STRING)) { @@ -616,10 +632,10 @@ private void addClause(String token, BooleanQuery.Builder bqBuilder, Occur occur return; } if (tokenSize == NGRAM_SIZE) { - TermQuery tq = new TermQuery(new Term(name(), token)); + TermQuery tq = new TermQuery(new Term(name, token)); bqBuilder.add(new BooleanClause(tq, occur)); } else { - PrefixQuery wq = new PrefixQuery(new Term(name(), token)); + PrefixQuery wq = new PrefixQuery(new Term(name, token)); wq.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE); bqBuilder.add(new BooleanClause(wq, occur)); } @@ -627,6 +643,7 @@ private void addClause(String token, BooleanQuery.Builder bqBuilder, Occur occur @Override public Query rangeQuery( + String name, Object lowerTerm, Object upperTerm, boolean includeLower, @@ -659,7 +676,7 @@ public Query rangeQuery( if (commonPrefix.length() > 0) { Set tokens = new HashSet<>(); - 
getNgramTokens(tokens, commonPrefix.toString()); + getNgramTokens(name, tokens, commonPrefix.toString()); BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder(); for (String token : tokens) { int tokenSize = token.codePointCount(0, token.length()); @@ -668,10 +685,10 @@ public Query rangeQuery( } if (tokenSize == NGRAM_SIZE) { - TermQuery tq = new TermQuery(new Term(name(), token)); + TermQuery tq = new TermQuery(new Term(name, token)); bqBuilder.add(new BooleanClause(tq, Occur.FILTER)); } else { - PrefixQuery wq = new PrefixQuery(new Term(name(), token)); + PrefixQuery wq = new PrefixQuery(new Term(name, token)); wq.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE); bqBuilder.add(new BooleanClause(wq, Occur.FILTER)); } @@ -685,13 +702,14 @@ public Query rangeQuery( Automaton automaton = TermRangeQuery.toAutomaton(lower, upper, includeLower, includeUpper); if (accelerationQuery == null) { - return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), name(), lower + "-" + upper, automaton); + return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), name, lower + "-" + upper, automaton); } - return new BinaryDvConfirmedAutomatonQuery(accelerationQuery, name(), lower + "-" + upper, automaton); + return new BinaryDvConfirmedAutomatonQuery(accelerationQuery, name, lower + "-" + upper, automaton); } @Override public Query fuzzyQuery( + String name, Object value, Fuzziness fuzziness, int prefixLength, @@ -711,13 +729,13 @@ public Query fuzzyQuery( Set prefixTokens = new LinkedHashSet<>(); postPrefixString = searchTerm.substring(prefixLength); String prefixCandidate = TOKEN_START_OR_END_CHAR + searchTerm.substring(0, prefixLength); - getNgramTokens(prefixTokens, prefixCandidate); + getNgramTokens(name, prefixTokens, prefixCandidate); for (String prefixToken : prefixTokens) { - addClause(prefixToken, approxBuilder, Occur.MUST); + addClause(name, prefixToken, approxBuilder, Occur.MUST); } } // Tokenize all content after the prefix - TokenStream 
tokenizer = analyzer.tokenStream(name(), postPrefixString); + TokenStream tokenizer = analyzer.tokenStream(name, postPrefixString); CharTermAttribute termAtt = tokenizer.addAttribute(CharTermAttribute.class); ArrayList postPrefixTokens = new ArrayList<>(); String firstToken = null; @@ -744,7 +762,7 @@ public Query fuzzyQuery( BooleanQuery.Builder ngramBuilder = new BooleanQuery.Builder(); int numClauses = 0; for (String token : postPrefixTokens) { - addClause(token, ngramBuilder, Occur.SHOULD); + addClause(name, token, ngramBuilder, Occur.SHOULD); numClauses++; } @@ -759,17 +777,17 @@ public Query fuzzyQuery( // Verification query FuzzyQuery fq = new FuzzyQuery( - new Term(name(), searchTerm), + new Term(name, searchTerm), fuzziness.asDistance(searchTerm), prefixLength, maxExpansions, transpositions ); if (ngramQ.clauses().size() == 0) { - return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), name(), searchTerm, fq.getAutomata().automaton); + return new BinaryDvConfirmedAutomatonQuery(new MatchAllDocsQuery(), name, searchTerm, fq.getAutomata().automaton); } - return new BinaryDvConfirmedAutomatonQuery(ngramQ, name(), searchTerm, fq.getAutomata().automaton); + return new BinaryDvConfirmedAutomatonQuery(ngramQ, name, searchTerm, fq.getAutomata().automaton); } catch (IOException ioe) { throw new ElasticsearchParseException("Error parsing wildcard field fuzzy string [" + searchTerm + "]"); } @@ -786,9 +804,9 @@ public String familyTypeName() { } @Override - public Query termQuery(Object value, SearchExecutionContext context) { + public Query termQuery(String name, Object value, SearchExecutionContext context) { String searchTerm = BytesRefs.toString(value); - return wildcardQuery(escapeWildcardSyntax(searchTerm), MultiTermQuery.CONSTANT_SCORE_REWRITE, false, context); + return wildcardQuery(name, escapeWildcardSyntax(searchTerm), MultiTermQuery.CONSTANT_SCORE_REWRITE, false, context); } private String escapeWildcardSyntax(String term) { @@ -807,47 +825,48 
@@ private String escapeWildcardSyntax(String term) { } @Override - public Query termQueryCaseInsensitive(Object value, SearchExecutionContext context) { + public Query termQueryCaseInsensitive(String name, Object value, SearchExecutionContext context) { String searchTerm = BytesRefs.toString(value); - return wildcardQuery(escapeWildcardSyntax(searchTerm), MultiTermQuery.CONSTANT_SCORE_REWRITE, true, context); + return wildcardQuery(name, escapeWildcardSyntax(searchTerm), MultiTermQuery.CONSTANT_SCORE_REWRITE, true, context); } @Override public Query prefixQuery( + String name, String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, SearchExecutionContext context ) { - return wildcardQuery(escapeWildcardSyntax(value) + "*", method, caseInsensitive, context); + return wildcardQuery(name, escapeWildcardSyntax(value) + "*", method, caseInsensitive, context); } @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { + public Query termsQuery(String name, Collection values, SearchExecutionContext context) { BooleanQuery.Builder bq = new BooleanQuery.Builder(); for (Object value : values) { - bq.add(termQuery(value, context), Occur.SHOULD); + bq.add(termQuery(name, value, context), Occur.SHOULD); } return new ConstantScoreQuery(bq.build()); } @Override - public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - failIfNoDocValues(); + public IndexFieldData.Builder fielddataBuilder(String name, String fullyQualifiedIndexName, Supplier searchLookup) { + failIfNoDocValues(name); return (cache, breakerService) -> new StringBinaryIndexFieldData( - name(), + name, CoreValuesSourceType.KEYWORD, WildcardDocValuesField::new ); } @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + public ValueFetcher valueFetcher(String name, SearchExecutionContext context, String format) { if (format != null) { - throw new IllegalArgumentException("Field 
[" + name() + "] of type [" + typeName() + "] doesn't support formats."); + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] doesn't support formats."); } - return new SourceValueFetcher(name(), context, nullValue) { + return new SourceValueFetcher(name, context, nullValue) { @Override protected String parseSourceValue(Object value) { String keywordValue = value.toString(); @@ -868,14 +887,14 @@ protected String parseSourceValue(Object value) { private WildcardFieldMapper( String simpleName, - WildcardFieldType mappedFieldType, + MappedField mappedField, int ignoreAbove, MultiFields multiFields, CopyTo copyTo, String nullValue, Version indexVersionCreated ) { - super(simpleName, mappedFieldType, multiFields, copyTo); + super(simpleName, mappedField, multiFields, copyTo); this.nullValue = nullValue; this.ignoreAbove = ignoreAbove; this.indexVersionCreated = indexVersionCreated; @@ -887,7 +906,7 @@ private WildcardFieldMapper( @Override public Map indexAnalyzers() { - return Map.of(mappedFieldType.name(), fieldType().analyzer); + return Map.of(mappedField.name(), fieldType().analyzer); } /** Values that have more chars than the return value of this method will @@ -926,13 +945,13 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio void createFields(String value, LuceneDocument parseDoc, List fields) { String ngramValue = addLineEndChars(value); - Field ngramField = new Field(fieldType().name(), ngramValue, ngramFieldType); + Field ngramField = new Field(name(), ngramValue, ngramFieldType); fields.add(ngramField); - CustomBinaryDocValuesField dvField = (CustomBinaryDocValuesField) parseDoc.getByKey(fieldType().name()); + CustomBinaryDocValuesField dvField = (CustomBinaryDocValuesField) parseDoc.getByKey(name()); if (dvField == null) { - dvField = new CustomBinaryDocValuesField(fieldType().name(), value.getBytes(StandardCharsets.UTF_8)); - parseDoc.addWithKey(fieldType().name(), dvField); + dvField = 
new CustomBinaryDocValuesField(name(), value.getBytes(StandardCharsets.UTF_8)); + parseDoc.addWithKey(name(), dvField); } else { dvField.add(value.getBytes(StandardCharsets.UTF_8)); } diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldAggregationTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldAggregationTests.java index 3649a29edbae9..2e07d3c86bf50 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldAggregationTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldAggregationTests.java @@ -34,15 +34,12 @@ public class WildcardFieldAggregationTests extends AggregatorTestCase { private static final int MAX_FIELD_LENGTH = 30; private WildcardFieldMapper wildcardFieldMapper; - private WildcardFieldMapper.WildcardFieldType wildcardFieldType; @Before public void setup() { WildcardFieldMapper.Builder builder = new WildcardFieldMapper.Builder(WILDCARD_FIELD_NAME, Version.CURRENT); builder.ignoreAbove(MAX_FIELD_LENGTH); wildcardFieldMapper = builder.build(MapperBuilderContext.ROOT); - - wildcardFieldType = wildcardFieldMapper.fieldType(); } private void addFields(LuceneDocument parseDoc, Document doc, String docContent) throws IOException { @@ -92,7 +89,7 @@ public void testTermsAggregation() throws IOException { assertEquals(3L, result.getBuckets().get(1).getDocCount()); assertEquals("c", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); - }, wildcardFieldType); + }, wildcardFieldMapper.field()); } public void testCompositeTermsAggregation() throws IOException { @@ -118,7 +115,7 @@ public void testCompositeTermsAggregation() throws IOException { assertEquals(2L, result.getBuckets().get(1).getDocCount()); assertEquals("{terms_key=d}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, 
result.getBuckets().get(2).getDocCount()); - }, wildcardFieldType); + }, wildcardFieldMapper.field()); } public void testCompositeTermsSearchAfter() throws IOException { @@ -140,6 +137,6 @@ public void testCompositeTermsSearchAfter() throws IOException { assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{terms_key=d}", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); - }, wildcardFieldType); + }, wildcardFieldMapper.field()); } } diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index d614993d32cb0..792c5245d4ab3 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -56,6 +56,7 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperTestCase; @@ -179,7 +180,7 @@ public void testTooBigKeywordField() throws IOException { IndexSearcher searcher = newSearcher(reader); iw.close(); - Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("*a*", null, null); + Query wildcardFieldQuery = wildcardFieldType.field().wildcardQuery("*a*", null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); @@ -223,12 +224,12 @@ public void testBWCIndexVersion() throws IOException { iw.close(); // Unnatural 
circumstance - testing we fail if we were to use the new analyzer on old index - Query oldWildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("a b", null, null); + Query oldWildcardFieldQuery = wildcardFieldType.field().wildcardQuery("a b", null, null); TopDocs oldWildcardFieldTopDocs = searcher.search(oldWildcardFieldQuery, 10, Sort.INDEXORDER); assertThat(oldWildcardFieldTopDocs.totalHits.value, equalTo(0L)); // Natural circumstance test we revert to the old analyzer for old indices - Query wildcardFieldQuery = wildcardFieldType79.fieldType().wildcardQuery("a b", null, null); + Query wildcardFieldQuery = wildcardFieldType79.field().wildcardQuery("a b", null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); @@ -257,12 +258,12 @@ public void testTooBigQueryField() throws IOException { // Test wildcard query String queryString = randomABString((BooleanQuery.getMaxClauseCount() * 2) + 1); - Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(queryString, null, null); + Query wildcardFieldQuery = wildcardFieldType.field().wildcardQuery(queryString, null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); // Test regexp query - wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(queryString, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); + wildcardFieldQuery = wildcardFieldType.field().regexpQuery(queryString, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); @@ -299,13 +300,13 @@ public void testTermAndPrefixQueryIgnoreWildcardSyntax() throws IOException { } private void expectTermMatch(IndexSearcher searcher, String term, long count) throws IOException { - Query q = 
wildcardFieldType.fieldType().termQuery(term, MOCK_CONTEXT); + Query q = wildcardFieldType.field().termQuery(term, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); assertThat(td.totalHits.value, equalTo(count)); } private void expectPrefixMatch(IndexSearcher searcher, String term, long count) throws IOException { - Query q = wildcardFieldType.fieldType().prefixQuery(term, null, MOCK_CONTEXT); + Query q = wildcardFieldType.field().prefixQuery(term, null, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); assertThat(td.totalHits.value, equalTo(count)); } @@ -353,22 +354,20 @@ public void testSearchResultsVersusKeywordField() throws IOException { case 0 -> { pattern = getRandomWildcardPattern(); boolean caseInsensitive = randomBoolean(); - wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(pattern, null, caseInsensitive, MOCK_CONTEXT); - keywordFieldQuery = keywordFieldType.fieldType().wildcardQuery(pattern, null, caseInsensitive, MOCK_CONTEXT); + wildcardFieldQuery = wildcardFieldType.field().wildcardQuery(pattern, null, caseInsensitive, MOCK_CONTEXT); + keywordFieldQuery = keywordFieldType.field().wildcardQuery(pattern, null, caseInsensitive, MOCK_CONTEXT); } case 1 -> { pattern = getRandomRegexPattern(values); int matchFlags = randomBoolean() ? 
0 : RegExp.ASCII_CASE_INSENSITIVE; - wildcardFieldQuery = wildcardFieldType.fieldType() - .regexpQuery(pattern, RegExp.ALL, matchFlags, 20000, null, MOCK_CONTEXT); - keywordFieldQuery = keywordFieldType.fieldType() - .regexpQuery(pattern, RegExp.ALL, matchFlags, 20000, null, MOCK_CONTEXT); + wildcardFieldQuery = wildcardFieldType.field().regexpQuery(pattern, RegExp.ALL, matchFlags, 20000, null, MOCK_CONTEXT); + keywordFieldQuery = keywordFieldType.field().regexpQuery(pattern, RegExp.ALL, matchFlags, 20000, null, MOCK_CONTEXT); } case 2 -> { pattern = randomABString(5); boolean caseInsensitivePrefix = randomBoolean(); - wildcardFieldQuery = wildcardFieldType.fieldType().prefixQuery(pattern, null, caseInsensitivePrefix, MOCK_CONTEXT); - keywordFieldQuery = keywordFieldType.fieldType().prefixQuery(pattern, null, caseInsensitivePrefix, MOCK_CONTEXT); + wildcardFieldQuery = wildcardFieldType.field().prefixQuery(pattern, null, caseInsensitivePrefix, MOCK_CONTEXT); + keywordFieldQuery = keywordFieldType.field().prefixQuery(pattern, null, caseInsensitivePrefix, MOCK_CONTEXT); } case 3 -> { int edits = randomInt(2); @@ -385,14 +384,14 @@ public void testSearchResultsVersusKeywordField() throws IOException { // so we opt for one less prefixLength = Math.min(pattern.length() - 1, prefixLength); boolean transpositions = randomBoolean(); - wildcardFieldQuery = wildcardFieldType.fieldType() + wildcardFieldQuery = wildcardFieldType.field() .fuzzyQuery(pattern, fuzziness, prefixLength, 50, transpositions, MOCK_CONTEXT); - keywordFieldQuery = keywordFieldType.fieldType() + keywordFieldQuery = keywordFieldType.field() .fuzzyQuery(pattern, fuzziness, prefixLength, 50, transpositions, MOCK_CONTEXT); } case 4 -> { TermRangeQuery trq = getRandomRange(values); - wildcardFieldQuery = wildcardFieldType.fieldType() + wildcardFieldQuery = wildcardFieldType.field() .rangeQuery( trq.getLowerTerm(), trq.getUpperTerm(), @@ -403,7 +402,7 @@ public void testSearchResultsVersusKeywordField() throws 
IOException { null, MOCK_CONTEXT ); - keywordFieldQuery = keywordFieldType.fieldType() + keywordFieldQuery = keywordFieldType.field() .rangeQuery( trq.getLowerTerm(), trq.getUpperTerm(), @@ -502,7 +501,7 @@ public void testRangeQueryVersusKeywordField() throws IOException { BytesRef lower = bounds[0] == null ? null : new BytesRef(bounds[0]); BytesRef upper = bounds[1] == null ? null : new BytesRef(bounds[1]); TermRangeQuery trq = new TermRangeQuery(WILDCARD_FIELD_NAME, lower, upper, randomBoolean(), randomBoolean()); - Query wildcardFieldQuery = wildcardFieldType.fieldType() + Query wildcardFieldQuery = wildcardFieldType.field() .rangeQuery( trq.getLowerTerm(), trq.getUpperTerm(), @@ -513,7 +512,7 @@ public void testRangeQueryVersusKeywordField() throws IOException { null, MOCK_CONTEXT ); - Query keywordFieldQuery = keywordFieldType.fieldType() + Query keywordFieldQuery = keywordFieldType.field() .rangeQuery( trq.getLowerTerm(), trq.getUpperTerm(), @@ -547,12 +546,12 @@ public void testRegexAcceleration() throws IOException, ParseException { // All these expressions should rewrite to a match all with no verification step required at all String superfastRegexes[] = { ".*", "(foo|bar|.*)", "@" }; for (String regex : superfastRegexes) { - Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); + Query wildcardFieldQuery = wildcardFieldType.field().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); assertTrue(regex + "should have been accelerated", wildcardFieldQuery instanceof FieldExistsQuery); } String matchNoDocsRegexes[] = { "" }; for (String regex : matchNoDocsRegexes) { - Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); + Query wildcardFieldQuery = wildcardFieldType.field().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); assertTrue(wildcardFieldQuery instanceof MatchNoDocsQuery); } @@ -573,7 +572,7 @@ 
public void testRegexAcceleration() throws IOException, ParseException { for (String[] test : acceleratedTests) { String regex = test[0]; String expectedAccelerationQueryString = test[1].replaceAll("_", "" + WildcardFieldMapper.TOKEN_START_OR_END_CHAR); - Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); + Query wildcardFieldQuery = wildcardFieldType.field().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); testExpectedAccelerationQuery(regex, wildcardFieldQuery, expectedAccelerationQueryString); } @@ -589,7 +588,7 @@ public void testRegexAcceleration() throws IOException, ParseException { "a*", "...*.." }; for (String regex : matchAllButVerifyTests) { - Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); + Query wildcardFieldQuery = wildcardFieldType.field().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); BinaryDvConfirmedAutomatonQuery q = (BinaryDvConfirmedAutomatonQuery) wildcardFieldQuery; Query approximationQuery = unwrapAnyBoost(q.getApproximationQuery()); approximationQuery = getSimplifiedApproximationQuery(q.getApproximationQuery()); @@ -607,7 +606,7 @@ public void testRegexAcceleration() throws IOException, ParseException { for (String[] test : suboptimalTests) { String regex = test[0]; String expectedAccelerationQueryString = test[1].replaceAll("_", "" + WildcardFieldMapper.TOKEN_START_OR_END_CHAR); - Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); + Query wildcardFieldQuery = wildcardFieldType.field().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); testExpectedAccelerationQuery(regex, wildcardFieldQuery, expectedAccelerationQueryString); } @@ -624,7 +623,7 @@ public void testWildcardAcceleration() throws IOException, ParseException { // All these expressions should rewrite to MatchAll with no verification step 
required at all String superfastPattern[] = { "*", "**", "*?" }; for (String pattern : superfastPattern) { - Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(pattern, null, MOCK_CONTEXT); + Query wildcardFieldQuery = wildcardFieldType.field().wildcardQuery(pattern, null, MOCK_CONTEXT); assertTrue( pattern + " was not a pure match all query " + formatQuery(wildcardFieldQuery), wildcardFieldQuery instanceof FieldExistsQuery @@ -644,7 +643,7 @@ public void testWildcardAcceleration() throws IOException, ParseException { for (String[] test : tests) { String pattern = test[0]; String expectedAccelerationQueryString = test[1].replaceAll("_", "" + WildcardFieldMapper.TOKEN_START_OR_END_CHAR); - Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(pattern, null, MOCK_CONTEXT); + Query wildcardFieldQuery = wildcardFieldType.field().wildcardQuery(pattern, null, MOCK_CONTEXT); testExpectedAccelerationQuery(pattern, wildcardFieldQuery, expectedAccelerationQueryString); assertTrue(unwrapAnyConstantScore(wildcardFieldQuery) instanceof BinaryDvConfirmedAutomatonQuery); } @@ -652,7 +651,7 @@ public void testWildcardAcceleration() throws IOException, ParseException { // TODO All these expressions have no acceleration at all and could be improved String slowPatterns[] = { "??" 
}; for (String pattern : slowPatterns) { - Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(pattern, null, MOCK_CONTEXT); + Query wildcardFieldQuery = wildcardFieldType.field().wildcardQuery(pattern, null, MOCK_CONTEXT); wildcardFieldQuery = unwrapAnyConstantScore(wildcardFieldQuery); BinaryDvConfirmedAutomatonQuery q = (BinaryDvConfirmedAutomatonQuery) wildcardFieldQuery; assertTrue( @@ -741,7 +740,7 @@ static class FuzzyTest { } Query getFuzzyQuery() { - return wildcardFieldType.fieldType().fuzzyQuery(pattern, fuzziness, prefixLength, 50, true, MOCK_CONTEXT); + return wildcardFieldType.field().fuzzyQuery(pattern, fuzziness, prefixLength, 50, true, MOCK_CONTEXT); } Query getExpectedApproxQuery() throws ParseException { @@ -812,7 +811,7 @@ static class RangeTest { } Query getRangeQuery() { - return wildcardFieldType.fieldType().rangeQuery(lower, upper, true, true, null, null, null, MOCK_CONTEXT); + return wildcardFieldType.field().rangeQuery(lower, upper, true, true, null, null, null, MOCK_CONTEXT); } Query getExpectedApproxQuery() throws ParseException { @@ -1060,11 +1059,11 @@ protected String convertToRandomRegex(String randomValue) { return result.toString(); } - protected MappedFieldType provideMappedFieldType(String name) { + protected MappedField provideMappedFieldType(String name) { if (name.equals(WILDCARD_FIELD_NAME)) { - return wildcardFieldType.fieldType(); + return wildcardFieldType.field(); } else { - return keywordFieldType.fieldType(); + return keywordFieldType.field(); } } @@ -1075,7 +1074,7 @@ protected final SearchExecutionContext createMockContext() { Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build() ); BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, Mockito.mock(BitsetFilterCache.Listener.class)); - TriFunction, IndexFieldData> indexFieldDataLookup = ( + TriFunction, IndexFieldData> indexFieldDataLookup = ( fieldType, fieldIndexName, searchLookup) -> { @@ 
-1104,7 +1103,7 @@ protected final SearchExecutionContext createMockContext() { emptyMap() ) { @Override - public MappedFieldType getFieldType(String name) { + public MappedField getMappedField(String name) { return provideMappedFieldType(name); } diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java index c7724e58ee181..c6ec5a412f2d3 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.Version; import org.elasticsearch.index.mapper.FieldTypeTestCase; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedField; import org.elasticsearch.index.mapper.MapperBuilderContext; import java.io.IOException; @@ -18,21 +18,21 @@ public class WildcardFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new WildcardFieldMapper.Builder("field", Version.CURRENT).build(MapperBuilderContext.ROOT).fieldType(); + MappedField mapper = new WildcardFieldMapper.Builder("field", Version.CURRENT).build(MapperBuilderContext.ROOT).field(); assertEquals(List.of("value"), fetchSourceValue(mapper, "value")); assertEquals(List.of("42"), fetchSourceValue(mapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(mapper, true)); - MappedFieldType ignoreAboveMapper = new WildcardFieldMapper.Builder("field", Version.CURRENT).ignoreAbove(4) + MappedField ignoreAboveMapper = new WildcardFieldMapper.Builder("field", Version.CURRENT).ignoreAbove(4) .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); assertEquals(List.of(), fetchSourceValue(ignoreAboveMapper, "value")); 
assertEquals(List.of("42"), fetchSourceValue(ignoreAboveMapper, 42L)); assertEquals(List.of("true"), fetchSourceValue(ignoreAboveMapper, true)); - MappedFieldType nullValueMapper = new WildcardFieldMapper.Builder("field", Version.CURRENT).nullValue("NULL") + MappedField nullValueMapper = new WildcardFieldMapper.Builder("field", Version.CURRENT).nullValue("NULL") .build(MapperBuilderContext.ROOT) - .fieldType(); + .field(); assertEquals(List.of("NULL"), fetchSourceValue(nullValueMapper, null)); } }