Remove unnecessary references to index version. (#65402)
* Remove unnecessary references in mapping code.
* Remove version from QueryAnalyzer#analyze.
jtibshirani committed Nov 24, 2020
1 parent d088171 commit af2f084
Showing 15 changed files with 142 additions and 156 deletions.
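The change is a mechanical signature cleanup: parameters that carried the index-created version but were no longer read are dropped, and every call site stops threading the version through. A minimal, self-contained sketch of that pattern (illustrative names only, not the Elasticsearch classes):

final class UnusedParameterCleanup {

    // Before: analyze(String query, int indexVersionCreated) -- the version was accepted but never used.
    // After: the unused parameter is removed and callers simply pass the query.
    static String analyze(String query) {
        return "analyzed: " + query;
    }

    public static void main(String[] args) {
        // Call sites no longer need to look up or pass along an index version.
        System.out.println(analyze("field:value"));
    }
}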
@@ -395,8 +395,7 @@ void processQuery(Query query, ParseContext context) {
ParseContext.Document doc = context.doc();
PercolatorFieldType pft = (PercolatorFieldType) this.fieldType();
QueryAnalyzer.Result result;
- Version indexVersion = context.indexSettings().getIndexVersionCreated();
- result = QueryAnalyzer.analyze(query, indexVersion);
+ result = QueryAnalyzer.analyze(query);
if (result == QueryAnalyzer.Result.UNKNOWN) {
doc.add(new Field(pft.extractionResultField.name(), EXTRACTION_FAILED, INDEXED_KEYWORD));
return;
@@ -40,7 +40,6 @@
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
- import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.index.query.DateRangeIncludingNowQuery;

@@ -84,9 +83,8 @@ private QueryAnalyzer() {
* this query in such a way that the PercolatorQuery always verifies if this query with the MemoryIndex.
*
* @param query The query to analyze.
- * @param indexVersion The create version of the index containing the percolator queries.
*/
- static Result analyze(Query query, Version indexVersion) {
+ static Result analyze(Query query) {
ResultBuilder builder = new ResultBuilder(false);
query.visit(builder);
return builder.getResult();
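The version-free analyze builds its result purely by walking the query with Lucene's QueryVisitor, as the hunk above shows (query.visit(builder)). A small standalone sketch of that visitor pattern using plain Lucene classes; this is not the percolator's ResultBuilder, just the underlying traversal mechanism, with illustrative field and term values:

import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.TermQuery;

public class QueryVisitorSketch {
    public static void main(String[] args) {
        Query query = new BooleanQuery.Builder()
            .add(new TermQuery(new Term("field", "apple")), BooleanClause.Occur.MUST)
            .add(new TermQuery(new Term("field", "pear")), BooleanClause.Occur.SHOULD)
            .build();

        // Collect every term the query touches; the percolator's ResultBuilder is a
        // much richer QueryVisitor performing the same kind of traversal.
        Set<Term> terms = new HashSet<>();
        query.visit(QueryVisitor.termCollector(terms));
        System.out.println(terms);
    }
}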

Large diffs are not rendered by default.

@@ -67,7 +67,7 @@
* <li>"min_input_length": 50 (default)</li>
* <li>"contexts" : CONTEXTS</li>
* </ul>
- * see {@link ContextMappings#load(Object, Version)} for CONTEXTS<br>
+ * see {@link ContextMappings#load(Object)} for CONTEXTS<br>
* see {@link #parse(ParseContext)} for acceptable inputs for indexing<br>
* <p>
* This field type constructs completion queries that are run
@@ -128,7 +128,7 @@ public static class Builder extends FieldMapper.Builder {
m -> builder(m).preservePosInc.get(), Defaults.DEFAULT_POSITION_INCREMENTS)
.alwaysSerialize();
private final Parameter<ContextMappings> contexts = new Parameter<>("contexts", false, () -> null,
- (n, c, o) -> ContextMappings.load(o, c.indexVersionCreated()), m -> builder(m).contexts.get())
+ (n, c, o) -> ContextMappings.load(o), m -> builder(m).contexts.get())
.setSerializer((b, n, c) -> {
if (c == null) {
return;
@@ -87,7 +87,6 @@ public Builder put(MetadataFieldMapper.Builder mapper) {
public DocumentMapper build() {
Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
Mapping mapping = new Mapping(
- indexSettings.getIndexVersionCreated(),
rootObjectMapper,
metadataMappers.values().toArray(new MetadataFieldMapper[0]),
meta);
@@ -19,7 +19,6 @@

package org.elasticsearch.index.mapper;

- import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -110,7 +109,7 @@ private DocumentMapper parse(String type, Map<String, Object> mapping) throws Ma
Map<String, Object> fieldNodeMap = (Map<String, Object>) fieldNode;
docBuilder.put(typeParser.parse(fieldName, fieldNodeMap, parserContext));
fieldNodeMap.remove("type");
- checkNoRemainingFields(fieldName, fieldNodeMap, parserContext.indexVersionCreated());
+ checkNoRemainingFields(fieldName, fieldNodeMap);
}
}

@@ -133,17 +132,16 @@ private DocumentMapper parse(String type, Map<String, Object> mapping) throws Ma
docBuilder.meta(Collections.unmodifiableMap(new HashMap<>(meta)));
}

- checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: ");
+ checkNoRemainingFields(mapping, "Root mapping definition has unsupported parameters: ");

return docBuilder.build();
}

- public static void checkNoRemainingFields(String fieldName, Map<?, ?> fieldNodeMap, Version indexVersionCreated) {
- checkNoRemainingFields(fieldNodeMap, indexVersionCreated,
- "Mapping definition for [" + fieldName + "] has unsupported parameters: ");
+ public static void checkNoRemainingFields(String fieldName, Map<?, ?> fieldNodeMap) {
+ checkNoRemainingFields(fieldNodeMap, "Mapping definition for [" + fieldName + "] has unsupported parameters: ");
}

- public static void checkNoRemainingFields(Map<?, ?> fieldNodeMap, Version indexVersionCreated, String message) {
+ public static void checkNoRemainingFields(Map<?, ?> fieldNodeMap, String message) {
if (!fieldNodeMap.isEmpty()) {
throw new MapperParsingException(message + getRemainingFields(fieldNodeMap));
}
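The two simplified checkNoRemainingFields overloads above implement a common "consume the keys you understand, then reject leftovers" pattern, now without an index version argument. A self-contained sketch of that pattern under illustrative names (IllegalArgumentException stands in for MapperParsingException; this is not the Elasticsearch class):

import java.util.HashMap;
import java.util.Map;

public class StrictConfigCheck {

    static void checkNoRemainingFields(String fieldName, Map<?, ?> fieldNodeMap) {
        checkNoRemainingFields(fieldNodeMap, "Mapping definition for [" + fieldName + "] has unsupported parameters: ");
    }

    static void checkNoRemainingFields(Map<?, ?> fieldNodeMap, String message) {
        if (!fieldNodeMap.isEmpty()) {
            throw new IllegalArgumentException(message + fieldNodeMap);
        }
    }

    public static void main(String[] args) {
        Map<String, Object> node = new HashMap<>();
        node.put("type", "keyword");
        node.put("bogus_setting", true);

        node.remove("type"); // the parser consumes the keys it understands
        checkNoRemainingFields("my_field", node); // throws: "bogus_setting" is left over
    }
}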
10 changes: 3 additions & 7 deletions server/src/main/java/org/elasticsearch/index/mapper/Mapping.java
@@ -19,7 +19,6 @@

package org.elasticsearch.index.mapper;

- import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@@ -44,16 +43,13 @@
*/
public final class Mapping implements ToXContentFragment {

- final Version indexCreated;
final RootObjectMapper root;
final MetadataFieldMapper[] metadataMappers;
final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap;
final Map<String, MetadataFieldMapper> metadataMappersByName;
final Map<String, Object> meta;

- public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper,
- MetadataFieldMapper[] metadataMappers, Map<String, Object> meta) {
- this.indexCreated = indexCreated;
+ public Mapping(RootObjectMapper rootObjectMapper, MetadataFieldMapper[] metadataMappers, Map<String, Object> meta) {
this.metadataMappers = metadataMappers;
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();
Map<String, MetadataFieldMapper> metadataMappersByName = new HashMap<>();
@@ -90,7 +86,7 @@ public void validate(MappingLookup mappers) {
* Generate a mapping update for the given root object mapper.
*/
public Mapping mappingUpdate(Mapper rootObjectMapper) {
- return new Mapping(indexCreated, (RootObjectMapper) rootObjectMapper, metadataMappers, meta);
+ return new Mapping((RootObjectMapper) rootObjectMapper, metadataMappers, meta);
}

/** Get the root mapper with the given class. */
@@ -137,7 +133,7 @@ public Mapping merge(Mapping mergeWith, MergeReason reason) {
XContentHelper.mergeDefaults(mergedMeta, meta);
}

- return new Mapping(indexCreated, mergedRoot, mergedMetadataMappers.values().toArray(new MetadataFieldMapper[0]), mergedMeta);
+ return new Mapping(mergedRoot, mergedMetadataMappers.values().toArray(new MetadataFieldMapper[0]), mergedMeta);
}

public MetadataFieldMapper getMetadataMapper(String mapperName) {
@@ -321,7 +321,7 @@ protected static void parseProperties(ObjectMapper.Builder objBuilder, Map<Strin
}
objBuilder.add(fieldBuilder);
propNode.remove("type");
- DocumentMapperParser.checkNoRemainingFields(fieldName, propNode, parserContext.indexVersionCreated());
+ DocumentMapperParser.checkNoRemainingFields(fieldName, propNode);
iterator.remove();
} else if (isEmptyList) {
iterator.remove();
@@ -331,8 +331,7 @@
}
}

- DocumentMapperParser.checkNoRemainingFields(propsNode, parserContext.indexVersionCreated(),
- "DocType mapping definition has unsupported parameters: ");
+ DocumentMapperParser.checkNoRemainingFields(propsNode, "DocType mapping definition has unsupported parameters: ");

}

@@ -85,7 +85,7 @@ public static void parseRuntimeFields(Map<String, Object> node,
}
runtimeFieldTypeConsumer.accept(typeParser.parse(fieldName, propNode, parserContext));
propNode.remove("type");
- DocumentMapperParser.checkNoRemainingFields(fieldName, propNode, parserContext.indexVersionCreated());
+ DocumentMapperParser.checkNoRemainingFields(fieldName, propNode);
iterator.remove();
} else {
throw new MapperParsingException("Expected map for runtime field [" + fieldName + "] definition but got a "
@@ -177,7 +177,7 @@ private static PrefixConfig parsePrefixConfig(String propName, ParserContext par
Defaults.INDEX_PREFIX_MIN_CHARS);
int maxChars = XContentMapValues.nodeIntegerValue(indexPrefix.remove("max_chars"),
Defaults.INDEX_PREFIX_MAX_CHARS);
- DocumentMapperParser.checkNoRemainingFields(propName, indexPrefix, parserContext.indexVersionCreated());
+ DocumentMapperParser.checkNoRemainingFields(propName, indexPrefix);
return new PrefixConfig(minChars, maxChars);
}

@@ -232,7 +232,7 @@ private static FielddataFrequencyFilter parseFrequencyFilter(String name, Parser
double minFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("min"), 0);
double maxFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("max"), Integer.MAX_VALUE);
int minSegmentSize = XContentMapValues.nodeIntegerValue(frequencyFilter.remove("min_segment_size"), 0);
- DocumentMapperParser.checkNoRemainingFields(name, frequencyFilter, parserContext.indexVersionCreated());
+ DocumentMapperParser.checkNoRemainingFields(name, frequencyFilter);
return new FielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize);
}

@@ -155,7 +155,7 @@ public static boolean parseMultiField(Consumer<FieldMapper.Builder> multiFieldsB
FieldMapper.TypeParser fieldTypeParser = (FieldMapper.TypeParser) typeParser;
multiFieldsBuilder.accept(fieldTypeParser.parse(multiFieldName, multiFieldNodes, parserContext));
multiFieldNodes.remove("type");
- DocumentMapperParser.checkNoRemainingFields(propName, multiFieldNodes, parserContext.indexVersionCreated());
+ DocumentMapperParser.checkNoRemainingFields(propName, multiFieldNodes);
}
return true;
}
@@ -24,7 +24,6 @@
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
- import org.elasticsearch.Version;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
@@ -78,7 +77,7 @@ public String getFieldName() {
/**
* Loads a <code>name</code>d {@link CategoryContextMapping} instance
* from a map.
- * see {@link ContextMappings#load(Object, Version)}
+ * see {@link ContextMappings#load(Object)}
*
* Acceptable map param: <code>path</code>
*/
@@ -25,7 +25,6 @@
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.ElasticsearchParseException;
- import org.elasticsearch.Version;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
@@ -224,26 +223,26 @@ public Map<String, Set<String>> getNamedContexts(List<CharSequence> contexts) {
* [{"name": .., "type": .., ..}, {..}]
*
*/
- public static ContextMappings load(Object configuration, Version indexVersionCreated) throws ElasticsearchParseException {
+ public static ContextMappings load(Object configuration) throws ElasticsearchParseException {
final List<ContextMapping<?>> contextMappings;
if (configuration instanceof List) {
contextMappings = new ArrayList<>();
List<Object> configurations = (List<Object>) configuration;
for (Object contextConfig : configurations) {
- contextMappings.add(load((Map<String, Object>) contextConfig, indexVersionCreated));
+ contextMappings.add(load((Map<String, Object>) contextConfig));
}
if (contextMappings.size() == 0) {
throw new ElasticsearchParseException("expected at least one context mapping");
}
} else if (configuration instanceof Map) {
- contextMappings = Collections.singletonList(load(((Map<String, Object>) configuration), indexVersionCreated));
+ contextMappings = Collections.singletonList(load(((Map<String, Object>) configuration)));
} else {
throw new ElasticsearchParseException("expected a list or an entry of context mapping");
}
return new ContextMappings(contextMappings);
}

- private static ContextMapping<?> load(Map<String, Object> contextConfig, Version indexVersionCreated) {
+ private static ContextMapping<?> load(Map<String, Object> contextConfig) {
String name = extractRequiredValue(contextConfig, FIELD_NAME);
String type = extractRequiredValue(contextConfig, FIELD_TYPE);
final ContextMapping<?> contextMapping;
@@ -257,7 +256,7 @@ private static ContextMapping<?> load(Map<String, Object> contextConfig, Version
default:
throw new ElasticsearchParseException("unknown context type[" + type + "]");
}
- DocumentMapperParser.checkNoRemainingFields(name, contextConfig, indexVersionCreated);
+ DocumentMapperParser.checkNoRemainingFields(name, contextConfig);
return contextMapping;
}

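ContextMappings.load(Object) above accepts either a single context definition map or a list of them. A standalone sketch of that list-or-single-entry normalization with the version argument gone; names and exception types are illustrative, not the Elasticsearch implementation:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class ContextConfigLoader {

    @SuppressWarnings("unchecked")
    static List<Map<String, Object>> load(Object configuration) {
        if (configuration instanceof List) {
            List<Map<String, Object>> contexts = new ArrayList<>();
            for (Object entry : (List<Object>) configuration) {
                contexts.add((Map<String, Object>) entry);
            }
            if (contexts.isEmpty()) {
                throw new IllegalArgumentException("expected at least one context mapping");
            }
            return contexts;
        } else if (configuration instanceof Map) {
            return Collections.singletonList((Map<String, Object>) configuration);
        } else {
            throw new IllegalArgumentException("expected a list or an entry of context mapping");
        }
    }

    public static void main(String[] args) {
        // A single context definition and a list of definitions are both accepted.
        System.out.println(load(Map.of("name", "place", "type", "category")));
        System.out.println(load(List.of(Map.of("name", "location", "type", "geo"))));
    }
}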
@@ -242,7 +242,7 @@ public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception {
new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

Engine.IndexResult mappingUpdate =
- new Engine.IndexResult(new Mapping(null, mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
+ new Engine.IndexResult(new Mapping(mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
Translog.Location resultLocation = new Translog.Location(42, 42, 42);
Engine.IndexResult success = new FakeIndexResult(1, 1, 13, true, resultLocation);

@@ -771,7 +771,7 @@ public void testRetries() throws Exception {
"I'm conflicted <(;_;)>");
Engine.IndexResult conflictedResult = new Engine.IndexResult(err, 0);
Engine.IndexResult mappingUpdate =
- new Engine.IndexResult(new Mapping(null, mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
+ new Engine.IndexResult(new Mapping(mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
Translog.Location resultLocation = new Translog.Location(42, 42, 42);
Engine.IndexResult success = new FakeIndexResult(1, 1, 13, true, resultLocation);

@@ -851,7 +851,7 @@ public void testForceExecutionOnRejectionAfterMappingUpdate() throws Exception {
BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

Engine.IndexResult mappingUpdate =
- new Engine.IndexResult(new Mapping(null, mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
+ new Engine.IndexResult(new Mapping(mock(RootObjectMapper.class), new MetadataFieldMapper[0], Collections.emptyMap()));
Translog.Location resultLocation1 = new Translog.Location(42, 36, 36);
Translog.Location resultLocation2 = new Translog.Location(42, 42, 42);
Engine.IndexResult success1 = new FakeIndexResult(1, 1, 10, true, resultLocation1);
@@ -155,7 +155,7 @@ public void testSendUpdateMappingUsingAutoPutMappingAction() {
mua.setClient(client);

RootObjectMapper rootObjectMapper = new RootObjectMapper.Builder("name", Version.CURRENT).build(new ContentPath());
- Mapping update = new Mapping(Version.V_7_9_0, rootObjectMapper, new MetadataFieldMapper[0], Map.of());
+ Mapping update = new Mapping(rootObjectMapper, new MetadataFieldMapper[0], Map.of());

mua.sendUpdateMapping(new Index("name", "uuid"), update, ActionListener.wrap(() -> {}));
verify(indicesAdminClient).execute(eq(AutoPutMappingAction.INSTANCE), any(), any());