Merge branch '6.x' into ccr-6.x
* 6.x:
  Lazy initialize checkpoint tracker bit sets
  Remove checkpoint tracker bit sets setting
  Tests: Fix FullClusterRestartIT.testSnapshotRestore test failing in 6.x (#27218)
  Fix FullClusterRestartIT using lenient booleans with 6.0
  Fix stable BWC branch detection logic
  Add version 6.0.0
  Fix logic detecting unreleased versions
  Skips exists query tests on unsupported versions
  Enhances exists queries to reduce need for `_field_names` (#26930)
  Added new terms_set query
  Set request body to required to reflect the code base (#27188)
  Update Docker docs for 6.0.0-rc2 (#27166)
  Docs: restore now fails if it encounters incompatible settings (#26933)
  Convert index blocks to cluster block exceptions (#27050)
  [DOCS] Link remote info API in Cross Cluster Search docs page
  prevent duplicate fields when mixing parent and root nested includes (#27072)
  TopHitsAggregator must propagate calls to `setScorer`. (#27138)
jasontedor committed Nov 2, 2017
2 parents 57ece57 + 8d3ffa2 commit 0ab744e
Showing 77 changed files with 3,025 additions and 339 deletions.
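Most of the mapper changes below come from "Enhances exists queries to reduce need for `_field_names`" (#26930): each affected field type now answers an exists query from doc values when it can, and only falls back to a term query on the _field_names metadata field otherwise. A minimal standalone sketch of that dispatch, using only the Lucene classes imported in the diffs below (the helper class and field names are illustrative, not part of this change):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

// Illustrative only: mirrors the shape of the existsQuery(...) overrides added
// to the individual field mappers in this commit.
final class ExistsQuerySketch {

    private static final String FIELD_NAMES_FIELD = "_field_names";

    // A field with doc values can answer "does this document have a value?"
    // directly from doc values; anything else still needs the _field_names postings.
    static Query existsQuery(String fieldName, boolean hasDocValues) {
        if (hasDocValues) {
            return new DocValuesFieldExistsQuery(fieldName);
        }
        return new TermQuery(new Term(FIELD_NAMES_FIELD, fieldName));
    }

    public static void main(String[] args) {
        System.out.println(existsQuery("timestamp", true));      // doc-values based exists
        System.out.println(existsQuery("my_completion", false)); // term query on _field_names
    }
}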
5 changes: 5 additions & 0 deletions core/src/main/java/org/elasticsearch/Version.java
@@ -114,6 +114,9 @@ public class Version implements Comparable<Version> {
public static final int V_6_0_0_rc2_ID = 6000052;
public static final Version V_6_0_0_rc2 =
new Version(V_6_0_0_rc2_ID, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final int V_6_0_0_ID = 6000099;
public static final Version V_6_0_0 =
new Version(V_6_0_0_ID, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final int V_6_1_0_ID = 6010099;
public static final Version V_6_1_0 = new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final Version CURRENT = V_6_1_0;
@@ -131,6 +134,8 @@ public static Version fromId(int id) {
switch (id) {
case V_6_1_0_ID:
return V_6_1_0;
case V_6_0_0_ID:
return V_6_0_0;
case V_6_0_0_rc2_ID:
return V_6_0_0_rc2;
case V_6_0_0_rc1_ID:
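The new id 6000099 follows the same packing as the surrounding version constants: major * 1,000,000 + minor * 10,000 + revision * 100 + build, where build 99 marks a GA release and lower values (such as 52 for rc2 above) mark pre-releases. A throwaway decoder illustrating that reading of the scheme (inferred from the constants in this diff, not part of the change):

// Illustrative decoder for the version ids above; the 99-for-GA and 52-for-rc2
// readings are taken from the constants in this hunk, the rest is assumed.
final class VersionIdSketch {
    static String describe(int id) {
        int major = id / 1_000_000;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100;
        String base = major + "." + minor + "." + revision;
        return build == 99 ? base : base + " (pre-release build " + build + ")";
    }

    public static void main(String[] args) {
        System.out.println(describe(6000099)); // 6.0.0
        System.out.println(describe(6000052)); // 6.0.0 (pre-release build 52), i.e. rc2
        System.out.println(describe(6010099)); // 6.1.0
    }
}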
@@ -62,7 +62,7 @@ protected DeleteIndexTemplateResponse newResponse() {

@Override
protected ClusterBlockException checkBlock(DeleteIndexTemplateRequest request, ClusterState state) {
-        return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA_WRITE, "");
+        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}

@Override
@@ -66,7 +66,7 @@ protected PutIndexTemplateResponse newResponse() {

@Override
protected ClusterBlockException checkBlock(PutIndexTemplateRequest request, ClusterState state) {
-        return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA_WRITE, "");
+        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}

@Override
@@ -120,7 +120,6 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
IndexSettings.QUERY_STRING_LENIENT_SETTING,
IndexSettings.ALLOW_UNMAPPED,
IndexSettings.INDEX_CHECK_ON_STARTUP,
-    LocalCheckpointTracker.SETTINGS_BIT_ARRAYS_SIZE,
IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD,
IndexSettings.MAX_SLICES_PER_SCROLL,
ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING,
@@ -23,6 +23,7 @@
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
@@ -211,6 +212,11 @@ public Query queryStringTermQuery(Term term) {
public Query termQuery(Object value, QueryShardContext context) {
return queryStringTermQuery(new Term(name(), indexedValueForSearch(value)));
}

@Override
public Query existsQuery(QueryShardContext context) {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

private EnabledAttributeMapper enabledState;
@@ -20,10 +20,14 @@
package org.elasticsearch.index.mapper;

import com.carrotsearch.hppc.ObjectArrayList;

import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
@@ -126,6 +130,15 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
return new BytesBinaryDVIndexFieldData.Builder();
}

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "Binary fields do not support searching");
@@ -165,6 +178,11 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
} else {
field.add(value);
}
} else {
// Only add an entry to the field names field if the field is stored
// but has no doc values so exists query will work on a field with
// no doc values
createFieldNamesField(context, fields);
}

}
@@ -23,7 +23,10 @@
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@@ -136,6 +139,15 @@ public String typeName() {
return CONTENT_TYPE;
}

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

@Override
public Boolean nullValue() {
return (Boolean)super.nullValue();
@@ -253,6 +265,8 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
}
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0));
} else {
createFieldNamesField(context, fields);
}
}

@@ -21,6 +21,8 @@
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.suggest.document.Completion50PostingsFormat;
import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
import org.apache.lucene.search.suggest.document.CompletionQuery;
@@ -40,11 +42,13 @@
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -257,6 +261,11 @@ public static synchronized PostingsFormat postingsFormat() {
return postingsFormat;
}

@Override
public Query existsQuery(QueryShardContext context) {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}

/**
* Completion prefix query
*/
@@ -456,6 +465,11 @@ public Mapper parse(ParseContext context) throws IOException {
context.doc().add(new SuggestField(fieldType().name(), input, metaData.weight));
}
}
List<IndexableField> fields = new ArrayList<>(1);
createFieldNamesField(context, fields);
for (IndexableField field : fields) {
context.doc().add(field);
}
multiFields.parse(this, context);
return null;
}
@@ -26,9 +26,12 @@
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
@@ -245,6 +248,15 @@ long parse(String value) {
return dateTimeFormatter().parser().parseMillis(value);
}

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

@Override
public Query termQuery(Object value, @Nullable QueryShardContext context) {
Query query = rangeQuery(value, value, true, true, ShapeRelation.INTERSECTS, null, null, context);
Expand Down Expand Up @@ -459,6 +471,8 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
}
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType().name(), timestamp));
} else if (fieldType().stored() || fieldType().indexOptions() != IndexOptions.NONE) {
createFieldNamesField(context, fields);
}
if (fieldType().stored()) {
fields.add(new StoredField(fieldType().name(), timestamp));
12 changes: 12 additions & 0 deletions core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
@@ -22,6 +22,7 @@
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@@ -33,6 +34,7 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper.FieldNamesFieldType;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;

@@ -308,6 +310,16 @@ public Mapper parse(ParseContext context) throws IOException {
*/
protected abstract void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException;

protected void createFieldNamesField(ParseContext context, List<IndexableField> fields) {
FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.docMapper()
.metadataMapper(FieldNamesFieldMapper.class).fieldType();
if (fieldNamesFieldType != null && fieldNamesFieldType.isEnabled()) {
for (String fieldName : FieldNamesFieldMapper.extractFieldNames(fieldType().name())) {
fields.add(new Field(FieldNamesFieldMapper.NAME, fieldName, fieldNamesFieldType));
}
}
}

@Override
public Iterator<Mapper> iterator() {
return multiFields.iterator();
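The concrete mappers elsewhere in this diff call createFieldNamesField from parseCreateField when a field is indexed or stored but has no doc values. A standalone illustration of the same indexing-side idea in plain Lucene (no Elasticsearch classes; field handling deliberately simplified), showing why a _field_names entry is only needed when doc values are absent:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StringField;

// Simplified sketch: a boolean-like field either gets a doc-values entry (which the
// doc-values based exists query can use) or, failing that, its name is recorded in
// _field_names so the term-query fallback in existsQuery(...) still finds it.
final class FieldNamesIndexingSketch {
    static void addBooleanField(Document doc, String name, boolean value, boolean hasDocValues) {
        if (hasDocValues) {
            doc.add(new SortedNumericDocValuesField(name, value ? 1 : 0));
        } else {
            // stands in for createFieldNamesField(context, fields)
            doc.add(new StringField("_field_names", name, Field.Store.NO));
        }
    }
}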
@@ -23,6 +23,10 @@
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -44,6 +48,9 @@
*/
public class FieldNamesFieldMapper extends MetadataFieldMapper {

private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(
ESLoggerFactory.getLogger(FieldNamesFieldMapper.class));

public static final String NAME = "_field_names";

public static final String CONTENT_TYPE = "_field_names";
@@ -178,11 +185,18 @@ public boolean isEnabled() {
return enabled;
}

@Override
public Query existsQuery(QueryShardContext context) {
throw new UnsupportedOperationException("Cannot run exists query on _field_names");
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
if (isEnabled() == false) {
throw new IllegalStateException("Cannot run [exists] queries if the [_field_names] field is disabled");
}
DEPRECATION_LOGGER.deprecated(
"terms query on the _field_names field is deprecated and will be removed, use exists query instead");
return super.termQuery(value, context);
}
}
@@ -206,12 +220,14 @@ public void preParse(ParseContext context) throws IOException {

@Override
public void postParse(ParseContext context) throws IOException {
-        super.parse(context);
+        if (context.indexSettings().getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).before(Version.V_6_1_0)) {
+            super.parse(context);
+        }
}

@Override
public Mapper parse(ParseContext context) throws IOException {
-        // we parse in post parse
+        // Adding values to the _field_names field is handled by the mappers for each field type
return null;
}

@@ -23,7 +23,10 @@
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoPoint;
@@ -37,6 +40,7 @@
import org.elasticsearch.index.query.QueryShardException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -180,6 +184,15 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
return new AbstractLatLonPointDVIndexFieldData.Builder();
}

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead: ["
@@ -209,6 +222,12 @@ protected void parse(ParseContext originalContext, GeoPoint point) throws IOExce
}
if (fieldType.hasDocValues()) {
context.doc().add(new LatLonDocValuesField(fieldType().name(), point.lat(), point.lon()));
} else if (fieldType().stored() || fieldType().indexOptions() != IndexOptions.NONE) {
List<IndexableField> fields = new ArrayList<>(1);
createFieldNamesField(context, fields);
for (IndexableField field : fields) {
context.doc().add(field);
}
}
// if the mapping contains multifields then use the geohash string
if (multiFields.iterator().hasNext()) {
