diff --git a/server/src/main/java/org/apache/lucene/index/ShuffleForcedMergePolicy.java b/server/src/main/java/org/apache/lucene/index/ShuffleForcedMergePolicy.java
new file mode 100644
index 0000000000000..711a52c690c56
--- /dev/null
+++ b/server/src/main/java/org/apache/lucene/index/ShuffleForcedMergePolicy.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lucene.index;
+
+import org.elasticsearch.common.lucene.Lucene;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A {@link FilterMergePolicy} that interleaves the oldest and newest segments picked by {@link MergePolicy#findForcedMerges}
+ * and {@link MergePolicy#findForcedDeletesMerges}. This allows time-based indices, which usually have their oldest documents
+ * first, to also be efficient at finding the most recent documents.
+ */
+public class ShuffleForcedMergePolicy extends FilterMergePolicy {
+    private static final String SHUFFLE_MERGE_KEY = "es.shuffle_merge";
+
+    public ShuffleForcedMergePolicy(MergePolicy in) {
+        super(in);
+    }
+
+    /**
+     * Return true if the provided reader was merged with interleaved segments.
+     */
+    public static boolean isInterleavedSegment(LeafReader reader) {
+        SegmentReader segReader = Lucene.segmentReader(reader);
+        return segReader.getSegmentInfo().info.getDiagnostics().containsKey(SHUFFLE_MERGE_KEY);
+    }
+
+
+    @Override
+    public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException {
+        return wrap(in.findForcedDeletesMerges(segmentInfos, mergeContext));
+    }
+
+    @Override
+    public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount,
+                                               Map<SegmentCommitInfo, Boolean> segmentsToMerge,
+                                               MergeContext mergeContext) throws IOException {
+        return wrap(in.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge, mergeContext));
+    }
+
+    private MergeSpecification wrap(MergeSpecification mergeSpec) throws IOException {
+        if (mergeSpec == null) {
+            return null;
+        }
+        MergeSpecification newMergeSpec = new MergeSpecification();
+        for (OneMerge toWrap : mergeSpec.merges) {
+            List<SegmentCommitInfo> newInfos = interleaveList(new ArrayList<>(toWrap.segments));
+            newMergeSpec.add(new OneMerge(newInfos) {
+                @Override
+                public CodecReader wrapForMerge(CodecReader reader) throws IOException {
+                    return toWrap.wrapForMerge(reader);
+                }
+
+                @Override
+                public void setMergeInfo(SegmentCommitInfo info) {
+                    // Record in the segment diagnostics that this segment was produced by an interleaved (shuffle) merge:
+                    Map<String, String> copy = new HashMap<>(info.info.getDiagnostics());
+                    copy.put(SHUFFLE_MERGE_KEY, "");
+                    info.info.setDiagnostics(copy);
+                    super.setMergeInfo(info);
+                }
+            });
+        }
+
+        return newMergeSpec;
+    }
+
+    // Return a new list that sorts the segments of the original one by name (oldest first)
+    // and then interleaves them to colocate the oldest and most recent segments together.
+    private List<SegmentCommitInfo> interleaveList(List<SegmentCommitInfo> infos) throws IOException {
+        List<SegmentCommitInfo> newInfos = new ArrayList<>(infos.size());
+        Collections.sort(infos, Comparator.comparing(a -> a.info.name));
+        int left = 0;
+        int right = infos.size() - 1;
+        while (left <= right) {
+            SegmentCommitInfo leftInfo = infos.get(left);
+            if (left == right) {
+                newInfos.add(infos.get(left));
+            } else {
+                SegmentCommitInfo rightInfo = infos.get(right);
+                // smaller segment first
+                if (leftInfo.sizeInBytes() < rightInfo.sizeInBytes()) {
+                    newInfos.add(leftInfo);
+                    newInfos.add(rightInfo);
+                } else {
+                    newInfos.add(rightInfo);
+                    newInfos.add(leftInfo);
+                }
+            }
+            left++;
+            right--;
+        }
+        return newInfos;
+    }
+}
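For illustration only (not part of the patch): a minimal, self-contained sketch of the ordering that `interleaveList` above produces. `Seg` is a stand-in for `SegmentCommitInfo`, and the segment names and sizes are made up.

```java
import java.util.ArrayList;
import java.util.List;

public class InterleaveOrderDemo {
    // Stand-in for SegmentCommitInfo: just a name and a size in bytes.
    static final class Seg {
        final String name;
        final long sizeInBytes;
        Seg(String name, long sizeInBytes) {
            this.name = name;
            this.sizeInBytes = sizeInBytes;
        }
    }

    // Mirrors the interleaveList logic: the input is sorted by name (oldest first),
    // then the oldest remaining segment is paired with the newest remaining one,
    // emitting the smaller segment of each pair first.
    static List<Seg> interleave(List<Seg> sortedByName) {
        List<Seg> out = new ArrayList<>(sortedByName.size());
        int left = 0;
        int right = sortedByName.size() - 1;
        while (left <= right) {
            Seg oldest = sortedByName.get(left);
            if (left == right) {
                out.add(oldest); // odd count: the middle segment has no partner
            } else {
                Seg newest = sortedByName.get(right);
                if (oldest.sizeInBytes < newest.sizeInBytes) {
                    out.add(oldest);
                    out.add(newest);
                } else {
                    out.add(newest);
                    out.add(oldest);
                }
            }
            left++;
            right--;
        }
        return out;
    }

    public static void main(String[] args) {
        List<Seg> segs = new ArrayList<>();
        // _0.._4 were flushed in this order, so _0 holds the oldest documents.
        segs.add(new Seg("_0", 500));
        segs.add(new Seg("_1", 400));
        segs.add(new Seg("_2", 300));
        segs.add(new Seg("_3", 200));
        segs.add(new Seg("_4", 100));
        // Prints: _4 _0 _3 _1 _2 -- oldest and newest segments are adjacent in the
        // merge order, so recent documents are not all pushed to the end of the
        // resulting force-merged segment.
        for (Seg seg : interleave(segs)) {
            System.out.print(seg.name + " ");
        }
        System.out.println();
    }
}
```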
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 8b4bc32dc74bc..6b3c264185a9c 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -36,6 +36,7 @@
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.index.SegmentCommitInfo;
 import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.ShuffleForcedMergePolicy;
 import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -56,6 +57,7 @@
 import org.elasticsearch.Assertions;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.lease.Releasable;
@@ -2226,6 +2228,13 @@ private IndexWriterConfig getIndexWriterConfig() {
                 new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD, softDeletesPolicy::getRetentionQuery,
                     new PrunePostingsMergePolicy(mergePolicy, IdFieldMapper.NAME)));
         }
+        boolean shuffleForcedMerge = Booleans.parseBoolean(System.getProperty("es.shuffle_forced_merge", Boolean.TRUE.toString()));
+        if (shuffleForcedMerge) {
+            // We wrap the merge policy for all indices even though it is mostly useful for time-based indices,
+            // but there should be no overhead for other types of indices, so this is simpler than adding a setting
+            // to enable it.
+            mergePolicy = new ShuffleForcedMergePolicy(mergePolicy);
+        }
         iwc.setMergePolicy(new ElasticsearchMergePolicy(mergePolicy));
         iwc.setSimilarity(engineConfig.getSimilarity());
         iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().getMbFrac());
diff --git a/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
index 0905b3147dcff..11696fef817c5 100644
--- a/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
+++ b/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
@@ -61,6 +61,11 @@ public ElasticsearchMergePolicy(MergePolicy delegate) {
         super(delegate);
     }
 
+    /** Return the wrapped merge policy. */
+    public MergePolicy getDelegate() {
+        return in;
+    }
+
     private boolean shouldUpgrade(SegmentCommitInfo info) {
         org.apache.lucene.util.Version old = info.info.getVersion();
         org.apache.lucene.util.Version cur = Version.CURRENT.luceneVersion;
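The wrapping above is gated only by the `es.shuffle_forced_merge` system property (default `true`), so it can be switched off with `-Des.shuffle_forced_merge=false` rather than an index setting. A rough sketch, not part of the patch, of how the wiring could be checked through the new `getDelegate()` accessor; how the engine's `IndexWriterConfig` is obtained is left as an assumption here.

```java
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.ShuffleForcedMergePolicy;
import org.elasticsearch.index.shard.ElasticsearchMergePolicy;

final class ShuffleWiringCheck {
    /**
     * Returns true if the given config (assumed to come from InternalEngine#getIndexWriterConfig)
     * ends up with forced merges interleaved by ShuffleForcedMergePolicy.
     */
    static boolean usesShuffledForcedMerges(IndexWriterConfig iwc) {
        // InternalEngine always installs ElasticsearchMergePolicy as the outermost wrapper.
        MergePolicy outer = iwc.getMergePolicy();
        if (outer instanceof ElasticsearchMergePolicy == false) {
            return false;
        }
        // getDelegate() (added above) exposes the wrapped policy, which is the shuffle
        // policy unless -Des.shuffle_forced_merge=false was set.
        MergePolicy delegate = ((ElasticsearchMergePolicy) outer).getDelegate();
        return delegate instanceof ShuffleForcedMergePolicy;
    }
}
```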
diff --git a/server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java b/server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java
new file mode 100644
index 0000000000000..2978e27445483
--- /dev/null
+++ b/server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lucene.index;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.store.Directory;
+
+import java.io.IOException;
+import java.util.function.Consumer;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+
+public class ShuffleForcedMergePolicyTests extends BaseMergePolicyTestCase {
+    public void testDiagnostics() throws IOException {
+        try (Directory dir = newDirectory()) {
+            IndexWriterConfig iwc = newIndexWriterConfig();
+            MergePolicy mp = new ShuffleForcedMergePolicy(newLogMergePolicy());
+            iwc.setMergePolicy(mp);
+            boolean sorted = random().nextBoolean();
+            if (sorted) {
+                iwc.setIndexSort(new Sort(new SortField("sort", SortField.Type.INT)));
+            }
+            int numDocs = atLeast(100);
+
+            try (IndexWriter writer = new IndexWriter(dir, iwc)) {
+                for (int i = 0; i < numDocs; i++) {
+                    if (i % 10 == 0) {
+                        writer.flush();
+                    }
+                    Document doc = new Document();
+                    doc.add(new StringField("id", "" + i, Field.Store.NO));
+                    doc.add(new NumericDocValuesField("sort", random().nextInt()));
+                    writer.addDocument(doc);
+                }
+                try (DirectoryReader reader = DirectoryReader.open(writer)) {
+                    assertThat(reader.leaves().size(), greaterThan(2));
+                    assertSegmentReaders(reader, leaf -> {
+                        assertFalse(ShuffleForcedMergePolicy.isInterleavedSegment(leaf));
+                    });
+                }
+                writer.forceMerge(1);
+                try (DirectoryReader reader = DirectoryReader.open(writer)) {
+                    assertThat(reader.leaves().size(), equalTo(1));
+                    assertSegmentReaders(reader, leaf -> {
+                        assertTrue(ShuffleForcedMergePolicy.isInterleavedSegment(leaf));
+                    });
+                }
+            }
+        }
+    }
+
+    private void assertSegmentReaders(DirectoryReader reader, Consumer<LeafReader> checkLeaf) {
+        for (LeafReaderContext leaf : reader.leaves()) {
+            checkLeaf.accept(leaf.reader());
+        }
+    }
+
+    @Override
+    protected MergePolicy mergePolicy() {
+        return new ShuffleForcedMergePolicy(newLogMergePolicy());
+    }
+
+    @Override
+    protected void assertSegmentInfos(MergePolicy policy, SegmentInfos infos) throws IOException {}
+
+    @Override
+    protected void assertMerge(MergePolicy policy, MergePolicy.MergeSpecification merge) throws IOException {}
+}
diff --git a/server/src/test/java/org/elasticsearch/index/engine/PrunePostingsMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/PrunePostingsMergePolicyTests.java
index 9fb9445181327..eb15b64ae1a76 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/PrunePostingsMergePolicyTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/PrunePostingsMergePolicyTests.java
@@ -29,6 +29,7 @@
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.ShuffleForcedMergePolicy;
 import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
@@ -51,7 +52,7 @@ public void testPrune() throws IOException {
         iwc.setSoftDeletesField("_soft_deletes");
         MergePolicy mp = new SoftDeletesRetentionMergePolicy("_soft_deletes", MatchAllDocsQuery::new,
             new PrunePostingsMergePolicy(newLogMergePolicy(), "id"));
-        iwc.setMergePolicy(mp);
+        iwc.setMergePolicy(new ShuffleForcedMergePolicy(mp));
         boolean sorted = randomBoolean();
         if (sorted) {
             iwc.setIndexSort(new Sort(new SortField("sort", SortField.Type.INT)));
diff --git a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java
index ef895e1a4ce8e..0017c2c6e3300 100644
--- a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java
+++ b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java
@@ -34,6 +34,7 @@
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SegmentCommitInfo;
 import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.ShuffleForcedMergePolicy;
 import org.apache.lucene.index.StandardDirectoryReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -57,7 +58,7 @@ public void testPruneAll() throws IOException {
         IndexWriterConfig iwc = newIndexWriterConfig();
         RecoverySourcePruneMergePolicy mp = new RecoverySourcePruneMergePolicy("extra_source", MatchNoDocsQuery::new,
             newLogMergePolicy());
-        iwc.setMergePolicy(mp);
+        iwc.setMergePolicy(new ShuffleForcedMergePolicy(mp));
         try (IndexWriter writer = new IndexWriter(dir, iwc)) {
             for (int i = 0; i < 20; i++) {
                 if (i > 0 && randomBoolean()) {
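Finally, a usage sketch outside the Lucene test framework (again not part of the patch, and assuming the Elasticsearch server classes are on the classpath): the diagnostics marker written by the policy can be inspected on any index directory; the index path argument is hypothetical.

```java
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ShuffleForcedMergePolicy;
import org.apache.lucene.store.FSDirectory;

import java.nio.file.Paths;

public class InterleavedSegmentCheck {
    public static void main(String[] args) throws Exception {
        // args[0]: path to a Lucene index directory, e.g. a shard's index folder.
        try (FSDirectory dir = FSDirectory.open(Paths.get(args[0]));
             DirectoryReader reader = DirectoryReader.open(dir)) {
            for (LeafReaderContext leaf : reader.leaves()) {
                // isInterleavedSegment looks for the "es.shuffle_merge" key in the segment
                // diagnostics, which is only written for segments produced by an interleaved forced merge.
                System.out.println(leaf.reader() + " interleaved="
                    + ShuffleForcedMergePolicy.isInterleavedSegment(leaf.reader()));
            }
        }
    }
}
```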