Commit

Add a new merge policy that interleaves old and new segments on force merge

This change adds a new merge policy that interleaves the oldest and newest segments picked by MergePolicy#findForcedMerges
and MergePolicy#findForcedDeletesMerges. This allows time-based indices, which usually have the oldest documents
first, to be efficient at finding the most recent documents too. We wrap this merge policy for all indices even though
it is mostly useful for time-based indices; there should be no overhead for other types of indices, so this is simpler
than adding a setting to enable it. This change is needed to ensure that the optimizations we are working
on in # remain efficient even after running a force merge.
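
As an illustration, here is a minimal, self-contained sketch of the interleaving order the policy produces (the class and segment names are hypothetical, not part of this commit): segments are sorted by name, oldest first, and the merge list is then rebuilt by repeatedly pairing the oldest remaining segment with the newest one.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class InterleaveOrderDemo {
    public static void main(String[] args) {
        List<String> segments = new ArrayList<>(List.of("_0", "_1", "_2", "_3", "_4"));
        Collections.sort(segments); // oldest segments first
        List<String> interleaved = new ArrayList<>(segments.size());
        int left = 0;
        int right = segments.size() - 1;
        while (left <= right) {
            if (left == right) {
                interleaved.add(segments.get(left)); // odd count: the middle segment goes last
            } else {
                // the real policy additionally puts the smaller of the two segments first
                interleaved.add(segments.get(left));
                interleaved.add(segments.get(right));
            }
            left++;
            right--;
        }
        System.out.println(interleaved); // prints [_0, _4, _1, _3, _2]
    }
}

Per the commit message, the intent is that after a force merge the most recent documents are no longer all at the tail of the merge, which keeps queries over recent documents efficient.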

Relates elastic#37043
jimczi committed Oct 25, 2019
1 parent 18d87e3 commit 132579b
Showing 5 changed files with 219 additions and 2 deletions.
@@ -0,0 +1,119 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.lucene.index;

import org.elasticsearch.common.lucene.Lucene;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * A {@link FilterMergePolicy} that interleaves the oldest and newest segments picked by {@link MergePolicy#findForcedMerges}
 * and {@link MergePolicy#findForcedDeletesMerges}. This allows time-based indices, which usually have the oldest documents
 * first, to be efficient at finding the most recent documents too.
 */
public class ShuffleForcedMergePolicy extends FilterMergePolicy {
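    // Diagnostics key recorded on every segment produced by an interleaved force merge (see setMergeInfo below).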
    private static final String SHUFFLE_MERGE_KEY = "es.shuffle_merge";

    public ShuffleForcedMergePolicy(MergePolicy in) {
        super(in);
    }

    /**
     * Returns <code>true</code> if the provided reader was merged with interleaved segments.
     */
    public static boolean isInterleavedSegment(LeafReader reader) {
        SegmentReader segReader = Lucene.segmentReader(reader);
        return segReader.getSegmentInfo().info.getDiagnostics().containsKey(SHUFFLE_MERGE_KEY);
    }

    @Override
    public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException {
        return wrap(in.findForcedDeletesMerges(segmentInfos, mergeContext));
    }

    @Override
    public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount,
                                               Map<SegmentCommitInfo, Boolean> segmentsToMerge,
                                               MergeContext mergeContext) throws IOException {
        return wrap(in.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge, mergeContext));
    }

    private MergeSpecification wrap(MergeSpecification mergeSpec) throws IOException {
        if (mergeSpec == null) {
            return null;
        }
        MergeSpecification newMergeSpec = new MergeSpecification();
        for (OneMerge toWrap : mergeSpec.merges) {
            List<SegmentCommitInfo> newInfos = interleaveList(toWrap.segments);
            newMergeSpec.add(new OneMerge(newInfos) {
                @Override
                public CodecReader wrapForMerge(CodecReader reader) throws IOException {
                    return toWrap.wrapForMerge(reader);
                }

                @Override
                public void setMergeInfo(SegmentCommitInfo info) {
                    // Record in the diagnostics that this segment was produced by an interleaved merge:
                    Map<String, String> copy = new HashMap<>(info.info.getDiagnostics());
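                    // Only the presence of the key matters; the value is left empty.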
                    copy.put(SHUFFLE_MERGE_KEY, "");
                    info.info.setDiagnostics(copy);
                    super.setMergeInfo(info);
                }
            });
        }

        return newMergeSpec;
    }

    // Returns a new list that sorts the segments of the original list by name (oldest first)
    // and then interleaves them to colocate the oldest and most recent segments together.
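    // For example, segments [_0, _1, _2, _3, _4] become [_0, _4, _1, _3, _2]
    // (within each pair, the smaller of the two segments is emitted first).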
    private List<SegmentCommitInfo> interleaveList(List<SegmentCommitInfo> infos) throws IOException {
        List<SegmentCommitInfo> newInfos = new ArrayList<>(infos.size());
        Collections.sort(infos, Comparator.comparing(a -> a.info.name));
        int left = 0;
        int right = infos.size() - 1;
        while (left <= right) {
            SegmentCommitInfo leftInfo = infos.get(left);
            if (left == right) {
                newInfos.add(infos.get(left));
            } else {
                SegmentCommitInfo rightInfo = infos.get(right);
                // smaller segment first
                if (leftInfo.sizeInBytes() < rightInfo.sizeInBytes()) {
                    newInfos.add(leftInfo);
                    newInfos.add(rightInfo);
                } else {
                    newInfos.add(rightInfo);
                    newInfos.add(leftInfo);
                }
            }
            left++;
            right--;
        }
        return newInfos;
    }
}
@@ -36,6 +36,7 @@
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.ShuffleForcedMergePolicy;
import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
@@ -2224,6 +2225,10 @@ private IndexWriterConfig getIndexWriterConfig() {
                new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD, softDeletesPolicy::getRetentionQuery,
                    new PrunePostingsMergePolicy(mergePolicy, IdFieldMapper.NAME)));
        }
        // We wrap the merge policy for all indices even though it is mostly useful for time-based indices;
        // there should be no overhead for other types of indices, so this is simpler than adding a setting
        // to enable it.
        mergePolicy = new ShuffleForcedMergePolicy(mergePolicy);
        iwc.setMergePolicy(new ElasticsearchMergePolicy(mergePolicy));
        iwc.setSimilarity(engineConfig.getSimilarity());
        iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().getMbFrac());
@@ -0,0 +1,91 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.lucene.index;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.Directory;

import java.io.IOException;
import java.util.function.Consumer;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

public class ShuffleForcedMergePolicyTests extends BaseMergePolicyTestCase {
    public void testDiagnostics() throws IOException {
        try (Directory dir = newDirectory()) {
            IndexWriterConfig iwc = newIndexWriterConfig();
            MergePolicy mp = new ShuffleForcedMergePolicy(newLogMergePolicy());
            iwc.setMergePolicy(mp);
            boolean sorted = random().nextBoolean();
            if (sorted) {
                iwc.setIndexSort(new Sort(new SortField("sort", SortField.Type.INT)));
            }
            int numDocs = atLeast(100);

            try (IndexWriter writer = new IndexWriter(dir, iwc)) {
                for (int i = 0; i < numDocs; i++) {
                    if (i % 10 == 0) {
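                        // flush every 10 documents so the index ends up with multiple segments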
                        writer.flush();
                    }
                    Document doc = new Document();
                    doc.add(new StringField("id", "" + i, Field.Store.NO));
                    doc.add(new NumericDocValuesField("sort", random().nextInt()));
                    writer.addDocument(doc);
                }
                try (DirectoryReader reader = DirectoryReader.open(writer)) {
                    assertThat(reader.leaves().size(), greaterThan(2));
                    assertSegmentReaders(reader, leaf -> {
                        assertFalse(ShuffleForcedMergePolicy.isInterleavedSegment(leaf));
                    });
                }
                writer.forceMerge(1);
                try (DirectoryReader reader = DirectoryReader.open(writer)) {
                    assertThat(reader.leaves().size(), equalTo(1));
                    assertSegmentReaders(reader, leaf -> {
                        assertTrue(ShuffleForcedMergePolicy.isInterleavedSegment(leaf));
                    });
                }
            }
        }
    }

    private void assertSegmentReaders(DirectoryReader reader, Consumer<LeafReader> checkLeaf) {
        for (LeafReaderContext leaf : reader.leaves()) {
            checkLeaf.accept(leaf.reader());
        }
    }

    @Override
    protected MergePolicy mergePolicy() {
        return new ShuffleForcedMergePolicy(newLogMergePolicy());
    }

    @Override
    protected void assertSegmentInfos(MergePolicy policy, SegmentInfos infos) throws IOException {}

    @Override
    protected void assertMerge(MergePolicy policy, MergePolicy.MergeSpecification merge) throws IOException {}
}
@@ -29,6 +29,7 @@
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.ShuffleForcedMergePolicy;
import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
@@ -51,7 +52,7 @@ public void testPrune() throws IOException {
        iwc.setSoftDeletesField("_soft_deletes");
        MergePolicy mp = new SoftDeletesRetentionMergePolicy("_soft_deletes", MatchAllDocsQuery::new,
            new PrunePostingsMergePolicy(newLogMergePolicy(), "id"));
        iwc.setMergePolicy(new ShuffleForcedMergePolicy(mp));
        boolean sorted = randomBoolean();
        if (sorted) {
            iwc.setIndexSort(new Sort(new SortField("sort", SortField.Type.INT)));
@@ -34,6 +34,7 @@
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.ShuffleForcedMergePolicy;
import org.apache.lucene.index.StandardDirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
@@ -57,7 +58,7 @@ public void testPruneAll() throws IOException {
        IndexWriterConfig iwc = newIndexWriterConfig();
        RecoverySourcePruneMergePolicy mp = new RecoverySourcePruneMergePolicy("extra_source", MatchNoDocsQuery::new,
            newLogMergePolicy());
        iwc.setMergePolicy(new ShuffleForcedMergePolicy(mp));
        try (IndexWriter writer = new IndexWriter(dir, iwc)) {
            for (int i = 0; i < 20; i++) {
                if (i > 0 && randomBoolean()) {
