Verify consistency of version and source in disruption tests
With this change, we verify the consistency of version and source (in
addition to id, seq_no, and term) of live documents between shard copies
at the end of disruption tests.

Port of elastic/elasticsearch#41614
Required by #9394

(cherry picked from commit ddba3be)
marregui authored and mergify-bot committed Nov 29, 2019
1 parent ae24281 commit 18398d1
Showing 5 changed files with 47 additions and 24 deletions.
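
For context, the check these tests perform boils down to collecting the live documents of each shard copy and asserting that the resulting lists match. A minimal sketch of that shape, not part of this commit, assuming a primary and a replica IndexShard, Hamcrest assertions, and the test helpers touched in this diff:

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

import java.io.IOException;
import java.util.List;

import org.elasticsearch.index.engine.DocIdSeqNoAndSource;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardTestCase;

// Hypothetical helper: compares the live documents of two shard copies.
final class ShardCopyConsistencySketch {

    static void assertSameDocs(IndexShard primary, IndexShard replica) throws IOException {
        // getDocIdAndSeqNos(...) now returns DocIdSeqNoAndSource, so this equality check
        // covers _source and _version as well as id, seqNo and primary term.
        List<DocIdSeqNoAndSource> docsOnPrimary = IndexShardTestCase.getDocIdAndSeqNos(primary);
        List<DocIdSeqNoAndSource> docsOnReplica = IndexShardTestCase.getDocIdAndSeqNos(replica);
        assertThat(docsOnReplica, equalTo(docsOnPrimary));
    }
}
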
@@ -3995,7 +3995,7 @@ public void testRestoreLocalHistoryFromTranslog() throws IOException {
Randomness.shuffle(seqNos);
final EngineConfig engineConfig;
final SeqNoStats prevSeqNoStats;
final List<DocIdSeqNoAndTerm> prevDocs;
final List<DocIdSeqNoAndSource> prevDocs;
final int totalTranslogOps;
try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) {
engineConfig = engine.config();
@@ -5053,7 +5053,7 @@ public void testRebuildLocalCheckpointTracker() throws Exception {
commits.add(new ArrayList<>());
try (Store store = createStore()) {
EngineConfig config = config(indexSettings, store, translogPath, NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get);
final List<DocIdSeqNoAndTerm> docs;
final List<DocIdSeqNoAndSource> docs;
try (InternalEngine engine = createEngine(config)) {
List<Engine.Operation> flushedOperations = new ArrayList<>();
for (Engine.Operation op : operations) {
@@ -20,24 +20,34 @@
package org.elasticsearch.index.engine;


import org.apache.lucene.util.BytesRef;

import java.util.Objects;

/** A tuple of document id, sequence number and primary term of a document */
public final class DocIdSeqNoAndTerm {
/** A tuple of document id, sequence number, primary term, source and version of a document */
public final class DocIdSeqNoAndSource {
private final String id;
private final BytesRef source;
private final long seqNo;
private final long primaryTerm;
private final long version;

public DocIdSeqNoAndTerm(String id, long seqNo, long primaryTerm) {
public DocIdSeqNoAndSource(String id, BytesRef source, long seqNo, long primaryTerm, long version) {
this.id = id;
this.source = source;
this.seqNo = seqNo;
this.primaryTerm = primaryTerm;
this.version = version;
}

public String getId() {
return id;
}

public BytesRef getSource() {
return source;
}

public long getSeqNo() {
return seqNo;
}
@@ -46,21 +56,27 @@ public long getPrimaryTerm() {
return primaryTerm;
}

public long getVersion() {
return version;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DocIdSeqNoAndTerm that = (DocIdSeqNoAndTerm) o;
return Objects.equals(id, that.id) && seqNo == that.seqNo && primaryTerm == that.primaryTerm;
DocIdSeqNoAndSource that = (DocIdSeqNoAndSource) o;
return Objects.equals(id, that.id) && Objects.equals(source, that.source)
&& seqNo == that.seqNo && primaryTerm == that.primaryTerm && version == that.version;
}

@Override
public int hashCode() {
return Objects.hash(id, seqNo, primaryTerm);
return Objects.hash(id, source, seqNo, primaryTerm, version);
}

@Override
public String toString() {
return "DocIdSeqNoAndTerm{" + "id='" + id + " seqNo=" + seqNo + " primaryTerm=" + primaryTerm + "}";
return "doc{" + "id='" + id + " seqNo=" + seqNo + " primaryTerm=" + primaryTerm
+ " version=" + version + " source= " + (source != null ? source.utf8ToString() : null) + "}";
}
}
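
As a quick illustration of what the widened equality buys (the values below are made up, not from the commit): two entries that agree on id, seqNo, and primary term but differ only in source or version now compare as unequal, which is exactly the kind of silent divergence the disruption tests want to flag.

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.engine.DocIdSeqNoAndSource;

public class DocIdEqualityDemo {
    public static void main(String[] args) {
        // Same id, seqNo (7) and primary term (1), but a different _source body.
        DocIdSeqNoAndSource onPrimary = new DocIdSeqNoAndSource("doc-1", new BytesRef("{\"f\":1}"), 7, 1, 2);
        DocIdSeqNoAndSource onReplica = new DocIdSeqNoAndSource("doc-1", new BytesRef("{\"f\":2}"), 7, 1, 2);
        // With the old DocIdSeqNoAndTerm these would have been considered equal; now they are not.
        System.out.println(onPrimary.equals(onReplica)); // prints false
    }
}
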
@@ -59,6 +59,7 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.Index;
@@ -899,16 +900,17 @@ public static Engine.Result applyOperation(Engine engine, Engine.Operation opera
/**
* Gets a collection of tuples of docId, sequence number, and primary term of all live documents in the provided engine.
*/
public static List<DocIdSeqNoAndTerm> getDocIds(Engine engine, boolean refresh) throws IOException {
public static List<DocIdSeqNoAndSource> getDocIds(Engine engine, boolean refresh) throws IOException {
if (refresh) {
engine.refresh("test_get_doc_ids");
}
try (Engine.Searcher searcher = engine.acquireSearcher("test_get_doc_ids")) {
List<DocIdSeqNoAndTerm> docs = new ArrayList<>();
List<DocIdSeqNoAndSource> docs = new ArrayList<>();
for (LeafReaderContext leafContext : searcher.reader().leaves()) {
LeafReader reader = leafContext.reader();
NumericDocValues seqNoDocValues = reader.getNumericDocValues(SeqNoFieldMapper.NAME);
NumericDocValues primaryTermDocValues = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
NumericDocValues versionDocValues = reader.getNumericDocValues(VersionFieldMapper.NAME);
Bits liveDocs = reader.getLiveDocs();
for (int i = 0; i < reader.maxDoc(); i++) {
if (liveDocs == null || liveDocs.get(i)) {
@@ -917,20 +919,25 @@ public static List<DocIdSeqNoAndTerm> getDocIds(Engine engine, boolean refresh)
continue;
}
final long primaryTerm = primaryTermDocValues.longValue();
Document uuid = reader.document(i, Collections.singleton(IdFieldMapper.NAME));
BytesRef binaryID = uuid.getBinaryValue(IdFieldMapper.NAME);
String id = Uid.decodeId(Arrays.copyOfRange(binaryID.bytes, binaryID.offset,
binaryID.offset + binaryID.length));
Document doc = reader.document(i, Sets.newHashSet(IdFieldMapper.NAME, SourceFieldMapper.NAME));
BytesRef binaryID = doc.getBinaryValue(IdFieldMapper.NAME);
String id = Uid.decodeId(Arrays.copyOfRange(binaryID.bytes, binaryID.offset, binaryID.offset + binaryID.length));
final BytesRef source = doc.getBinaryValue(SourceFieldMapper.NAME);
if (seqNoDocValues.advanceExact(i) == false) {
throw new AssertionError("seqNoDocValues not found for doc[" + i + "] id[" + id + "]");
}
final long seqNo = seqNoDocValues.longValue();
docs.add(new DocIdSeqNoAndTerm(id, seqNo, primaryTerm));
if (versionDocValues.advanceExact(i) == false) {
throw new AssertionError("versionDocValues not found for doc[" + i + "] id[" + id + "]");
}
final long version = versionDocValues.longValue();
docs.add(new DocIdSeqNoAndSource(id, source, seqNo, primaryTerm, version));
}
}
}
docs.sort(Comparator.comparing(DocIdSeqNoAndTerm::getId)
.thenComparingLong(DocIdSeqNoAndTerm::getSeqNo).thenComparingLong(DocIdSeqNoAndTerm::getPrimaryTerm));
docs.sort(Comparator.comparingLong(DocIdSeqNoAndSource::getSeqNo)
.thenComparingLong(DocIdSeqNoAndSource::getPrimaryTerm)
.thenComparing((DocIdSeqNoAndSource::getId)));
return docs;
}
}
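
The updated getDocIds relies on two Lucene facilities: per-document numeric doc values (for seqNo, primary term, and version) and stored fields (for id and source). A self-contained sketch of that reading pattern, using made-up field names instead of the Elasticsearch field mappers:

import java.io.IOException;
import java.util.Collections;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;

public class DocValuesAndStoredFieldsDemo {
    public static void main(String[] args) throws IOException {
        try (Directory dir = new ByteBuffersDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
            Document doc = new Document();
            doc.add(new NumericDocValuesField("version", 3L));             // column-stride value, like _version
            doc.add(new StoredField("source", new BytesRef("{\"f\":1}"))); // stored bytes, like _source
            writer.addDocument(doc);
            writer.commit();

            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                for (LeafReaderContext ctx : reader.leaves()) {
                    LeafReader leaf = ctx.reader();
                    NumericDocValues versions = leaf.getNumericDocValues("version");
                    Bits liveDocs = leaf.getLiveDocs();
                    for (int i = 0; i < leaf.maxDoc(); i++) {
                        if (liveDocs != null && liveDocs.get(i) == false) {
                            continue; // skip deleted documents, as getDocIds does
                        }
                        Document stored = leaf.document(i, Collections.singleton("source"));
                        BytesRef source = stored.getBinaryValue("source");
                        long version = versions.advanceExact(i) ? versions.longValue() : -1;
                        System.out.println("doc " + i + " version=" + version
                            + " source=" + source.utf8ToString());
                    }
                }
            }
        }
    }
}
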
@@ -44,7 +44,7 @@
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.query.DisabledQueryCache;
import org.elasticsearch.index.engine.DocIdSeqNoAndTerm;
import org.elasticsearch.index.engine.DocIdSeqNoAndSource;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.engine.EngineTestCase;
@@ -655,10 +655,10 @@ private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) th
}

public static Set<String> getShardDocUIDs(final IndexShard shard) throws IOException {
return getDocIdAndSeqNos(shard).stream().map(DocIdSeqNoAndTerm::getId).collect(Collectors.toSet());
return getDocIdAndSeqNos(shard).stream().map(DocIdSeqNoAndSource::getId).collect(Collectors.toSet());
}

public static List<DocIdSeqNoAndTerm> getDocIdAndSeqNos(final IndexShard shard) throws IOException {
public static List<DocIdSeqNoAndSource> getDocIdAndSeqNos(final IndexShard shard) throws IOException {
return EngineTestCase.getDocIds(shard.getEngine(), true);
}

@@ -74,7 +74,7 @@
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.CommitStats;
import org.elasticsearch.index.engine.DocIdSeqNoAndTerm;
import org.elasticsearch.index.engine.DocIdSeqNoAndSource;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.InternalEngine;
import org.elasticsearch.index.seqno.SeqNoStats;
@@ -1246,7 +1246,7 @@ public void assertSameDocIdsOnShards() throws Exception {
if (primaryShard == null) {
continue;
}
final List<DocIdSeqNoAndTerm> docsOnPrimary;
final List<DocIdSeqNoAndSource> docsOnPrimary;
try {
docsOnPrimary = IndexShardTestCase.getDocIdAndSeqNos(primaryShard);
} catch (AlreadyClosedException ex) {
@@ -1257,7 +1257,7 @@ public void assertSameDocIdsOnShards() throws Exception {
if (replicaShard == null) {
continue;
}
final List<DocIdSeqNoAndTerm> docsOnReplica;
final List<DocIdSeqNoAndSource> docsOnReplica;
try {
docsOnReplica = IndexShardTestCase.getDocIdAndSeqNos(replicaShard);
} catch (AlreadyClosedException ex) {