Skip to content

Commit

Permalink
Add index details to HLRC get-snapshots parser (#72149)
Browse files Browse the repository at this point in the history
In #71754 we added some per-index details to the `SnapshotInfo` blobs
stored in the repository. This class is also used as part of the
response to a get-snapshots request by the HLRC, but the HLRC uses a
different parser from the one which reads the blobs held in the
repository and this other parser was not extended to deal with the new
details. This commit addresses that.

Backport of #72079
  • Loading branch information
DaveCTurner committed Apr 23, 2021
1 parent 306edf4 commit 2788c45
Show file tree
Hide file tree
Showing 3 changed files with 83 additions and 52 deletions.
81 changes: 42 additions & 39 deletions server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
Expand Down Expand Up @@ -96,6 +97,7 @@ public static final class SnapshotInfoBuilder {
private List<String> indices = null;
private List<String> dataStreams = null;
private List<SnapshotFeatureInfo> featureStates = null;
private Map<String, IndexSnapshotDetails> indexSnapshotDetails = null;
private long startTime = 0L;
private long endTime = 0L;
private ShardStatsBuilder shardStatsBuilder = null;
Expand Down Expand Up @@ -132,6 +134,10 @@ private void setFeatureStates(List<SnapshotFeatureInfo> featureStates) {
this.featureStates = featureStates;
}

// Setter invoked by SNAPSHOT_INFO_PARSER when it encounters the "index_details"
// field of a get-snapshots response; stores the parsed per-index details keyed
// by index name.
private void setIndexSnapshotDetails(Map<String, IndexSnapshotDetails> indexSnapshotDetails) {
this.indexSnapshotDetails = indexSnapshotDetails;
}

// Setter invoked by SNAPSHOT_INFO_PARSER for the "start_time_in_millis" field
// (epoch milliseconds).
private void setStartTime(long startTime) {
this.startTime = startTime;
}
Expand Down Expand Up @@ -175,6 +181,10 @@ public SnapshotInfo build() {
featureStates = Collections.emptyList();
}

if (indexSnapshotDetails == null) {
indexSnapshotDetails = Collections.emptyMap();
}

SnapshotState snapshotState = state == null ? null : SnapshotState.valueOf(state);
Version version = this.version == -1 ? Version.CURRENT : Version.fromId(this.version);

Expand All @@ -185,8 +195,22 @@ public SnapshotInfo build() {
shardFailures = new ArrayList<>();
}

return new SnapshotInfo(snapshotId, indices, dataStreams, featureStates, reason, version, startTime, endTime, totalShards,
successfulShards, shardFailures, includeGlobalState, userMetadata, snapshotState, Collections.emptyMap()
return new SnapshotInfo(
snapshotId,
indices,
dataStreams,
featureStates,
reason,
version,
startTime,
endTime,
totalShards,
successfulShards,
shardFailures,
includeGlobalState,
userMetadata,
snapshotState,
indexSnapshotDetails
);
}
}
Expand Down Expand Up @@ -227,6 +251,10 @@ int getSuccessfulShards() {
SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setDataStreams, new ParseField(DATA_STREAMS));
SNAPSHOT_INFO_PARSER.declareObjectArray(SnapshotInfoBuilder::setFeatureStates, SnapshotFeatureInfo.SNAPSHOT_FEATURE_INFO_PARSER,
new ParseField(FEATURE_STATES));
SNAPSHOT_INFO_PARSER.declareObject(
SnapshotInfoBuilder::setIndexSnapshotDetails,
(p, c) -> p.map(HashMap::new, p2 -> IndexSnapshotDetails.PARSER.parse(p2, c)),
new ParseField(INDEX_DETAILS));
SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setStartTime, new ParseField(START_TIME_IN_MILLIS));
SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS));
SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS));
Expand Down Expand Up @@ -864,7 +892,7 @@ public static SnapshotInfo fromXContentInternal(final XContentParser parser) thr
if (USER_METADATA.equals(currentFieldName)) {
userMetadata = parser.map();
} else if (INDEX_DETAILS.equals(currentFieldName)) {
indexSnapshotDetails = parser.map(HashMap::new, IndexSnapshotDetails::fromXContent);
indexSnapshotDetails = parser.map(HashMap::new, p -> IndexSnapshotDetails.PARSER.parse(p, null));
} else {
// It was probably created by newer version - ignoring
parser.skipChildren();
Expand Down Expand Up @@ -982,6 +1010,17 @@ public static class IndexSnapshotDetails implements ToXContentObject, Writeable

public static final IndexSnapshotDetails SKIPPED = new IndexSnapshotDetails(0, ByteSizeValue.ZERO, 0);

// Lenient parser (ignoreUnknownFields == true, second constructor arg) for a
// single IndexSnapshotDetails object. Used both by SNAPSHOT_INFO_PARSER for
// HLRC get-snapshots responses and by fromXContentInternal when reading the
// SnapshotInfo blob from the repository.
public static final ConstructingObjectParser<IndexSnapshotDetails, Void> PARSER = new ConstructingObjectParser<>(
IndexSnapshotDetails.class.getName(),
true,
a -> new IndexSnapshotDetails((int)a[0], ByteSizeValue.ofBytes((long) a[1]), (int)a[2]));

static {
// Declaration order fixes the constructor-argument indices used in the
// lambda above: a[0] = shard_count, a[1] = size (in bytes), a[2] =
// max_segments_per_shard. Do not reorder without updating the lambda.
PARSER.declareInt(ConstructingObjectParser.constructorArg(), new ParseField(SHARD_COUNT));
PARSER.declareLong(ConstructingObjectParser.constructorArg(), new ParseField(SIZE));
PARSER.declareInt(ConstructingObjectParser.constructorArg(), new ParseField(MAX_SEGMENTS_PER_SHARD));
}

private final int shardCount;
private final ByteSizeValue size;
private final int maxSegmentsPerShard;
Expand Down Expand Up @@ -1048,42 +1087,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.endObject();
return builder;
}

/**
 * Hand-rolled parser for an {@code IndexSnapshotDetails} object.
 * <p>
 * The parser must be positioned on the {@code START_OBJECT} token. Each field
 * is required to have a numeric value; all three known fields must be present
 * (and valid) or an {@link IllegalArgumentException} is thrown. Unknown
 * numeric fields are silently skipped by the switch falling through.
 *
 * @param parser the xContent parser, positioned on START_OBJECT
 * @return the parsed details
 * @throws IOException if reading from the parser fails
 * @throws IllegalArgumentException if a required field is missing or invalid
 */
public static IndexSnapshotDetails fromXContent(XContentParser parser) throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
// Sentinels: -1 / null mean "field not seen yet", checked after the loop.
int shardCount = -1;
ByteSizeValue size = null;
int maxSegmentsPerShard = -1;
String currentFieldName;
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
// Every entry must be FIELD_NAME followed by a numeric value.
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser);
currentFieldName = parser.currentName();
XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, parser.nextToken(), parser);
switch (currentFieldName) {
case SHARD_COUNT:
shardCount = parser.intValue();
break;
case SIZE:
// size is serialized as a raw byte count
size = new ByteSizeValue(parser.longValue());
break;
case MAX_SEGMENTS_PER_SHARD:
maxSegmentsPerShard = parser.intValue();
break;
}
}

// Validate that all required fields were seen and in range: at least one
// shard, a non-null size, and a non-negative segment count.
if (shardCount < 1) {
throw new IllegalArgumentException("field [" + SHARD_COUNT + "] missing or invalid: " + shardCount);
}
if (size == null) {
throw new IllegalArgumentException("field [" + SIZE + "] missing");
}
if (maxSegmentsPerShard < 0) {
throw new IllegalArgumentException("field [" + MAX_SEGMENTS_PER_SHARD + "] missing or invalid: " + maxSegmentsPerShard);
}

return new IndexSnapshotDetails(shardCount, size, maxSegmentsPerShard);
}
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,11 @@

import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.snapshots.SnapshotFeatureInfo;
import org.elasticsearch.snapshots.SnapshotFeatureInfoTests;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.snapshots.SnapshotInfo;
import org.elasticsearch.snapshots.SnapshotInfoTestUtils;
Expand All @@ -27,7 +29,7 @@
import java.util.function.Predicate;
import java.util.regex.Pattern;

import static org.elasticsearch.snapshots.SnapshotFeatureInfoTests.randomSnapshotFeatureInfo;
import static org.elasticsearch.snapshots.SnapshotInfo.INDEX_DETAILS_XCONTENT_PARAM;

public class GetSnapshotsResponseTests extends AbstractSerializingTestCase<GetSnapshotsResponse> {

Expand All @@ -36,6 +38,12 @@ protected GetSnapshotsResponse doParseInstance(XContentParser parser) throws IOE
return GetSnapshotsResponse.fromXContent(parser);
}

@Override
protected ToXContent.Params getToXContentParams() {
// The per-index details are excluded from the xContent output by default;
// opt in here so the serialization round-trip reproduces the full instance.
return new ToXContent.MapParams(Collections.singletonMap(INDEX_DETAILS_XCONTENT_PARAM, "true"));
}

@Override
protected GetSnapshotsResponse createTestInstance() {
ArrayList<SnapshotInfo> snapshots = new ArrayList<>();
Expand All @@ -44,11 +52,20 @@ protected GetSnapshotsResponse createTestInstance() {
String reason = randomBoolean() ? null : "reason";
ShardId shardId = new ShardId("index", UUIDs.base64UUID(), 2);
List<SnapshotShardFailure> shardFailures = Collections.singletonList(new SnapshotShardFailure("node-id", shardId, "reason"));
List<SnapshotFeatureInfo> featureInfos = randomList(0, () -> randomSnapshotFeatureInfo());
snapshots.add(new SnapshotInfo(snapshotId, Arrays.asList("index1", "index2"), Collections.singletonList("ds"),
featureInfos, reason, System.currentTimeMillis(), randomIntBetween(2, 3), shardFailures, randomBoolean(),
SnapshotInfoTestUtils.randomUserMetadata(), System.currentTimeMillis(), Collections.emptyMap()
));
List<SnapshotFeatureInfo> featureInfos = randomList(0, SnapshotFeatureInfoTests::randomSnapshotFeatureInfo);
snapshots.add(new SnapshotInfo(
snapshotId,
Arrays.asList("index1", "index2"),
Collections.singletonList("ds"),
featureInfos,
reason,
System.currentTimeMillis(),
randomIntBetween(2, 3),
shardFailures,
randomBoolean(),
SnapshotInfoTestUtils.randomUserMetadata(),
System.currentTimeMillis(),
SnapshotInfoTestUtils.randomIndexSnapshotDetails()));
}
return new GetSnapshotsResponse(snapshots);
}
Expand All @@ -58,15 +75,26 @@ protected Writeable.Reader<GetSnapshotsResponse> instanceReader() {
return GetSnapshotsResponse::new;
}

@Override
protected boolean supportsUnknownFields() {
// The response parser tolerates unknown fields; getRandomFieldsExcludeFilter
// below restricts where the test framework may inject them.
return true;
}

@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// Don't inject random fields into the custom snapshot metadata, because the metadata map is equality-checked after doing a
// round-trip through xContent serialization/deserialization. Even though the rest of the object ignores unknown fields,
// `metadata` doesn't ignore unknown fields (it just includes them in the parsed object, because the keys are arbitrary),
// so any new fields added to the metadata before it gets deserialized that weren't in the serialized version will
// cause the equality check to fail.
//
// Also don't inject random junk into the index details section, since this is keyed by index name but the values
// are required to be a valid IndexSnapshotDetails
//
// The actual fields are nested in an array, so these regexes match fields with names of the form
// `snapshots.3.metadata` (and anything beneath it) and `snapshots.3.index_details`
final Pattern metadataPattern = Pattern.compile("snapshots\\.\\d+\\.metadata.*");
final Pattern indexDetailsPattern = Pattern.compile("snapshots\\.\\d+\\.index_details");
return s -> metadataPattern.matcher(s).matches() || indexDetailsPattern.matcher(s).matches();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ static SnapshotInfo createRandomSnapshotInfo() {
);
}

private static Map<String, SnapshotInfo.IndexSnapshotDetails> randomIndexSnapshotDetails() {
public static Map<String, SnapshotInfo.IndexSnapshotDetails> randomIndexSnapshotDetails() {
final Map<String, SnapshotInfo.IndexSnapshotDetails> result = new HashMap<>();
final int size = between(0, 10);
while (result.size() < size) {
Expand Down

0 comments on commit 2788c45

Please sign in to comment.