Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions server/src/main/java/org/elasticsearch/TransportVersions.java
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,11 @@ static TransportVersion def(int id) {
public static final TransportVersion V_8_15_2 = def(8_702_0_03);
public static final TransportVersion V_8_16_0 = def(8_772_0_01);
public static final TransportVersion V_8_16_1 = def(8_772_0_04);
public static final TransportVersion INITIAL_ELASTICSEARCH_8_16_5 = def(8_772_0_05);
public static final TransportVersion INITIAL_ELASTICSEARCH_8_16_6 = def(8_772_0_06);
public static final TransportVersion V_8_17_0 = def(8_797_0_02);
public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_3 = def(8_797_0_03);
public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_4 = def(8_797_0_04);
// TODO: leave this version until the very end to satisfy max transport version test
public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_5 = def(8_797_0_05);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,11 @@ public SimulateBulkRequest(StreamInput in) throws IOException {
componentTemplateSubstitutions = Map.of();
indexTemplateSubstitutions = Map.of();
}
this.mappingAddition = (Map<String, Object>) in.readGenericValue();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
this.mappingAddition = (Map<String, Object>) in.readGenericValue();
} else {
mappingAddition = Map.of();
}
if (in.getTransportVersion().supports(SIMULATE_INGEST_MAPPING_MERGE_TYPE)) {
mappingMergeType = in.readOptionalString();
} else {
Expand All @@ -167,7 +171,9 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeGenericValue(componentTemplateSubstitutions);
out.writeGenericValue(indexTemplateSubstitutions);
}
out.writeGenericValue(mappingAddition);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
out.writeGenericValue(mappingAddition);
}
if (out.getTransportVersion().supports(SIMULATE_INGEST_MAPPING_MERGE_TYPE)) {
out.writeOptionalString(mappingMergeType);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,16 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio
requireDataStream = false;
}

if (in.getTransportVersion().before(TransportVersions.V_8_17_0)) {
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
in.readZLong(); // obsolete normalisedBytesParsed
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
in.readBoolean(); // obsolete originatesFromUpdateByScript
in.readBoolean(); // obsolete originatesFromUpdateByDoc
}
}

includeSourceOnError = in.readBoolean();

if (in.getTransportVersion().supports(INDEX_REQUEST_INCLUDE_TSID)) {
Expand Down Expand Up @@ -796,6 +806,15 @@ private void writeBody(StreamOutput out) throws IOException {
out.writeBoolean(requireDataStream);
}

if (out.getTransportVersion().before(TransportVersions.V_8_17_0)) {
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
out.writeZLong(-1); // obsolete normalisedBytesParsed
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) {
out.writeBoolean(false); // obsolete originatesFromUpdateByScript
out.writeBoolean(false); // obsolete originatesFromUpdateByDoc
}
}
out.writeBoolean(includeSourceOnError);
if (out.getTransportVersion().supports(INDEX_REQUEST_INCLUDE_TSID)) {
out.writeBytesRef(tsid);
Expand Down
3 changes: 2 additions & 1 deletion server/src/main/java/org/elasticsearch/index/IndexMode.java
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

package org.elasticsearch.index;

import org.elasticsearch.TransportVersions;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.MetadataCreateDataStreamService;
import org.elasticsearch.cluster.metadata.ProjectMetadata;
Expand Down Expand Up @@ -605,7 +606,7 @@ public static void writeTo(IndexMode indexMode, StreamOutput out) throws IOExcep
case STANDARD -> 0;
case TIME_SERIES -> 1;
case LOGSDB -> 2;
case LOOKUP -> 3;
case LOOKUP -> out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0) ? 3 : 0;
};
out.writeByte((byte) code);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
package org.elasticsearch.ingest;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.SimpleDiffable;
import org.elasticsearch.common.Strings;
Expand All @@ -24,6 +25,7 @@
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.io.IOException;
import java.util.ArrayList;
Expand Down Expand Up @@ -170,7 +172,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
/**
 * Deserializes a {@link PipelineConfiguration} from the stream.
 * <p>
 * On or after transport version 8.17 the pipeline config travels as a generic map;
 * older nodes send the raw config bytes plus the {@link XContentType} they were
 * encoded with, which are parsed back into a map here.
 *
 * @param in the stream to read from
 * @return the deserialized pipeline configuration
 * @throws IOException if reading from the stream fails
 */
public static PipelineConfiguration readFrom(StreamInput in) throws IOException {
    final String id = in.readString();
    final Map<String, Object> config;
    if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
        // 8.17+ wire format: the parsed config map is sent directly.
        config = in.readGenericMap();
    } else {
        // Pre-8.17 wire format: raw bytes + content type; parse into a map.
        final BytesReference bytes = in.readSlicedBytesReference();
        final XContentType type = in.readEnum(XContentType.class);
        config = XContentHelper.convertToMap(bytes, true, type).v2();
    }
    return new PipelineConfiguration(id, config);
}

Expand All @@ -188,7 +196,14 @@ public void writeTo(StreamOutput out) throws IOException {
final TransportVersion transportVersion = out.getTransportVersion();
final Map<String, Object> configForTransport = configForTransport(transportVersion);
out.writeString(id);
out.writeGenericMap(configForTransport);
if (transportVersion.onOrAfter(TransportVersions.V_8_17_0)) {
out.writeGenericMap(configForTransport);
} else {
XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).prettyPrint();
builder.map(configForTransport);
out.writeBytesReference(BytesReference.bytes(builder));
XContentHelper.writeTo(out, XContentType.JSON);
}
}

@Override
Expand Down
36 changes: 28 additions & 8 deletions server/src/main/java/org/elasticsearch/monitor/os/OsStats.java
Original file line number Diff line number Diff line change
Expand Up @@ -505,7 +505,11 @@ public Cgroup(

Cgroup(final StreamInput in) throws IOException {
cpuAcctControlGroup = in.readString();
cpuAcctUsageNanos = in.readBigInteger();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
cpuAcctUsageNanos = in.readBigInteger();
} else {
cpuAcctUsageNanos = BigInteger.valueOf(in.readLong());
}
cpuControlGroup = in.readString();
cpuCfsPeriodMicros = in.readLong();
cpuCfsQuotaMicros = in.readLong();
Expand All @@ -518,7 +522,11 @@ public Cgroup(
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeString(cpuAcctControlGroup);
out.writeBigInteger(cpuAcctUsageNanos);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
out.writeBigInteger(cpuAcctUsageNanos);
} else {
out.writeLong(cpuAcctUsageNanos.longValue());
}
out.writeString(cpuControlGroup);
out.writeLong(cpuCfsPeriodMicros);
out.writeLong(cpuCfsQuotaMicros);
Expand Down Expand Up @@ -614,16 +622,28 @@ public CpuStat(
}

/**
 * Reads a {@code CpuStat} from the stream.
 * <p>
 * Nodes on transport version 8.17 or later serialize the counters as
 * {@link BigInteger}s; older nodes send plain longs, which are widened here.
 *
 * @param in the stream to read from
 * @throws IOException if reading from the stream fails
 */
CpuStat(final StreamInput in) throws IOException {
    if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
        numberOfElapsedPeriods = in.readBigInteger();
        numberOfTimesThrottled = in.readBigInteger();
        timeThrottledNanos = in.readBigInteger();
    } else {
        // Pre-8.17 wire format: plain longs, widened to BigInteger.
        numberOfElapsedPeriods = BigInteger.valueOf(in.readLong());
        numberOfTimesThrottled = BigInteger.valueOf(in.readLong());
        timeThrottledNanos = BigInteger.valueOf(in.readLong());
    }
}

/**
 * Writes this {@code CpuStat} to the stream, mirroring the version-dependent
 * format read by {@code CpuStat(StreamInput)}.
 *
 * @param out the stream to write to
 * @throws IOException if writing to the stream fails
 */
@Override
public void writeTo(final StreamOutput out) throws IOException {
    if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
        out.writeBigInteger(numberOfElapsedPeriods);
        out.writeBigInteger(numberOfTimesThrottled);
        out.writeBigInteger(timeThrottledNanos);
    } else {
        // Pre-8.17 nodes expect plain longs; values outside the long range are
        // narrowed by BigInteger.longValue().
        out.writeLong(numberOfElapsedPeriods.longValue());
        out.writeLong(numberOfTimesThrottled.longValue());
        out.writeLong(timeThrottledNanos.longValue());
    }
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
package org.elasticsearch.xpack.core.application;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xcontent.XContentBuilder;
Expand All @@ -27,8 +28,13 @@ public LogsDBFeatureSetUsage(StreamInput input) throws IOException {
super(input);
indicesCount = input.readVInt();
indicesWithSyntheticSource = input.readVInt();
numDocs = input.readVLong();
sizeInBytes = input.readVLong();
if (input.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
numDocs = input.readVLong();
sizeInBytes = input.readVLong();
} else {
numDocs = 0;
sizeInBytes = 0;
}
hasCustomCutoffDate = input.readBoolean();
}

Expand All @@ -37,8 +43,10 @@ public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(indicesCount);
out.writeVInt(indicesWithSyntheticSource);
out.writeVLong(numDocs);
out.writeVLong(sizeInBytes);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) {
out.writeVLong(numDocs);
out.writeVLong(sizeInBytes);
}
out.writeBoolean(hasCustomCutoffDate);
}

Expand All @@ -61,7 +69,7 @@ public LogsDBFeatureSetUsage(

/**
 * The oldest transport version that can read this usage object.
 * <p>
 * NOTE(review): pinned to 8.17 — confirm this matches the version gates used in
 * {@code writeTo} for the fields introduced alongside this change.
 */
@Override
public TransportVersion getMinimalSupportedVersion() {
    return TransportVersions.V_8_17_0;
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@
public class RemoteClusterPermissions implements NamedWriteable, ToXContentObject {

public static final TransportVersion ROLE_REMOTE_CLUSTER_PRIVS = TransportVersions.V_8_15_0;
public static final TransportVersion ROLE_MONITOR_STATS = TransportVersions.V_8_17_0;

public static final String NAME = "remote_cluster_permissions";
private static final Logger logger = LogManager.getLogger(RemoteClusterPermissions.class);
Expand All @@ -78,7 +79,7 @@ public class RemoteClusterPermissions implements NamedWriteable, ToXContentObjec
static Map<TransportVersion, Set<String>> allowedRemoteClusterPermissions = Map.of(
ROLE_REMOTE_CLUSTER_PRIVS,
Set.of(ClusterPrivilegeResolver.MONITOR_ENRICH.name()),
TransportVersion.minimumCompatible(),
ROLE_MONITOR_STATS,
Set.of(ClusterPrivilegeResolver.MONITOR_STATS.name())
);
static final TransportVersion lastTransportVersionPermission = allowedRemoteClusterPermissions.keySet()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -678,6 +678,17 @@ public void testWriteTo_ForHasBeenReroutedChanges() throws IOException {
false
);

{
// From a version before the rerouting logic was added
InferenceAction.Request deserializedInstance = copyWriteable(
instance,
getNamedWriteableRegistry(),
instanceReader(),
TransportVersions.V_8_17_0
);

assertEquals(instance, deserializedInstance);
}
{
// From a version with rerouting
InferenceAction.Request deserializedInstance = copyWriteable(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.TransportVersionUtils;
import org.elasticsearch.transport.RemoteClusterPortSettings;
import org.elasticsearch.xcontent.ObjectPath;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
Expand All @@ -30,6 +31,7 @@
import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection;
import org.elasticsearch.xpack.core.security.user.AnonymousUser;
import org.elasticsearch.xpack.core.security.user.User;
import org.hamcrest.Matchers;

import java.io.IOException;
import java.util.Arrays;
Expand All @@ -44,13 +46,15 @@
import static org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper.randomCloudApiKeyAuthentication;
import static org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo;
import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfoTests.randomRoleDescriptorsIntersection;
import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_MONITOR_STATS;
import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;

Expand Down Expand Up @@ -1023,6 +1027,84 @@ public void testMaybeRewriteMetadataForApiKeyRoleDescriptorsWithRemoteCluster()
);
}

/**
 * Verifies that when an API-key {@code Authentication} is rewritten for an older transport
 * version, remote-cluster privileges that the older version does not understand are stripped
 * from the embedded role descriptors.
 * <p>
 * Two scenarios are covered:
 * <ol>
 *   <li>the role grants both {@code monitor_enrich} and {@code monitor_stats}: only the
 *       newer {@code monitor_stats} privilege is removed by the rewrite;</li>
 *   <li>the role grants only {@code monitor_stats}: removing it drops the entire
 *       {@code remote_cluster} object from the descriptor.</li>
 * </ol>
 */
public void testMaybeRewriteMetadataForApiKeyRoleDescriptorsWithRemoteClusterRemovePrivs() throws IOException {
    final String apiKeyId = randomAlphaOfLengthBetween(1, 10);
    final String apiKeyName = randomAlphaOfLengthBetween(1, 10);
    // API-key metadata whose role descriptors grant two remote-cluster privileges.
    Map<String, Object> metadata = Map.ofEntries(
        entry(AuthenticationField.API_KEY_ID_KEY, apiKeyId),
        entry(AuthenticationField.API_KEY_NAME_KEY, apiKeyName),
        entry(AuthenticationField.API_KEY_ROLE_DESCRIPTORS_KEY, new BytesArray("""
            {"base_role":{"cluster":["all"],
            "remote_cluster":[{"privileges":["monitor_enrich", "monitor_stats"],"clusters":["*"]}]
            }}""")),
        entry(AuthenticationField.API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY, new BytesArray("""
            {"limited_by_role":{"cluster":["*"],
            "remote_cluster":[{"privileges":["monitor_enrich", "monitor_stats"],"clusters":["*"]}]
            }}"""))
    );

    // Authentication created at the current version, so it carries both privileges.
    final Authentication with2privs = AuthenticationTestHelper.builder()
        .apiKey()
        .metadata(metadata)
        .transportVersion(TransportVersion.current())
        .build();

    // pick a version that will only remove one of the two privileges
    final TransportVersion olderVersion = TransportVersionUtils.randomVersionBetween(
        random(),
        ROLE_REMOTE_CLUSTER_PRIVS,
        TransportVersionUtils.getPreviousVersion(ROLE_MONITOR_STATS)
    );

    Map<String, Object> rewrittenMetadata = with2privs.maybeRewriteForOlderVersion(olderVersion).getEffectiveSubject().getMetadata();
    // The rewrite must not add or drop metadata keys, only adjust their contents.
    assertThat(rewrittenMetadata.keySet(), equalTo(with2privs.getAuthenticatingSubject().getMetadata().keySet()));

    // only one of the two privileges is left after the rewrite
    BytesReference baseRoleBytes = (BytesReference) rewrittenMetadata.get(AuthenticationField.API_KEY_ROLE_DESCRIPTORS_KEY);
    Map<String, Object> baseRoleAsMap = XContentHelper.convertToMap(baseRoleBytes, false, XContentType.JSON).v2();
    assertThat(ObjectPath.eval("base_role.remote_cluster.0.privileges", baseRoleAsMap), Matchers.contains("monitor_enrich"));
    assertThat(ObjectPath.eval("base_role.remote_cluster.0.clusters", baseRoleAsMap), notNullValue());
    BytesReference limitedByRoleBytes = (BytesReference) rewrittenMetadata.get(
        AuthenticationField.API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY
    );
    Map<String, Object> limitedByRoleAsMap = XContentHelper.convertToMap(limitedByRoleBytes, false, XContentType.JSON).v2();
    assertThat(ObjectPath.eval("limited_by_role.remote_cluster.0.privileges", limitedByRoleAsMap), Matchers.contains("monitor_enrich"));
    assertThat(ObjectPath.eval("limited_by_role.remote_cluster.0.clusters", limitedByRoleAsMap), notNullValue());

    // same version, but it removes the only defined privilege
    metadata = Map.ofEntries(
        entry(AuthenticationField.API_KEY_ID_KEY, apiKeyId),
        entry(AuthenticationField.API_KEY_NAME_KEY, apiKeyName),
        entry(AuthenticationField.API_KEY_ROLE_DESCRIPTORS_KEY, new BytesArray("""
            {"base_role":{"cluster":["all"],
            "remote_cluster":[{"privileges":["monitor_stats"],"clusters":["*"]}]
            }}""")),
        entry(AuthenticationField.API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY, new BytesArray("""
            {"limited_by_role":{"cluster":["*"],
            "remote_cluster":[{"privileges":["monitor_stats"],"clusters":["*"]}]
            }}"""))
    );

    final Authentication with1priv = AuthenticationTestHelper.builder()
        .apiKey()
        .metadata(metadata)
        .transportVersion(TransportVersion.current())
        .build();

    rewrittenMetadata = with1priv.maybeRewriteForOlderVersion(olderVersion).getEffectiveSubject().getMetadata();
    assertThat(rewrittenMetadata.keySet(), equalTo(with1priv.getAuthenticatingSubject().getMetadata().keySet()));

    // the only privilege is removed after the rewrite, which removes the full "remote_cluster" object
    baseRoleBytes = (BytesReference) rewrittenMetadata.get(AuthenticationField.API_KEY_ROLE_DESCRIPTORS_KEY);
    baseRoleAsMap = XContentHelper.convertToMap(baseRoleBytes, false, XContentType.JSON).v2();
    assertThat(ObjectPath.eval("base_role.remote_cluster", baseRoleAsMap), nullValue());
    assertThat(ObjectPath.eval("base_role.cluster", baseRoleAsMap), notNullValue());
    limitedByRoleBytes = (BytesReference) rewrittenMetadata.get(AuthenticationField.API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY);
    limitedByRoleAsMap = XContentHelper.convertToMap(limitedByRoleBytes, false, XContentType.JSON).v2();
    assertThat(ObjectPath.eval("limited_by_role.remote_cluster", limitedByRoleAsMap), nullValue());
    assertThat(ObjectPath.eval("limited_by_role.cluster", limitedByRoleAsMap), notNullValue());
}

public void testMaybeRemoveRemoteIndicesFromRoleDescriptors() {
final boolean includeClusterPrivileges = randomBoolean();
final BytesReference roleWithoutRemoteIndices = new BytesArray(Strings.format("""
Expand Down
Loading