server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.cluster.node.DiscoveryNodeRole;
 import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings;
 import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats;
-import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ChunkedToXContent;
@@ -43,8 +42,6 @@
 import java.util.Map;
 import java.util.Objects;
 
-import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.singleChunk;
-
 /**
  * Node statistics (dynamic, changes depending on when created).
  */
@@ -345,58 +342,46 @@ public void writeTo(StreamOutput out) throws IOException {
 
     @Override
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerParams) {
-
-        return Iterators.concat(
-
-            singleChunk((builder, params) -> {
-                builder.field("name", getNode().getName());
-                builder.field("transport_address", getNode().getAddress().toString());
-                builder.field("host", getNode().getHostName());
-                builder.field("ip", getNode().getAddress());
-
-                builder.startArray("roles");
-                for (DiscoveryNodeRole role : getNode().getRoles()) {
-                    builder.value(role.roleName());
-                }
-                builder.endArray();
-
-                if (getNode().getAttributes().isEmpty() == false) {
-                    builder.startObject("attributes");
-                    for (Map.Entry<String, String> attrEntry : getNode().getAttributes().entrySet()) {
-                        builder.field(attrEntry.getKey(), attrEntry.getValue());
-                    }
-                    builder.endObject();
-                }
-
-                return builder;
-            }),
-
-            ifPresent(getIndices()).toXContentChunked(outerParams),
-
-            singleChunk(
-                (builder, p) -> builder.value(ifPresent(getOs()), p).value(ifPresent(getProcess()), p).value(ifPresent(getJvm()), p)
-            ),
-
-            ifPresent(getThreadPool()).toXContentChunked(outerParams),
-            singleChunk(ifPresent(getFs())),
-            ifPresent(getTransport()).toXContentChunked(outerParams),
-            ifPresent(getHttp()).toXContentChunked(outerParams),
-            singleChunk(ifPresent(getBreaker())),
-            ifPresent(getScriptStats()).toXContentChunked(outerParams),
-            singleChunk(ifPresent(getDiscoveryStats())),
-            ifPresent(getIngestStats()).toXContentChunked(outerParams),
-            singleChunk(ifPresent(getAdaptiveSelectionStats())),
-            ifPresent(getScriptCacheStats()).toXContentChunked(outerParams),
-            singleChunk(
-                (builder, p) -> builder.value(ifPresent(getIndexingPressureStats()), p)
-                    .value(ifPresent(getRepositoriesStats()), p)
-                    .value(ifPresent(getNodeAllocationStats()), p)
-            )
-        );
-    }
-
-    private static ChunkedToXContent ifPresent(@Nullable ChunkedToXContent chunkedToXContent) {
-        return Objects.requireNonNullElse(chunkedToXContent, ChunkedToXContent.EMPTY);
-    }
+        return ChunkedToXContent.builder(outerParams).append((builder, params) -> {
+            builder.field("name", getNode().getName());
+            builder.field("transport_address", getNode().getAddress().toString());
+            builder.field("host", getNode().getHostName());
+            builder.field("ip", getNode().getAddress());
+
+            builder.startArray("roles");
+            for (DiscoveryNodeRole role : getNode().getRoles()) {
+                builder.value(role.roleName());
+            }
+            builder.endArray();
+
+            if (getNode().getAttributes().isEmpty() == false) {
+                builder.startObject("attributes");
+                for (Map.Entry<String, String> attrEntry : getNode().getAttributes().entrySet()) {
+                    builder.field(attrEntry.getKey(), attrEntry.getValue());
+                }
+                builder.endObject();
+            }
+            return builder;
+        })
+
+            .appendIfPresent(getIndices())
+            .append((builder, p) -> builder.value(ifPresent(getOs()), p).value(ifPresent(getProcess()), p).value(ifPresent(getJvm()), p))
+
+            .appendIfPresent(getThreadPool())
+            .appendIfPresent(getFs())
+            .appendIfPresent(getTransport())
+            .appendIfPresent(getHttp())
+            .appendIfPresent(getBreaker())
+            .appendIfPresent(getScriptStats())
+            .appendIfPresent(getDiscoveryStats())
+            .appendIfPresent(getIngestStats())
+            .appendIfPresent(getAdaptiveSelectionStats())
+            .appendIfPresent(getScriptCacheStats())
+            .append(
+                (builder, p) -> builder.value(ifPresent(getIndexingPressureStats()), p)
+                    .value(ifPresent(getRepositoriesStats()), p)
+                    .value(ifPresent(getNodeAllocationStats()), p)
+            );
+    }
 
     private static ToXContent ifPresent(@Nullable ToXContent toXContent) {
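
The same refactoring pattern recurs in every file of this PR: hand-rolled Iterators.concat(singleChunk(...), ...) composition becomes a fluent builder chain. As a rough sketch of the mechanics — a self-contained toy, not the Elasticsearch API; ChunkBuilder and its String chunks are invented here for illustration — the builder collects per-section chunk iterators and flattens them lazily, and appendIfPresent simply skips absent sections:

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

final class ChunkBuilder {
    private final List<Iterator<String>> parts = new ArrayList<>();

    // One eagerly-known chunk, like append((builder, params) -> ...) above.
    ChunkBuilder append(String chunk) {
        parts.add(List.of(chunk).iterator());
        return this;
    }

    // A possibly-absent section, like appendIfPresent(getIndices()):
    // null contributes no chunks instead of an empty placeholder.
    ChunkBuilder appendIfPresent(Iterator<String> section) {
        if (section != null) {
            parts.add(section);
        }
        return this;
    }

    // Flatten all parts lazily, like toXContentChunked() returning one Iterator.
    Iterator<String> build() {
        Iterator<Iterator<String>> outer = parts.iterator();
        return new Iterator<String>() {
            private Iterator<String> current = Collections.emptyIterator();

            @Override
            public boolean hasNext() {
                while (current.hasNext() == false && outer.hasNext()) {
                    current = outer.next();
                }
                return current.hasNext();
            }

            @Override
            public String next() {
                if (hasNext() == false) {
                    throw new NoSuchElementException();
                }
                return current.next();
            }
        };
    }

    public static void main(String[] args) {
        Iterator<String> chunks = new ChunkBuilder()
            .append("{\"name\":\"node-0\"}")
            .appendIfPresent(null) // absent stats section: no chunks at all
            .append("{\"os\":{}}")
            .build();
        chunks.forEachRemaining(System.out::println);
    }
}

The win mirrored in the real change: the composition reads top-to-bottom, and the null handling that singleChunk(ifPresent(...)) used to spell out at every call site lives in one place.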
server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java
@@ -14,10 +14,9 @@
 import org.elasticsearch.action.support.nodes.BaseNodesXContentResponse;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.xcontent.ToXContent;
 
 import java.io.IOException;
@@ -42,15 +41,12 @@ protected void writeNodesTo(StreamOutput out, List<NodeStats> nodes) throws IOException {
 
     @Override
     protected Iterator<? extends ToXContent> xContentChunks(ToXContent.Params outerParams) {
-        return Iterators.concat(
-            ChunkedToXContentHelper.startObject("nodes"),
-            Iterators.flatMap(getNodes().iterator(), nodeStats -> Iterators.concat(Iterators.single((builder, params) -> {
-                builder.startObject(nodeStats.getNode().getId());
-                builder.field("timestamp", nodeStats.getTimestamp());
-                return builder;
-            }), nodeStats.toXContentChunked(outerParams), ChunkedToXContentHelper.endObject())),
-            ChunkedToXContentHelper.endObject()
-        );
+        return ChunkedToXContent.builder(outerParams)
+            .object(
+                "nodes",
+                getNodes().iterator(),
+                (b, ns) -> b.object(ns.getNode().getId(), ob -> ob.field("timestamp", ns.getTimestamp()).append(ns))
+            );
     }
 
     @Override
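
The object("nodes", getNodes().iterator(), ...) call above folds the old three-part concat — a startObject chunk, the flat-mapped per-node chunks, an endObject chunk — into a single call. A minimal sketch of that equivalence (toy code, not the Elasticsearch API; chunks are simplified to plain strings):

import java.util.Iterator;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;

final class ObjectChunks {
    // Roughly: Iterators.concat(startObject(name), map(items, perItem), endObject())
    static <T> Iterator<String> object(String name, List<T> items, Function<T, String> perItem) {
        return Stream.of(
            Stream.of("START_OBJECT \"" + name + "\""),
            items.stream().map(perItem),
            Stream.of("END_OBJECT")
        ).flatMap(s -> s).iterator();
    }

    public static void main(String[] args) {
        object("nodes", List.of("node-0", "node-1"), id -> "{\"" + id + "\":{\"timestamp\":0}}")
            .forEachRemaining(System.out::println);
    }
}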
server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesXContentResponse.java
@@ -11,8 +11,7 @@
 
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.common.collect.Iterators;
-import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.rest.action.RestActions;
 import org.elasticsearch.xcontent.ToXContent;
@@ -30,11 +29,12 @@ protected BaseNodesXContentResponse(ClusterName clusterName, List<TNodeResponse>
 
     @Override
     public final Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
-        return Iterators.concat(Iterators.single((b, p) -> {
-            b.startObject();
-            RestActions.buildNodesHeader(b, p, this);
-            return b.field("cluster_name", getClusterName().value());
-        }), xContentChunks(params), ChunkedToXContentHelper.endObject());
+        return ChunkedToXContent.builder(params)
+            .object(
+                ob -> ob.append((b, p) -> RestActions.buildNodesHeader(b, p, this))
+                    .field("cluster_name", getClusterName().value())
+                    .append(xContentChunks(params))
+            );
     }
 
     protected abstract Iterator<? extends ToXContent> xContentChunks(ToXContent.Params outerParams);
server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java
@@ -9,9 +9,9 @@
 
 package org.elasticsearch.cluster;
 
-import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.features.NodeFeature;
@@ -244,15 +244,12 @@ public ClusterFeatures apply(ClusterFeatures part) {
 
     @Override
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
-        return Iterators.concat(
-            Iterators.single((builder, p) -> builder.startArray()),
-            nodeFeatures.entrySet().stream().sorted(Map.Entry.comparingByKey()).<ToXContent>map(e -> (builder, p) -> {
+        return ChunkedToXContent.builder(params)
+            .array(nodeFeatures.entrySet().stream().sorted(Map.Entry.comparingByKey()).iterator(), e -> (builder, p) -> {
                 String[] features = e.getValue().toArray(String[]::new);
                 Arrays.sort(features);
                 return builder.startObject().field("node_id", e.getKey()).array("features", features).endObject();
-            }).iterator(),
-            Iterators.single((builder, p) -> builder.endArray())
-        );
+            });
     }
 
     @Override
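
array(iterator, perItem) is the array-shaped counterpart: it brackets the per-entry chunks with the start/end markers the old code emitted through two separate Iterators.single calls. Sketched under the same toy assumptions as above (not the real API):

import java.util.Iterator;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;

final class ArrayChunks {
    static <T> Iterator<String> array(List<T> items, Function<T, String> perItem) {
        return Stream.of(
            Stream.of("START_ARRAY"),
            items.stream().map(perItem),
            Stream.of("END_ARRAY")
        ).flatMap(s -> s).iterator();
    }

    public static void main(String[] args) {
        array(List.of("node-a", "node-b"), id -> "{\"node_id\":\"" + id + "\"}")
            .forEachRemaining(System.out::println);
    }
}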
52 changes: 14 additions & 38 deletions server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -36,9 +35,6 @@
 
 import static org.elasticsearch.cluster.routing.ShardRouting.newUnassigned;
 import static org.elasticsearch.cluster.routing.UnassignedInfo.Reason.REINITIALIZED;
-import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endArray;
-import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.singleChunk;
-import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject;
 
 /**
  * ClusterInfo is an object representing a map of nodes to {@link DiskUsage}
@@ -142,7 +138,7 @@ private static ShardRouting createFakeShardRoutingFromNodeAndShard(NodeAndShard
 
     @Override
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
-        return Iterators.concat(startObject("nodes"), Iterators.map(leastAvailableSpaceUsage.entrySet().iterator(), c -> (builder, p) -> {
+        return ChunkedToXContent.builder(params).object("nodes", leastAvailableSpaceUsage.entrySet().iterator(), c -> (builder, p) -> {
             builder.startObject(c.getKey());
             { // node
                 builder.field("node_name", c.getValue().nodeName());
@@ -160,52 +156,32 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
                 }
                 builder.endObject(); // end "most_available"
             }
-            builder.endObject(); // end $nodename
-            return builder;
-        }),
-            singleChunk(
-                (builder, p) -> builder.endObject() // end "nodes"
-                    .startObject("shard_sizes")
-            ),
-
-            Iterators.map(
+            return builder.endObject(); // end $nodename
+        })
+            .object(
+                "shard_sizes",
                 shardSizes.entrySet().iterator(),
                 c -> (builder, p) -> builder.humanReadableField(c.getKey() + "_bytes", c.getKey(), ByteSizeValue.ofBytes(c.getValue()))
-            ),
-            singleChunk(
-                (builder, p) -> builder.endObject() // end "shard_sizes"
-                    .startObject("shard_data_set_sizes")
-            ),
-            Iterators.map(
+            )
+            .object(
+                "shard_data_set_sizes",
                 shardDataSetSizes.entrySet().iterator(),
                 c -> (builder, p) -> builder.humanReadableField(
                     c.getKey() + "_bytes",
                     c.getKey().toString(),
                     ByteSizeValue.ofBytes(c.getValue())
                 )
-            ),
-            singleChunk(
-                (builder, p) -> builder.endObject() // end "shard_data_set_sizes"
-                    .startObject("shard_paths")
-            ),
-            Iterators.map(dataPath.entrySet().iterator(), c -> (builder, p) -> builder.field(c.getKey().toString(), c.getValue())),
-            singleChunk(
-                (builder, p) -> builder.endObject() // end "shard_paths"
-                    .startArray("reserved_sizes")
-            ),
-            Iterators.map(reservedSpace.entrySet().iterator(), c -> (builder, p) -> {
+            )
+            .object("shard_paths", dataPath.entrySet().iterator(), (xb, c) -> xb.field(c.getKey().toString(), c.getValue()))
+            .array("reserved_sizes", reservedSpace.entrySet().iterator(), c -> (builder, p) -> {
                 builder.startObject();
                 {
                     builder.field("node_id", c.getKey().nodeId);
                     builder.field("path", c.getKey().path);
-                    c.getValue().toXContent(builder, params);
+                    c.getValue().toXContent(builder, p);
                 }
                 return builder.endObject(); // NodeAndPath
-            }),
-
-            endArray() // end "reserved_sizes"
-
-        );
+            });
     }
 
     /**
@@ -314,7 +290,7 @@ public String toString() {
 
     // exposed for tests, computed here rather than exposing all the collections separately
     int getChunkCount() {
-        return leastAvailableSpaceUsage.size() + shardSizes.size() + shardDataSetSizes.size() + dataPath.size() + reservedSpace.size() + 6;
+        return leastAvailableSpaceUsage.size() + shardSizes.size() + shardDataSetSizes.size() + dataPath.size() + reservedSpace.size() + 10;
    }
 
     public record NodeAndShard(String nodeId, ShardId shardId) implements Writeable {
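
The fixed overhead in getChunkCount() moves from 6 to 10. That is consistent with the builder emitting the open and the close of each named section as its own chunk, where the old code fused each "close previous section, open next" transition into a single chunk — an accounting sketch, assuming one chunk per structural marker:

// old: startObject("nodes"), then four fused chunks of the form
//      "endObject() of the previous section + startObject/startArray of the next",
//      then the final endArray() of "reserved_sizes":
//          1 + 4 + 1 == 6
// new: five sections ("nodes", "shard_sizes", "shard_data_set_sizes",
//      "shard_paths", "reserved_sizes"), each opened and closed in its own chunk:
//          5 * 2 == 10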
server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java
@@ -17,10 +17,9 @@
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ParseField;
@@ -232,13 +231,10 @@ public static DataStreamMetadata fromXContent(XContentParser parser) throws IOException {
     }
 
     @Override
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
-        return Iterators.concat(
-            ChunkedToXContentHelper.xContentValuesMap(DATA_STREAM.getPreferredName(), dataStreams),
-            ChunkedToXContentHelper.startObject(DATA_STREAM_ALIASES.getPreferredName()),
-            dataStreamAliases.values().iterator(),
-            ChunkedToXContentHelper.endObject()
-        );
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
+        return ChunkedToXContent.builder(params)
+            .object(DATA_STREAM.getPreferredName(), b -> b.appendXContentFields(dataStreams))
+            .xContentObject(DATA_STREAM_ALIASES.getPreferredName(), dataStreamAliases.values().iterator());
     }
 
     @Override