ES|QL: Make _tsid available in metadata (PR #135204)
Merged
27 commits
20dc681 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 22, 2025)
27b4a4c - [CI] Auto commit changes from spotless (Sep 22, 2025)
43610a7 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 22, 2025)
3a003e1 - Merge remote-tracking branch 'origin/feature/esql-add-tsid-into-metad… (leontyevdv, Sep 22, 2025)
8954568 - [CI] Auto commit changes from spotless (Sep 22, 2025)
f2e7404 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 22, 2025)
abcdbd0 - Merge remote-tracking branch 'origin/feature/esql-add-tsid-into-metad… (leontyevdv, Sep 22, 2025)
98434a6 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 23, 2025)
09338d7 - [CI] Update transport version definitions (Sep 23, 2025)
31b3823 - Merge branch 'main' into feature/esql-add-tsid-into-metadata-list (leontyevdv, Sep 23, 2025)
ec17c13 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 23, 2025)
7c497df - Merge branch 'main' into feature/esql-add-tsid-into-metadata-list (leontyevdv, Sep 23, 2025)
35da14a - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 23, 2025)
f14c65d - Update docs/changelog/135204.yaml (leontyevdv, Sep 23, 2025)
80d6197 - [CI] Update transport version definitions (Sep 23, 2025)
a492b47 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 23, 2025)
7c9b453 - Merge remote-tracking branch 'origin/feature/esql-add-tsid-into-metad… (leontyevdv, Sep 23, 2025)
d283f77 - Merge branch 'main' into feature/esql-add-tsid-into-metadata-list (leontyevdv, Sep 25, 2025)
d1fcc5e - Merge branch 'main' into feature/esql-add-tsid-into-metadata-list (leontyevdv, Sep 26, 2025)
3e1f1dd - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 26, 2025)
f289285 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 26, 2025)
0687445 - Merge branch 'main' into feature/esql-add-tsid-into-metadata-list (leontyevdv, Sep 26, 2025)
62a63e6 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 26, 2025)
5b6cc34 - ES|QL: Make _tsid available in metadata (leontyevdv, Sep 26, 2025)
0d9ff88 - Merge branch 'main' into feature/esql-add-tsid-into-metadata-list (leontyevdv, Sep 26, 2025)
1324a9f - Merge branch 'main' into feature/esql-add-tsid-into-metadata-list (leontyevdv, Sep 29, 2025)
3f23565 - Merge branch 'main' into feature/esql-add-tsid-into-metadata-list (leontyevdv, Sep 29, 2025)
6 changes: 6 additions & 0 deletions docs/changelog/135204.yaml
@@ -0,0 +1,6 @@
pr: 135204
summary: Make `_tsid` available in metadata
area: ES|QL
type: enhancement
issues:
- 133205
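
To make the entry concrete: an ES|QL time-series query can now request _tsid like any other metadata field. A minimal sketch in the style of the integration tests further down (the run helper, EsqlQueryResponse, and the hosts index are borrowed from those tests; the query itself is illustrative):

// Sketch: request the _tsid metadata column; each time series yields one opaque id.
try (EsqlQueryResponse resp = run("TS hosts METADATA _tsid | KEEP _tsid, cluster | LIMIT 10")) {
    for (List<Object> row : EsqlTestUtils.getValuesList(resp)) {
        System.out.println(row.get(0) + " -> " + row.get(1)); // encoded _tsid, cluster
    }
}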
@@ -134,6 +134,15 @@ public Query termQuery(Object value, SearchExecutionContext context) {
throw new IllegalArgumentException("[" + NAME + "] is not searchable");
}

@Override
public Object valueForDisplay(Object value) {
if (value == null) {
return null;
}
BytesRef binaryValue = (BytesRef) value;
return TimeSeriesIdFieldMapper.encodeTsid(binaryValue);
}

@Override
public BlockLoader blockLoader(BlockLoaderContext blContext) {
return new BlockDocValuesReader.BytesRefsFromOrdsBlockLoader(name());
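The new valueForDisplay override above delegates to encodeTsid, so _tsid doc values render as strings rather than raw bytes. A minimal sketch of the expected equivalence; that encodeTsid uses URL-safe, unpadded Base64 is an assumption about its internals (the test below only asserts that a String comes back):

// Sketch, assuming encodeTsid base64-encodes the raw tsid bytes (URL-safe, no padding).
byte[] bytes = new byte[] { 0x01, 0x02, 0x03 };
BytesRef raw = new BytesRef(bytes);
String expected = Base64.getUrlEncoder().withoutPadding().encodeToString(bytes);
// TimeSeriesIdFieldMapper.encodeTsid(raw) should produce the same string.
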
@@ -768,4 +768,23 @@ public void testParseWithDynamicMappingInvalidRoutingHash() {
});
assertThat(failure.getMessage(), equalTo("[5:1] failed to parse: Illegal base64 character 20"));
}

public void testValueForDisplay() throws Exception {
DocumentMapper docMapper = createDocumentMapper("a", mapping(b -> {
b.startObject("a").field("type", "keyword").field("time_series_dimension", true).endObject();
b.startObject("b").field("type", "long").field("time_series_dimension", true).endObject();
}));

ParsedDocument doc = parseDocument(docMapper, b -> b.field("a", "value").field("b", 100));
BytesRef tsidBytes = doc.rootDoc().getBinaryValue("_tsid");
assertThat(tsidBytes, not(nullValue()));

TimeSeriesIdFieldMapper.TimeSeriesIdFieldType fieldType = TimeSeriesIdFieldMapper.FIELD_TYPE;
Object displayValue = fieldType.valueForDisplay(tsidBytes);
Object encodedValue = TimeSeriesIdFieldMapper.encodeTsid(tsidBytes);

assertThat(displayValue, equalTo(encodedValue));
assertThat(displayValue.getClass(), is(String.class));
assertThat(fieldType.valueForDisplay(null), nullValue());
}
}
@@ -45,7 +45,8 @@ public class MetadataAttribute extends TypedAttribute {
Map.entry(IgnoredFieldMapper.NAME, new MetadataAttributeConfiguration(DataType.KEYWORD, true)),
Map.entry(SourceFieldMapper.NAME, new MetadataAttributeConfiguration(DataType.SOURCE, false)),
Map.entry(IndexModeFieldMapper.NAME, new MetadataAttributeConfiguration(DataType.KEYWORD, true)),
Map.entry(SCORE, new MetadataAttributeConfiguration(DataType.DOUBLE, false))
Map.entry(SCORE, new MetadataAttributeConfiguration(DataType.DOUBLE, false)),
Map.entry(TSID_FIELD, new MetadataAttributeConfiguration(DataType.TSID_DATA_TYPE, false))
);

private record MetadataAttributeConfiguration(DataType dataType, boolean searchable) {}
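The new map entry is what lets METADATA _tsid resolve during analysis: the attribute is typed TSID_DATA_TYPE and flagged unsearchable, consistent with the termQuery override earlier that throws "[_tsid] is not searchable". A minimal lookup sketch; the ATTRIBUTES_MAP name is an assumption, since the hunk starts mid-initializer:

// Hypothetical lookup, as attribute resolution would perform it:
MetadataAttributeConfiguration conf = ATTRIBUTES_MAP.get(TSID_FIELD);
// conf.dataType() == DataType.TSID_DATA_TYPE, conf.searchable() == false
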
@@ -414,7 +414,7 @@ public enum DataType implements Writeable {
}

private static final Collection<DataType> TYPES = Arrays.stream(values())
.filter(d -> d != DOC_DATA_TYPE && d != TSID_DATA_TYPE)
.filter(d -> d != DOC_DATA_TYPE)
.sorted(Comparator.comparing(DataType::typeName))
.toList();

@@ -45,6 +45,7 @@
import org.elasticsearch.geometry.utils.Geohash;
import org.elasticsearch.h3.H3;
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.mapper.RoutingPathFields;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils;
@@ -901,8 +902,9 @@ public static Literal randomLiteral(DataType type) {
throw new UncheckedIOException(e);
}
}
case TSID_DATA_TYPE -> randomTsId().toBytesRef();
case DENSE_VECTOR -> Arrays.asList(randomArray(10, 10, i -> new Float[10], ESTestCase::randomFloat));
case UNSUPPORTED, OBJECT, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new IllegalArgumentException(
case UNSUPPORTED, OBJECT, DOC_DATA_TYPE, PARTIAL_AGG -> throw new IllegalArgumentException(
"can't make random values for [" + type.typeName() + "]"
);
}, type);
@@ -918,6 +920,22 @@ static Version randomVersion() {
};
}

static BytesReference randomTsId() {
RoutingPathFields routingPathFields = new RoutingPathFields(null);

int numDimensions = randomIntBetween(1, 4);
for (int i = 0; i < numDimensions; i++) {
String fieldName = "dim" + i;
if (randomBoolean()) {
routingPathFields.addString(fieldName, randomAlphaOfLength(randomIntBetween(3, 10)));
} else {
routingPathFields.addLong(fieldName, randomLongBetween(1, 1000));
}
}

return routingPathFields.buildHash();
}

public static WildcardLike wildcardLike(Expression left, String exp) {
return new WildcardLike(EMPTY, left, new WildcardPattern(exp), false);
}
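With randomTsId in place, randomLiteral(TSID_DATA_TYPE) no longer throws; the helper reuses RoutingPathFields so random ids have the same shape as real tsid routing hashes. A hypothetical direct use, with EMPTY being the same Source constant wildcardLike uses above:

// Sketch: build a TSID-typed literal by hand, as randomLiteral now does internally.
BytesRef tsid = randomTsId().toBytesRef();
Literal lit = new Literal(EMPTY, tsid, DataType.TSID_DATA_TYPE);
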
@@ -525,3 +525,37 @@ true | two | 2024-05-10T00:18:00.000Z
false | two | 2024-05-10T00:20:00.000Z
false | two | 2024-05-10T00:22:00.000Z
;

tsidMetadataAttributeCount
required_capability: ts_command_v0
required_capability: metadata_tsid_field

TS k8s METADATA _tsid
| STATS cnt = count_distinct(_tsid)
;

cnt:long
9
;

tsidMetadataAttributeAggregation
required_capability: ts_command_v0
required_capability: metadata_tsid_field

TS k8s METADATA _tsid
| STATS cnt = count_distinct(_tsid) BY cluster, pod
| SORT cluster
;
ignoreOrder:true

cnt:long | cluster:keyword | pod:keyword
1 | staging | one
1 | staging | two
1 | staging | three
1 | qa | one
1 | qa | two
1 | qa | three
1 | prod | one
1 | prod | two
1 | prod | three
;
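
For orientation: the expected count of 9 in the first spec is exactly the number of (cluster, pod) combinations in the k8s dataset, 3 clusters (staging, qa, prod) times 3 pods (one, two, three), and the second spec confirms one distinct _tsid per combination.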
@@ -0,0 +1,168 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

package org.elasticsearch.xpack.esql.action;

import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER;
import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasSize;

// @TestLogging(value = "org.elasticsearch.xpack.esql.session:DEBUG", reason = "to better understand planning")
public class CrossClusterTimeSeriesIT extends AbstractCrossClusterTestCase {

private static final String INDEX_NAME = "hosts";

record Doc(String host, String cluster, long timestamp, int requestCount, double cpu, ByteSizeValue memory) {}

public void testTsIdMetadataInResponse() {
populateTimeSeriesIndex(LOCAL_CLUSTER, INDEX_NAME);
populateTimeSeriesIndex(REMOTE_CLUSTER_1, INDEX_NAME);

try (EsqlQueryResponse resp = runQuery("TS hosts, cluster-a:hosts METADATA _tsid", Boolean.TRUE)) {
assertNotNull(
resp.columns().stream().map(ColumnInfoImpl::name).filter(name -> name.equalsIgnoreCase("_tsid")).findFirst().orElse(null)
);

assertCCSExecutionInfoDetails(resp.getExecutionInfo(), 2);
}
}

public void testTsIdMetadataInResponseWithFailure() {
populateTimeSeriesIndex(LOCAL_CLUSTER, INDEX_NAME);
populateTimeSeriesIndex(REMOTE_CLUSTER_1, INDEX_NAME);

try (
EsqlQueryResponse resp = runQuery(
"TS hosts, cluster-a:hosts METADATA _tsid | WHERE host IS NOT NULL | STATS cnt = count_distinct(_tsid)",
Boolean.TRUE
)
) {
List<List<Object>> values = getValuesList(resp);
assertThat(values, hasSize(1));
assertNotNull(values.getFirst().getFirst());
assertCCSExecutionInfoDetails(resp.getExecutionInfo(), 2);
}
}

private void populateTimeSeriesIndex(String clusterAlias, String indexName) {
int numShards = randomIntBetween(1, 5);
String clusterTag = Strings.isEmpty(clusterAlias) ? "local" : clusterAlias;
Settings settings = Settings.builder()
.put("mode", "time_series")
.putList("routing_path", List.of("host", "cluster"))
.put("index.number_of_shards", numShards)
.build();

client(clusterAlias).admin()
.indices()
.prepareCreate(indexName)
.setSettings(settings)
.setMapping(
"@timestamp",
"type=date",
"host",
"type=keyword,time_series_dimension=true",
"cluster",
"type=keyword,time_series_dimension=true",
"cpu",
"type=double,time_series_metric=gauge",
"memory",
"type=long,time_series_metric=gauge",
"request_count",
"type=integer,time_series_metric=counter",
"cluster_tag",
"type=keyword"
)
.get();

final List<Doc> docs = getRandomDocs();

for (Doc doc : docs) {
client(clusterAlias).prepareIndex(indexName)
.setSource(
"@timestamp",
doc.timestamp,
"host",
doc.host,
"cluster",
doc.cluster,
"cpu",
doc.cpu,
"memory",
doc.memory.getBytes(),
"request_count",
doc.requestCount,
"cluster_tag",
clusterTag
)
.get();
}
client(clusterAlias).admin().indices().prepareRefresh(indexName).get();
}

private List<Doc> getRandomDocs() {
final List<Doc> docs = new ArrayList<>();

Map<String, String> hostToClusters = new HashMap<>();
for (int i = 0; i < 5; i++) {
hostToClusters.put("p" + i, randomFrom("qa", "prod"));
}
long timestamp = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-04-15T00:00:00Z");

Map<String, Integer> requestCounts = new HashMap<>();
int numDocs = between(20, 100);
for (int i = 0; i < numDocs; i++) {
List<String> hosts = randomSubsetOf(between(1, hostToClusters.size()), hostToClusters.keySet());
timestamp += between(1, 10) * 1000L;
for (String host : hosts) {
var requestCount = requestCounts.compute(host, (k, curr) -> {
if (curr == null || randomInt(100) <= 20) {
return randomIntBetween(0, 10);
} else {
return curr + randomIntBetween(1, 10);
}
});
int cpu = randomIntBetween(0, 100);
ByteSizeValue memory = ByteSizeValue.ofBytes(randomIntBetween(1024, 1024 * 1024));
docs.add(new Doc(host, hostToClusters.get(host), timestamp, requestCount, cpu, memory));
}
}

Randomness.shuffle(docs);

return docs;
}

private static void assertCCSExecutionInfoDetails(EsqlExecutionInfo executionInfo, int expectedNumClusters) {
assertNotNull(executionInfo);
assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L));
assertTrue(executionInfo.isCrossClusterSearch());
assertThat(executionInfo.getClusters().size(), equalTo(expectedNumClusters));

List<EsqlExecutionInfo.Cluster> clusters = executionInfo.clusterAliases().stream().map(executionInfo::getCluster).toList();

for (EsqlExecutionInfo.Cluster cluster : clusters) {
assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L));
assertThat(cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
assertThat(cluster.getSkippedShards(), equalTo(0));
assertThat(cluster.getFailedShards(), equalTo(0));
}
}

}
@@ -686,4 +686,21 @@ public void testNullMetricsAreSkipped() {
assertEquals("Did not filter nulls on counter type", 50, resp.documentsFound());
}
}

public void testTSIDMetadataAttribute() {
List<ColumnInfoImpl> columns = List.of(
new ColumnInfoImpl("_tsid", DataType.TSID_DATA_TYPE, null),
new ColumnInfoImpl("cluster", DataType.KEYWORD, null)
);

try (EsqlQueryResponse resp = run(" TS hosts METADATA _tsid | KEEP _tsid, cluster | LIMIT 1")) {
assertThat(resp.columns(), equalTo(columns));

List<List<Object>> values = EsqlTestUtils.getValuesList(resp);
assertThat(values, hasSize(1));
assertThat(values.getFirst().get(0), Matchers.notNullValue());
assertThat(values.getFirst().get(1), Matchers.notNullValue());
}
}

}
@@ -1558,9 +1558,12 @@ public enum Cap {

INLINE_STATS_FIX_OPTIMIZED_AS_LOCAL_RELATION(INLINESTATS_V11.enabled),

DENSE_VECTOR_AGG_METRIC_DOUBLE_IF_FNS
DENSE_VECTOR_AGG_METRIC_DOUBLE_IF_FNS,

/**
 * Support for requesting the "_tsid" metadata field.
 */
METADATA_TSID_FIELD;

private final boolean enabled;

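The METADATA_TSID_FIELD constant is what the required_capability: metadata_tsid_field lines in the spec tests above reference: spec capability names are the lower-cased constant names. A sketch of the convention; the helper shown is illustrative, not the actual Cap API:

// Sketch of the Cap-to-spec-name convention: METADATA_TSID_FIELD -> "metadata_tsid_field"
static String capabilityName(EsqlCapabilities.Cap cap) {
    return cap.name().toLowerCase(Locale.ROOT);
}
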
@@ -18,6 +18,7 @@
import org.elasticsearch.compute.data.FloatBlock;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
@@ -199,8 +200,16 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex)
return builder.value(((FloatBlock) block).getFloat(valueIndex));
}
};
case DATE_PERIOD, TIME_DURATION, DOC_DATA_TYPE, TSID_DATA_TYPE, SHORT, BYTE, OBJECT, FLOAT, HALF_FLOAT, SCALED_FLOAT,
PARTIAL_AGG -> throw new IllegalArgumentException("can't convert values of type [" + columnInfo.type() + "]");
case TSID_DATA_TYPE -> new PositionToXContent(block) {
@Override
protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex)
throws IOException {
BytesRef bytesRef = ((BytesRefBlock) block).getBytesRef(valueIndex, scratch);
return builder.value(TimeSeriesIdFieldMapper.encodeTsid(bytesRef));
}
};
case DATE_PERIOD, TIME_DURATION, DOC_DATA_TYPE, SHORT, BYTE, OBJECT, FLOAT, HALF_FLOAT, SCALED_FLOAT, PARTIAL_AGG ->
throw new IllegalArgumentException("can't convert values of type [" + columnInfo.type() + "]");
};
}
}