diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 3a6b3a6698364..df37a6535ec32 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -430,11 +430,11 @@ protected boolean supportsExponentialHistograms() { try { return RestEsqlTestCase.hasCapabilities( client(), - List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V4.capabilityName()) + List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.capabilityName()) ) && RestEsqlTestCase.hasCapabilities( remoteClusterClient(), - List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V4.capabilityName()) + List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.capabilityName()) ); } catch (IOException e) { throw new RuntimeException(e); diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java index 31162a1fa6e6c..b80a5feb4ca43 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java @@ -58,7 +58,7 @@ protected boolean supportsSourceFieldMapping() { protected boolean supportsExponentialHistograms() { return RestEsqlTestCase.hasCapabilities( client(), - List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V4.capabilityName()) 
+ List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.capabilityName()) ); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 38b7822479c65..ce5bb6cf8ccc6 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -289,7 +289,7 @@ protected boolean supportsSourceFieldMapping() throws IOException { protected boolean supportsExponentialHistograms() { return RestEsqlTestCase.hasCapabilities( client(), - List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V4.capabilityName()) + List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.capabilityName()) ); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index bd42ee08ed384..61122a7926218 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -172,7 +172,11 @@ public class CsvTestsDataLoader { private static final TestDataset DENSE_VECTOR = new TestDataset("dense_vector"); private static final TestDataset COLORS = new TestDataset("colors"); private static final TestDataset COLORS_CMYK_LOOKUP = new TestDataset("colors_cmyk").withSetting("lookup-settings.json"); - private static final TestDataset EXP_HISTO_SAMPLE = new TestDataset("exp_histo_sample"); + private static final TestDataset EXP_HISTO_SAMPLE = new TestDataset( + "exp_histo_sample", + "exp_histo_sample-mappings.json", + "exp_histo_sample.csv" 
+ ).withSetting("exp_histo_sample-settings.json"); public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/exp_histo_sample.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/exp_histo_sample.csv index a39068f375203..cbb66e6c68090 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/exp_histo_sample.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/data/exp_histo_sample.csv @@ -1,11 +1,11 @@ @timestamp:date,instance:keyword,responseTime:exponential_histogram -2025-01-01T00:00:00Z,dummy-empty,{"scale":-7} -2025-01-01T00:00:00Z,dummy-full,{"scale":0\,"sum":-3775.0\,"min":-100.0\,"max":50.0\,"zero":{"count":1\,"threshold":1.0E-4}\,"positive":{"indices":[-1\,0\,1\,2\,3\,4\,5]\,"counts":[1\,1\,2\,4\,8\,16\,18]}\,"negative":{"indices":[-1\,0\,1\,2\,3\,4\,5\,6]\,"counts":[1\,1\,2\,4\,8\,16\,32\,36]}} -2025-01-01T00:00:00Z,dummy-no_zero_bucket,{"scale":0\,"sum":-3775.0\,"min":-100.0\,"max":50.0\,"positive":{"indices":[-1\,0\,1\,2\,3\,4\,5]\,"counts":[1\,1\,2\,4\,8\,16\,18]}\,"negative":{"indices":[-1\,0\,1\,2\,3\,4\,5\,6]\,"counts":[1\,1\,2\,4\,8\,16\,32\,36]}} -2025-01-01T00:00:00Z,dummy-positive_only,{"scale":2\,"sum":1275.0\,"min":1.0\,"max":50.0\,"positive":{"indices":[-1\,3\,6\,7\,9\,10\,11\,12\,13\,14\,15\,16\,17\,18\,19\,20\,21\,22]\,"counts":[1\,1\,1\,1\,1\,1\,2\,1\,2\,2\,3\,3\,3\,4\,6\,6\,7\,5]}} -2025-01-01T00:00:00Z,dummy-negative_only,{"scale":2\,"sum":-1275.0\,"min":-50.0\,"max":-1.0\,"negative":{"indices":[-1\,3\,6\,7\,9\,10\,11\,12\,13\,14\,15\,16\,17\,18\,19\,20\,21\,22]\,"counts":[1\,1\,1\,1\,1\,1\,2\,1\,2\,2\,3\,3\,3\,4\,6\,6\,7\,5]}} -2025-01-01T00:00:00Z,dummy-zero_threshold_only,{"scale":0\,"zero":{"threshold":2.0E-5}} -2025-01-01T00:00:00Z,dummy-zero_count_only,{"scale":2\,"min":0\,"max":0\,"zero":{"count":101}} +2025-09-24T00:00:00Z,dummy-empty,{"scale":-7} 
+2025-09-24T00:00:00Z,dummy-full,{"scale":0\,"sum":-3775.0\,"min":-100.0\,"max":50.0\,"zero":{"count":1\,"threshold":1.0E-4}\,"positive":{"indices":[-1\,0\,1\,2\,3\,4\,5]\,"counts":[1\,1\,2\,4\,8\,16\,18]}\,"negative":{"indices":[-1\,0\,1\,2\,3\,4\,5\,6]\,"counts":[1\,1\,2\,4\,8\,16\,32\,36]}} +2025-09-24T00:00:00Z,dummy-no_zero_bucket,{"scale":0\,"sum":-3775.0\,"min":-100.0\,"max":50.0\,"positive":{"indices":[-1\,0\,1\,2\,3\,4\,5]\,"counts":[1\,1\,2\,4\,8\,16\,18]}\,"negative":{"indices":[-1\,0\,1\,2\,3\,4\,5\,6]\,"counts":[1\,1\,2\,4\,8\,16\,32\,36]}} +2025-09-24T00:00:00Z,dummy-positive_only,{"scale":2\,"sum":1275.0\,"min":1.0\,"max":50.0\,"positive":{"indices":[-1\,3\,6\,7\,9\,10\,11\,12\,13\,14\,15\,16\,17\,18\,19\,20\,21\,22]\,"counts":[1\,1\,1\,1\,1\,1\,2\,1\,2\,2\,3\,3\,3\,4\,6\,6\,7\,5]}} +2025-09-24T00:00:00Z,dummy-negative_only,{"scale":2\,"sum":-1275.0\,"min":-50.0\,"max":-1.0\,"negative":{"indices":[-1\,3\,6\,7\,9\,10\,11\,12\,13\,14\,15\,16\,17\,18\,19\,20\,21\,22]\,"counts":[1\,1\,1\,1\,1\,1\,2\,1\,2\,2\,3\,3\,3\,4\,6\,6\,7\,5]}} +2025-09-24T00:00:00Z,dummy-zero_threshold_only,{"scale":0\,"zero":{"threshold":2.0E-5}} +2025-09-24T00:00:00Z,dummy-zero_count_only,{"scale":2\,"min":0\,"max":0\,"zero":{"count":101}} 2025-09-25T00:01:00Z,instance-2,{"scale":4\,"sum":0.10814399999999999\,"min":2.65E-4\,"max":0.067933\,"positive":{"indices":[-191\,-188\,-184\,-182\,-181\,-180\,-179\,-178\,-176\,-175\,-172\,-171\,-170\,-169\,-168\,-167\,-152\,-150\,-149\,-137\,-131\,-112\,-110\,-63]\,"counts":[1\,1\,1\,2\,2\,5\,2\,1\,1\,1\,1\,1\,1\,2\,3\,1\,1\,1\,1\,1\,1\,1\,1\,1]}} 
2025-09-25T00:01:00Z,instance-0,{"scale":3\,"sum":9.269670999999999\,"min":3.79E-4\,"max":0.873616\,"positive":{"indices":[-91\,-90\,-88\,-87\,-86\,-85\,-84\,-83\,-82\,-77\,-76\,-75\,-73\,-72\,-66\,-65\,-63\,-61\,-58\,-55\,-54\,-53\,-51\,-49\,-46\,-44\,-43\,-38\,-35\,-31\,-30\,-24\,-23\,-22\,-21\,-19\,-17\,-16\,-15\,-14\,-13\,-10\,-9\,-8\,-5\,-4\,-2]\,"counts":[2\,2\,1\,6\,2\,1\,1\,1\,1\,2\,1\,3\,1\,1\,1\,1\,1\,1\,2\,1\,1\,1\,1\,2\,1\,2\,1\,2\,1\,1\,1\,1\,1\,1\,1\,1\,1\,2\,1\,3\,2\,2\,2\,1\,1\,2\,1]}} 2025-09-25T00:01:00Z,instance-1,{"scale":4\,"sum":0.149232\,"min":2.58E-4\,"max":0.061096\,"positive":{"indices":[-191\,-183\,-182\,-181\,-180\,-179\,-177\,-175\,-174\,-172\,-171\,-168\,-167\,-166\,-155\,-153\,-151\,-150\,-148\,-113\,-66\,-65]\,"counts":[1\,1\,3\,2\,2\,1\,1\,4\,1\,1\,1\,1\,2\,1\,1\,1\,3\,1\,1\,1\,1\,1]}} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exp_histo_sample-mappings.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exp_histo_sample-mappings.json new file mode 100644 index 0000000000000..27746ef0df517 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exp_histo_sample-mappings.json @@ -0,0 +1,15 @@ +{ + "properties": { + "@timestamp": { + "type": "date" + }, + "instance": { + "type": "keyword", + "time_series_dimension": true + }, + "responseTime": { + "type": "exponential_histogram", + "time_series_metric": "histogram" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exp_histo_sample-settings.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exp_histo_sample-settings.json new file mode 100644 index 0000000000000..e4209c65a45bc --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exp_histo_sample-settings.json @@ -0,0 +1,10 @@ +{ + "index": { + "mode": "time_series", + "routing_path": ["instance"], + "time_series": { + "start_time": "2025-09-24T00:00:00Z", + "end_time": "2025-09-26T00:00:00Z" + } + } +} diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exponential_histogram.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exponential_histogram.csv-spec index c92df280cca73..ff5de3e488115 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exponential_histogram.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exponential_histogram.csv-spec @@ -1,5 +1,5 @@ loadFiltered -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE STARTS_WITH(instance, "dummy") | SORT instance | KEEP instance, responseTime ; @@ -14,10 +14,8 @@ dummy-zero_count_only | "{""scale"":2,""sum"":0.0,""min"":0.0,""max"":0.0,"" dummy-zero_threshold_only | "{""scale"":0,""zero"":{""threshold"":2.0E-5}}" ; - - allAggsGrouped -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | EVAL instance = CASE(STARTS_WITH(instance, "dummy"), "dummy-grouped", instance) @@ -36,7 +34,7 @@ instance-2 | 2.2E-4 | 2.744054 | 6.469E-4 | 0.0016068 | 27.706 allAggsFiltered -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | STATS min = MIN(responseTime) WHERE instance == "instance-0", @@ -55,7 +53,7 @@ min:double | max:double | median:double | p75:double | sum:double | avg:double allAggsGroupedFiltered -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") @@ -79,7 +77,7 @@ instance-2 | null | null | 6.469E-4 | null | null allAggsGroupedEmptyGroups -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | STATS min = MIN(responseTime) WHERE 
instance == "idontexist", @@ -96,7 +94,7 @@ null | null | null | null | null | null ; allAggsInlineGrouped -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | INLINE STATS min = MIN(responseTime), max = MAX(responseTime), median = MEDIAN(responseTime), p75 = PERCENTILE(responseTime,75), sum = SUM(responseTime), avg = AVG(responseTime) BY instance @@ -127,7 +125,7 @@ instance-0 | 2.4E-4 | 6.786232 | 0.0211404 | 0.2608237 allAggsOnEmptyHistogram -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE instance == "dummy-empty" | STATS min = MIN(responseTime), max = MAX(responseTime), median = MEDIAN(responseTime), p75 = PERCENTILE(responseTime,75), sum = SUM(responseTime), avg = AVG(responseTime) @@ -140,7 +138,7 @@ NULL | NULL | NULL | NULL | NULL | NULL histoAsCaseValue -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | INLINE STATS p50 = PERCENTILE(responseTime, 50) BY instance, @timestamp @@ -154,7 +152,7 @@ filteredCount:long ; ungroupedPercentiles -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS p0 = PERCENTILE(responseTime,0), p50 = PERCENTILE(responseTime,50), p99 = PERCENTILE(responseTime, 99), p100 = PERCENTILE(responseTime,100) @@ -169,7 +167,7 @@ p0:double | p50:double | p99:double | p100:double groupedPercentiles -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS p0 = PERCENTILE(responseTime,0), p50 = PERCENTILE(responseTime,50), p99 = PERCENTILE(responseTime, 
99), p100 = PERCENTILE(responseTime,100) BY instance @@ -187,7 +185,7 @@ instance-2 | 2.2E-4 | 6.469E-4 | 0.0857672 | 2.7059714542564097 percentileOnEmptyHistogram -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE instance == "dummy-empty" | STATS p50 = PERCENTILE(responseTime,50) @@ -201,7 +199,7 @@ NULL ungroupedMinMax -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS min = MIN(responseTime), max = MAX(responseTime) @@ -215,7 +213,7 @@ min:double | max:double groupedMinMax -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS min = MIN(responseTime), max = MAX(responseTime) BY instance @@ -232,7 +230,7 @@ instance-2 | 2.2E-4 | 2.744054 minMaxOnEmptyHistogram -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE instance == "dummy-empty" | STATS min = MIN(responseTime), max = MAX(responseTime) @@ -245,7 +243,7 @@ NULL | NULL ungroupedAvg -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS avg = ROUND(AVG(responseTime), 7) // rounding to avoid floating point precision issues @@ -258,7 +256,7 @@ avg:double groupedAvg -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS avg = ROUND(AVG(responseTime), 7) BY instance // rounding to avoid floating point precision issues @@ 
-274,7 +272,7 @@ instance-2 | 0.008197 avgOnEmptyHistogram -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE instance == "dummy-empty" | STATS avg = AVG(responseTime) @@ -287,7 +285,7 @@ NULL ungroupedSum -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS sum = ROUND(SUM(responseTime), 7) // rounding to avoid floating point precision issues @@ -300,7 +298,7 @@ sum:double groupedSum -required_capability: exponential_histogram_pre_tech_preview_v4 +required_capability: exponential_histogram_pre_tech_preview_v5 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS sum = ROUND(SUM(responseTime), 7) BY instance // rounding to avoid floating point precision issues @@ -313,3 +311,92 @@ instance-0 | 1472.744209 instance-1 | 36.198484 instance-2 | 27.706021 ; + + +timeseriesAllAggsNoBucket +required_capability: exponential_histogram_pre_tech_preview_v5 +required_capability: ts_command_v0 + +TS exp_histo_sample + | WHERE NOT STARTS_WITH(instance, "dummy") + | STATS min = MIN(responseTime), max = MAX(responseTime), median = MEDIAN(responseTime), p75 = PERCENTILE(responseTime,75), sum = SUM(responseTime), avg = AVG(responseTime) BY instance + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP instance, min, max, median, p75, sum, avg + | SORT instance +; + +instance:keyword | min:double | max:double | median:double | p75:double | sum:double | avg:double +instance-0 | 2.4E-4 | 6.786232 | 0.0211404 | 0.2608237 | 1472.744209 | 0.1665812 +instance-1 | 2.17E-4 | 3.190723 | 6.469E-4 | 0.0016068 | 36.198484 | 0.011138 +instance-2 | 2.2E-4 | 2.744054 | 6.469E-4 | 0.0016068 | 27.706021 | 0.008197 +; + + 
+timeseriesAllAggsFilteredAndBucketed +required_capability: exponential_histogram_pre_tech_preview_v5 +required_capability: ts_command_v0 + +TS exp_histo_sample + | WHERE NOT STARTS_WITH(instance, "dummy") + | WHERE TRANGE(to_datetime("2025-09-25T00:30:00Z"), to_datetime("2025-09-25T01:00:00Z")) + | STATS min = MIN(responseTime), max = MAX(responseTime), median = MEDIAN(responseTime), p75 = PERCENTILE(responseTime,75), sum = SUM(responseTime), avg = AVG(responseTime) BY instance, time=TBUCKET(10m) + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP instance, time, min, max, median, p75, sum, avg + | SORT instance, time +; + +instance:keyword | time:datetime | min:double | max:double | median:double | p75:double | sum:double | avg:double +instance-0 | 2025-09-25T00:30:00.000Z | 2.5E-4 | 1.291403 | 0.0274157 | 0.2391763 | 128.220162 | 0.1582965 +instance-0 | 2025-09-25T00:40:00.000Z | 2.93E-4 | 1.381711 | 0.013139 | 0.2193257 | 139.229386 | 0.1567898 +instance-0 | 2025-09-25T00:50:00.000Z | 2.57E-4 | 1.337726 | 0.029897 | 0.2608237 | 146.914416 | 0.1680943 +instance-0 | 2025-09-25T01:00:00.000Z | 3.58E-4 | 1.384036 | 0.017812 | 0.1665956 | 15.216617 | 0.1729161 +instance-1 | 2025-09-25T00:30:00.000Z | 2.37E-4 | 0.144625 | 6.831E-4 | 0.0015387 | 1.323601 | 0.0046119 +instance-1 | 2025-09-25T00:40:00.000Z | 2.33E-4 | 0.189058 | 5.681E-4 | 0.0014734 | 1.422738 | 0.0049746 +instance-1 | 2025-09-25T00:50:00.000Z | 2.29E-4 | 0.06199 | 5.681E-4 | 0.001411 | 0.737455 | 0.0025695 +instance-1 | 2025-09-25T01:00:00.000Z | 3.31E-4 | 0.252402 | 7.055E-4 | 0.0015898 | 0.399745 | 0.0133248 +instance-2 | 2025-09-25T00:30:00.000Z | 2.21E-4 | 0.136683 | 6.469E-4 | 0.0015727 | 1.710478 | 0.0057982 +instance-2 | 2025-09-25T00:40:00.000Z | 2.25E-4 | 0.098632 | 5.932E-4 | 0.001411 | 1.278917 | 0.0041255 +instance-2 | 2025-09-25T00:50:00.000Z | 2.2E-4 | 0.092109 | 5.807E-4 | 
0.0014578 | 0.946027 | 0.0032622 +instance-2 | 2025-09-25T01:00:00.000Z | 2.45E-4 | 0.008362 | 4.777E-4 | 6.756E-4 | 0.033433 | 0.0011529 +; + + +timeseriesAllAggsUngrouped +required_capability: exponential_histogram_pre_tech_preview_v5 +required_capability: ts_command_v0 + +TS exp_histo_sample + | WHERE NOT STARTS_WITH(instance, "dummy") + | STATS min = MIN(responseTime), max = MAX(responseTime), median = MEDIAN(responseTime), p75 = PERCENTILE(responseTime,75), sum = SUM(responseTime), avg = AVG(responseTime) + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP min, max, median, p75, sum, avg +; + +min:double | max:double | median:double | p75:double | sum:double | avg:double +2.17E-4 | 6.786232 | 0.0016965 | 0.0542885 | 1536.648714 | 0.0993245 +; + + +timeseriesAllAggsGroupedFiltered +required_capability: exponential_histogram_pre_tech_preview_v5 +required_capability: ts_command_v0 + +TS exp_histo_sample + | WHERE NOT STARTS_WITH(instance, "dummy") + | STATS min = MIN(responseTime) WHERE instance == "instance-0", + max = MAX(responseTime) WHERE instance == "instance-1", + median = MEDIAN(responseTime) WHERE instance == "instance-2", + p75 = PERCENTILE(responseTime,75) WHERE instance == "instance-0", + sum = SUM(responseTime) WHERE instance == "instance-1", + avg = AVG(responseTime) WHERE instance == "instance-2" + BY instance + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP instance, min, max, median, p75, sum, avg + | SORT instance +; + +instance:keyword | min:double | max:double | median:double | p75:double | sum:double | avg:double +instance-0 | 2.4E-4 | null | null | 0.2608237 | null | null +instance-1 | null | 3.190723 | null | null | 36.198484 | null +instance-2 | null | null | 6.469E-4 | null | null | 0.008197 +; diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-exp_histo_sample.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-exp_histo_sample.json deleted file mode 100644 index c4fba1a5aa1a6..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-exp_histo_sample.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "properties": { - "@timestamp": { - "type": "date" - }, - "instance": { - "type": "keyword" - }, - "responseTime": { - "type": "exponential_histogram" - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 325ab9944549b..e74de3f495502 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -1572,7 +1572,7 @@ public enum Cap { * When implementing changes on this type, we'll simply increment the version suffix at the end to prevent bwc tests from running. * As soon as we move into tech preview, we'll replace this capability with a "EXPONENTIAL_HISTOGRAM_TECH_PREVIEW" one. 
*/ - EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V4(EXPONENTIAL_HISTOGRAM_FEATURE_FLAG), + EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5(EXPONENTIAL_HISTOGRAM_FEATURE_FLAG), /** * Create new block when filtering OrdinalBytesRefBlock diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java index 4827ae08ff7ab..1163fbde777fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java @@ -55,7 +55,8 @@ public static List getNamedWriteables() { Absent.ENTRY, AbsentOverTime.ENTRY, DimensionValues.ENTRY, - HistogramMerge.ENTRY + HistogramMerge.ENTRY, + HistogramMergeOverTime.ENTRY ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMerge.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMerge.java index daf1c06144f74..0c16705a98618 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMerge.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMerge.java @@ -37,17 +37,17 @@ public class HistogramMerge extends AggregateFunction implements ToAggregator { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, - "Merge", + "HistogramMerge", HistogramMerge::new ); @FunctionInfo(returnType = "exponential_histogram", type = FunctionType.AGGREGATE) public HistogramMerge(Source source, @Param(name = "histogram", type = { "exponential_histogram" }) Expression field) { - this(source, field, Literal.TRUE); + this(source, 
field, Literal.TRUE, NO_WINDOW); } - public HistogramMerge(Source source, Expression field, Expression filter) { - super(source, field, filter, NO_WINDOW, emptyList()); + public HistogramMerge(Source source, Expression field, Expression filter, Expression window) { + super(source, field, filter, window, emptyList()); } private HistogramMerge(StreamInput in) throws IOException { @@ -71,16 +71,16 @@ protected TypeResolution resolveType() { @Override protected NodeInfo info() { - return NodeInfo.create(this, HistogramMerge::new, field(), filter()); + return NodeInfo.create(this, HistogramMerge::new, field(), filter(), window()); } @Override public HistogramMerge replaceChildren(List newChildren) { - return new HistogramMerge(source(), newChildren.get(0), newChildren.get(1)); + return new HistogramMerge(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } public HistogramMerge withFilter(Expression filter) { - return new HistogramMerge(source(), field(), filter); + return new HistogramMerge(source(), field(), filter, window()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMergeOverTime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMergeOverTime.java new file mode 100644 index 0000000000000..ce5005986c844 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMergeOverTime.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; +import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; +import org.elasticsearch.xpack.esql.expression.function.Param; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static java.util.Collections.emptyList; + +/** + * Currently just a surrogate for applying {@link HistogramMerge} per series. + */ +public class HistogramMergeOverTime extends TimeSeriesAggregateFunction implements OptionalArgument { + // TODO Eventually we want to replace this with some increase/rate implementation + // for histograms to be consistent with counters on extrapolation. 
+ + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "HistogramMergeOverTime", + HistogramMergeOverTime::new + ); + + @FunctionInfo(returnType = "exponential_histogram", type = FunctionType.TIME_SERIES_AGGREGATE) + public HistogramMergeOverTime( + Source source, + @Param(name = "histogram", type = "exponential_histogram") Expression field, + @Param(name = "window", type = "time_duration", optional = true) Expression window + ) { + this(source, field, Literal.TRUE, Objects.requireNonNullElse(window, NO_WINDOW)); + } + + public HistogramMergeOverTime(Source source, Expression field, Expression filter, Expression window) { + super(source, field, filter, window, emptyList()); + } + + private HistogramMergeOverTime(StreamInput in) throws IOException { + super(in); + } + + @Override + protected TypeResolution resolveType() { + return perTimeSeriesAggregation().resolveType(); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public DataType dataType() { + return perTimeSeriesAggregation().dataType(); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, HistogramMergeOverTime::new, field(), filter(), window()); + } + + @Override + public HistogramMergeOverTime replaceChildren(List newChildren) { + return new HistogramMergeOverTime(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + } + + @Override + public HistogramMergeOverTime withFilter(Expression filter) { + return new HistogramMergeOverTime(source(), field(), filter, window()); + } + + @Override + public AggregateFunction perTimeSeriesAggregation() { + return new HistogramMerge(source(), field(), filter(), window()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 
ff13b29df75f5..1eec0cd0121d1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -170,7 +170,7 @@ public Expression surrogate() { var field = field(); if (field.dataType() == DataType.EXPONENTIAL_HISTOGRAM) { - return new HistogramPercentile(source(), new HistogramMerge(source(), field, filter()), percentile()); + return new HistogramPercentile(source(), new HistogramMerge(source(), field, filter(), window()), percentile()); } if (field.foldable()) { return new MvPercentile(source(), new ToDouble(source(), field), percentile()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/histogram/ExtractHistogramComponent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/histogram/ExtractHistogramComponent.java index 3181f0ed682bd..70ede3e9c9c88 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/histogram/ExtractHistogramComponent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/histogram/ExtractHistogramComponent.java @@ -83,7 +83,7 @@ public static Expression create(Source source, Expression field, ExponentialHist return new ExtractHistogramComponent(source, field, new Literal(source, component.ordinal(), INTEGER)); } - Expression field() { + public Expression field() { return field; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/TranslateTimeSeriesAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/TranslateTimeSeriesAggregate.java index a2217a1291396..9f3f93aa07d6b 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/TranslateTimeSeriesAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/TranslateTimeSeriesAggregate.java @@ -24,11 +24,13 @@ import org.elasticsearch.xpack.esql.expression.function.TimestampAware; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.DimensionValues; +import org.elasticsearch.xpack.esql.expression.function.aggregate.HistogramMergeOverTime; import org.elasticsearch.xpack.esql.expression.function.aggregate.LastOverTime; import org.elasticsearch.xpack.esql.expression.function.aggregate.TimeSeriesAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.expression.function.grouping.TBucket; +import org.elasticsearch.xpack.esql.expression.function.scalar.histogram.ExtractHistogramComponent; import org.elasticsearch.xpack.esql.expression.function.scalar.internal.PackDimension; import org.elasticsearch.xpack.esql.expression.function.scalar.internal.UnpackDimension; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; @@ -220,8 +222,15 @@ protected LogicalPlan rule(TimeSeriesAggregate aggregate, LogicalOptimizerContex secondPassAggs.add(new Alias(alias.source(), alias.name(), outerAgg, agg.id())); } else { // TODO: reject over_time_aggregation only - final Expression aggField = af.field(); - var tsAgg = new LastOverTime(af.source(), aggField, af.window(), timestamp.get()); + Expression aggField = findAggregatedField(af); + + // We use merge_over_time as default for histograms and last_over_time for other types + TimeSeriesAggregateFunction tsAgg; + if (aggField.dataType() == DataType.EXPONENTIAL_HISTOGRAM) { + tsAgg = new HistogramMergeOverTime(af.source(), 
aggField, Literal.TRUE, af.window()); + } else { + tsAgg = new LastOverTime(af.source(), aggField, af.window(), timestamp.get()); + } final AggregateFunction firstStageFn; if (inlineFilter != null) { firstStageFn = tsAgg.perTimeSeriesAggregation().withFilter(inlineFilter); @@ -369,6 +378,25 @@ protected LogicalPlan rule(TimeSeriesAggregate aggregate, LogicalOptimizerContex } } + private static Expression findAggregatedField(AggregateFunction af) { + // TODO this is a temporary workaround to deal with surrogate-based aggregates on histograms + // E.g. a SUM(myHistogram) is replaced with the following surrogate: SUM(EXTRACT_HISTOGRAM_COMPONENT(myHistogram, "sum")) + // So we need to make sure to apply the implicit merge_over_time aggregation to + // "myHistogram" instead of EXTRACT_HISTOGRAM_COMPONENT(...) + // In the long term we probably want to revisit our strategy of how we apply implicit _over_time aggregations + // Other examples where the current approach likely doesn't work as expected is E.g. SUM(gaugeA + gaugeB), + // which currently translates to SUM(last_over_time(gaugeA + gaugeB)), but probably should be + // SUM(last_over_time(gaugeA) + last_over_time(gaugeB)) instead. + // One possible strategy would be to search for all field references in the expression. + // Then check if there is TimeSeriesAggregateFunction on the path to the outer aggregation (in the chain of parents). 
+ // If not, wrap the field reference with the appropriate TimeSeriesAggregateFunction based on its type + Expression aggregatedExpression = af.field(); + if (aggregatedExpression instanceof ExtractHistogramComponent extractHistogramComponent) { + return extractHistogramComponent.field(); + } + return aggregatedExpression; + } + private static List mergeExpressions( List aggregates, List groupings diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TimeSeriesAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TimeSeriesAggregate.java index 428ecbb749b26..dde6875ad2969 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TimeSeriesAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TimeSeriesAggregate.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; +import org.elasticsearch.xpack.esql.expression.function.aggregate.HistogramMergeOverTime; import org.elasticsearch.xpack.esql.expression.function.aggregate.LastOverTime; import org.elasticsearch.xpack.esql.expression.function.aggregate.TimeSeriesAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; @@ -219,13 +220,18 @@ protected void checkTimeSeriesAggregates(Failures failures) { // reject COUNT(keyword), but allow COUNT(numeric) } else if (outer instanceof TimeSeriesAggregateFunction == false && outer.field() instanceof AggregateFunction == false) { Expression field = outer.field(); - var lastOverTime = new LastOverTime( - source(), - field, - AggregateFunction.NO_WINDOW, - new Literal(source(), null, DataType.DATETIME) - ); - if (lastOverTime.typeResolved() != Expression.TypeResolution.TYPE_RESOLVED) { + TimeSeriesAggregateFunction 
overTimeAgg; + if (field.dataType() == DataType.EXPONENTIAL_HISTOGRAM) { + overTimeAgg = new HistogramMergeOverTime(source(), field, Literal.TRUE, AggregateFunction.NO_WINDOW); + } else { + overTimeAgg = new LastOverTime( + source(), + field, + AggregateFunction.NO_WINDOW, + new Literal(source(), null, DataType.DATETIME) + ); + } + if (overTimeAgg.typeResolved() != Expression.TypeResolution.TYPE_RESOLVED) { failures.add( fail( this, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMergeOverTimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMergeOverTimeTests.java new file mode 100644 index 0000000000000..8128647897a7f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/HistogramMergeOverTimeTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.List; +import java.util.function.Supplier; + +public class HistogramMergeOverTimeTests extends AbstractFunctionTestCase { + + public HistogramMergeOverTimeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return HistogramMergeTests.parameters(); + } + + @Override + protected Expression build(Source source, List args) { + return new HistogramMergeOverTime(source, args.get(0), null); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java index cc74092ac5578..32946f5345b69 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -25,6 +26,7 @@ import 
org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.junit.BeforeClass; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -169,13 +171,20 @@ public static void init() { TEST_VERIFIER ); + List metricIndices = new ArrayList<>(); + if (EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.isEnabled()) { + Map expHistoMetricMapping = loadMapping("exp_histo_sample-mappings.json"); + metricIndices.add( + EsIndexGenerator.esIndex("exp_histo_sample", expHistoMetricMapping, Map.of("exp_histo_sample", IndexMode.TIME_SERIES)) + ); + } metricMapping = loadMapping("k8s-mappings.json"); - var metricsIndex = EsIndexGenerator.esIndex("k8s", metricMapping, Map.of("k8s", IndexMode.TIME_SERIES)); + metricIndices.add(EsIndexGenerator.esIndex("k8s", metricMapping, Map.of("k8s", IndexMode.TIME_SERIES))); metricsAnalyzer = new Analyzer( testAnalyzerContext( EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), - indexResolutions(metricsIndex), + indexResolutions(metricIndices.toArray(EsIndex[]::new)), enrichResolution, emptyInferenceResolution() ), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index ab71a0cc36027..dcd2b5c86834c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.DimensionValues; +import org.elasticsearch.xpack.esql.expression.function.aggregate.HistogramMerge; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.LastOverTime; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; @@ -68,6 +69,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; +import org.elasticsearch.xpack.esql.expression.function.scalar.histogram.ExtractHistogramComponent; +import org.elasticsearch.xpack.esql.expression.function.scalar.histogram.HistogramPercentile; import org.elasticsearch.xpack.esql.expression.function.scalar.internal.PackDimension; import org.elasticsearch.xpack.esql.expression.function.scalar.internal.UnpackDimension; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; @@ -7843,6 +7846,138 @@ public void testTranslateWithInlineFilterWithImplicitLastOverTime() { assertThat(lastOverTime.filter(), instanceOf(Equals.class)); } + public void testTranslateHistogramSumWithImplicitMergeOverTime() { + assumeTrue("exponenial histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.isEnabled()); + var query = """ + TS exp_histo_sample | STATS SUM(responseTime) BY bucket(@timestamp, 1 minute) | LIMIT 10 + """; + var plan = logicalOptimizerWithLatestVersion.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var limit = as(plan, Limit.class); + Aggregate finalAgg = as(limit.child(), Aggregate.class); + assertThat(finalAgg, not(instanceOf(TimeSeriesAggregate.class))); + Eval sumExtractionEval = as(finalAgg.child(), Eval.class); // extracts sum from merged per-series histograms + TimeSeriesAggregate aggsByTsid = as(sumExtractionEval.child(), TimeSeriesAggregate.class); + assertNotNull(aggsByTsid.timeBucket()); + assertThat(aggsByTsid.timeBucket().buckets().fold(FoldContext.small()), 
equalTo(Duration.ofMinutes(1))); + Eval evalBucket = as(aggsByTsid.child(), Eval.class); + assertThat(evalBucket.fields(), hasSize(1)); + EsRelation relation = as(evalBucket.child(), EsRelation.class); + assertThat(relation.indexMode(), equalTo(IndexMode.STANDARD)); + + var crossSeriesSum = as(Alias.unwrap(finalAgg.aggregates().get(0)), Sum.class); + assertFalse(crossSeriesSum.hasFilter()); + + var sumExtraction = as(Alias.unwrap(sumExtractionEval.expressions().get(0)), ExtractHistogramComponent.class); + + var mergePerSeries = as(Alias.unwrap(aggsByTsid.aggregates().get(0)), HistogramMerge.class); + assertFalse(mergePerSeries.hasFilter()); + assertThat(Expressions.attribute(mergePerSeries.field()).name(), equalTo("responseTime")); + + assertThat(Expressions.attribute(aggsByTsid.groupings().get(1)).id(), equalTo(evalBucket.fields().get(0).id())); + Bucket bucket = as(Alias.unwrap(evalBucket.fields().get(0)), Bucket.class); + assertThat(Expressions.attribute(bucket.field()).name(), equalTo("@timestamp")); + } + + public void testTranslateHistogramSumWithImplicitMergeOverTimeAndFilter() { + assumeTrue("exponenial histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.isEnabled()); + var query = """ + TS exp_histo_sample | STATS SUM(responseTime) WHERE instance == "foobar" BY bucket(@timestamp, 1 minute) | LIMIT 10 + """; + var plan = logicalOptimizerWithLatestVersion.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var limit = as(plan, Limit.class); + Aggregate finalAgg = as(limit.child(), Aggregate.class); + assertThat(finalAgg, not(instanceOf(TimeSeriesAggregate.class))); + Eval sumExtractionEval = as(finalAgg.child(), Eval.class); // extracts sum from merged per-series histograms + TimeSeriesAggregate aggsByTsid = as(sumExtractionEval.child(), TimeSeriesAggregate.class); + assertNotNull(aggsByTsid.timeBucket()); + assertThat(aggsByTsid.timeBucket().buckets().fold(FoldContext.small()), 
equalTo(Duration.ofMinutes(1))); + Eval evalBucket = as(aggsByTsid.child(), Eval.class); + assertThat(evalBucket.fields(), hasSize(1)); + EsRelation relation = as(evalBucket.child(), EsRelation.class); + assertThat(relation.indexMode(), equalTo(IndexMode.STANDARD)); + + var crossSeriesSum = as(Alias.unwrap(finalAgg.aggregates().get(0)), Sum.class); + assertFalse(crossSeriesSum.hasFilter()); + + var sumExtraction = as(Alias.unwrap(sumExtractionEval.expressions().get(0)), ExtractHistogramComponent.class); + + var mergePerSeries = as(Alias.unwrap(aggsByTsid.aggregates().get(0)), HistogramMerge.class); + assertTrue(mergePerSeries.hasFilter()); + assertThat(mergePerSeries.filter(), instanceOf(Equals.class)); + assertThat(Expressions.attribute(mergePerSeries.field()).name(), equalTo("responseTime")); + + assertThat(Expressions.attribute(aggsByTsid.groupings().get(1)).id(), equalTo(evalBucket.fields().get(0).id())); + Bucket bucket = as(Alias.unwrap(evalBucket.fields().get(0)), Bucket.class); + assertThat(Expressions.attribute(bucket.field()).name(), equalTo("@timestamp")); + } + + public void testTranslateHistogramPercentileWithImplicitMergeOverTime() { + assumeTrue("exponenial histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.isEnabled()); + var query = """ + TS exp_histo_sample | STATS PERCENTILE(responseTime, 50) BY bucket(@timestamp, 1 minute) | LIMIT 10 + """; + var plan = logicalOptimizerWithLatestVersion.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var project = as(plan, Project.class); + var percentileExtractionEval = as(project.child(), Eval.class); + var limit = as(percentileExtractionEval.child(), Limit.class); + Aggregate finalAgg = as(limit.child(), Aggregate.class); + assertThat(finalAgg, not(instanceOf(TimeSeriesAggregate.class))); + TimeSeriesAggregate aggsByTsid = as(finalAgg.child(), TimeSeriesAggregate.class); + assertNotNull(aggsByTsid.timeBucket()); + 
assertThat(aggsByTsid.timeBucket().buckets().fold(FoldContext.small()), equalTo(Duration.ofMinutes(1))); + Eval evalBucket = as(aggsByTsid.child(), Eval.class); + assertThat(evalBucket.fields(), hasSize(1)); + EsRelation relation = as(evalBucket.child(), EsRelation.class); + assertThat(relation.indexMode(), equalTo(IndexMode.STANDARD)); + + var percentileExtraction = as(Alias.unwrap(percentileExtractionEval.expressions().get(0)), HistogramPercentile.class); + + var crossSeriesMerge = as(Alias.unwrap(finalAgg.aggregates().get(0)), HistogramMerge.class); + assertFalse(crossSeriesMerge.hasFilter()); + + var mergePerSeries = as(Alias.unwrap(aggsByTsid.aggregates().get(0)), HistogramMerge.class); + assertFalse(mergePerSeries.hasFilter()); + assertThat(Expressions.attribute(mergePerSeries.field()).name(), equalTo("responseTime")); + + assertThat(Expressions.attribute(aggsByTsid.groupings().get(1)).id(), equalTo(evalBucket.fields().get(0).id())); + Bucket bucket = as(Alias.unwrap(evalBucket.fields().get(0)), Bucket.class); + assertThat(Expressions.attribute(bucket.field()).name(), equalTo("@timestamp")); + } + + public void testTranslateHistogramPercentileWithImplicitMergeOverTimeAndFilter() { + assumeTrue("exponenial histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.isEnabled()); + var query = """ + TS exp_histo_sample | STATS PERCENTILE(responseTime, 50) WHERE instance == "foobar" BY bucket(@timestamp, 1 minute) | LIMIT 10 + """; + var plan = logicalOptimizerWithLatestVersion.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + var project = as(plan, Project.class); + var percentileExtractionEval = as(project.child(), Eval.class); + var limit = as(percentileExtractionEval.child(), Limit.class); + Aggregate finalAgg = as(limit.child(), Aggregate.class); + assertThat(finalAgg, not(instanceOf(TimeSeriesAggregate.class))); + TimeSeriesAggregate aggsByTsid = as(finalAgg.child(), TimeSeriesAggregate.class); + 
assertNotNull(aggsByTsid.timeBucket()); + assertThat(aggsByTsid.timeBucket().buckets().fold(FoldContext.small()), equalTo(Duration.ofMinutes(1))); + Eval evalBucket = as(aggsByTsid.child(), Eval.class); + assertThat(evalBucket.fields(), hasSize(1)); + EsRelation relation = as(evalBucket.child(), EsRelation.class); + assertThat(relation.indexMode(), equalTo(IndexMode.STANDARD)); + + var percentileExtraction = as(Alias.unwrap(percentileExtractionEval.expressions().get(0)), HistogramPercentile.class); + + var crossSeriesMerge = as(Alias.unwrap(finalAgg.aggregates().get(0)), HistogramMerge.class); + assertFalse(crossSeriesMerge.hasFilter()); + + var mergePerSeries = as(Alias.unwrap(aggsByTsid.aggregates().get(0)), HistogramMerge.class); + assertTrue(mergePerSeries.hasFilter()); + assertThat(mergePerSeries.filter(), instanceOf(Equals.class)); + assertThat(Expressions.attribute(mergePerSeries.field()).name(), equalTo("responseTime")); + + assertThat(Expressions.attribute(aggsByTsid.groupings().get(1)).id(), equalTo(evalBucket.fields().get(0).id())); + Bucket bucket = as(Alias.unwrap(evalBucket.fields().get(0)), Bucket.class); + assertThat(Expressions.attribute(bucket.field()).name(), equalTo("@timestamp")); + } + public void testTranslateOverTimeWithWindow() { { int window = between(1, 20);