diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..59c460d826445 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregatorFunction.java @@ -0,0 +1,187 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.ExponentialHistogramBlock; +import org.elasticsearch.compute.data.ExponentialHistogramScratch; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.exponentialhistogram.ExponentialHistogram; + +/** + * {@link AggregatorFunction} implementation for {@link FirstExponentialHistogramByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
+ */ +public final class FirstExponentialHistogramByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.EXPONENTIAL_HISTOGRAM), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final ExponentialHistogramStates.WithLongSingleState state; + + private final List channels; + + public FirstExponentialHistogramByTimestampAggregatorFunction(DriverContext driverContext, + List channels, ExponentialHistogramStates.WithLongSingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static FirstExponentialHistogramByTimestampAggregatorFunction create( + DriverContext driverContext, List channels) { + return new FirstExponentialHistogramByTimestampAggregatorFunction(driverContext, channels, FirstExponentialHistogramByTimestampAggregator.initSingle(driverContext)); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + ExponentialHistogramBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + ExponentialHistogramBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + 
private void addRawBlock(ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + int valueValueCount = valueBlock.getValueCount(p); + if (valueValueCount == 0) { + continue; + } + int timestampValueCount = timestampBlock.getValueCount(p); + if (timestampValueCount == 0) { + continue; + } + int valueStart = valueBlock.getFirstValueIndex(p); + int valueEnd = valueStart + valueValueCount; + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(p); + int timestampEnd = timestampStart + timestampValueCount; + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + FirstExponentialHistogramByTimestampAggregator.combine(state, valueValue, timestampValue); + } + } + } + } + + private void addRawBlock(ExponentialHistogramBlock valueBlock, LongBlock timestampBlock, + BooleanVector mask) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + int valueValueCount = valueBlock.getValueCount(p); + if (valueValueCount == 0) { + continue; + } + int timestampValueCount = timestampBlock.getValueCount(p); + if (timestampValueCount == 0) { + continue; + } + int valueStart = valueBlock.getFirstValueIndex(p); + int valueEnd = valueStart + valueValueCount; + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(p); + int timestampEnd = 
timestampStart + timestampValueCount; + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + FirstExponentialHistogramByTimestampAggregator.combine(state, valueValue, timestampValue); + } + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + ExponentialHistogramBlock values = (ExponentialHistogramBlock) valuesUncast; + assert values.getPositionCount() == 1; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + ExponentialHistogramScratch valuesScratch = new ExponentialHistogramScratch(); + FirstExponentialHistogramByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getExponentialHistogram(values.getFirstValueIndex(0), valuesScratch), seen.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = FirstExponentialHistogramByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + 
sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..c9c33cdeac6f8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link FirstExponentialHistogramByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
+ */ +public final class FirstExponentialHistogramByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public FirstExponentialHistogramByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return FirstExponentialHistogramByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return FirstExponentialHistogramByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public FirstExponentialHistogramByTimestampAggregatorFunction aggregator( + DriverContext driverContext, List channels) { + return FirstExponentialHistogramByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public FirstExponentialHistogramByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return FirstExponentialHistogramByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return FirstExponentialHistogramByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..d74af5ba07d80 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,321 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.ExponentialHistogramBlock; +import org.elasticsearch.compute.data.ExponentialHistogramScratch; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.exponentialhistogram.ExponentialHistogram; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link FirstExponentialHistogramByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
+ */ +public final class FirstExponentialHistogramByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.EXPONENTIAL_HISTOGRAM), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final ExponentialHistogramStates.WithLongGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public FirstExponentialHistogramByTimestampGroupingAggregatorFunction(List channels, + ExponentialHistogramStates.WithLongGroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static FirstExponentialHistogramByTimestampGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new FirstExponentialHistogramByTimestampGroupingAggregatorFunction(channels, FirstExponentialHistogramByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + ExponentialHistogramBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, 
timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, + ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + if (valueBlock.isNull(valuesPosition)) { + continue; + } + if (timestampBlock.isNull(valuesPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valueStart = valueBlock.getFirstValueIndex(valuesPosition); + int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition); + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition); + int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition); + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + FirstExponentialHistogramByTimestampAggregator.combine(state, groupId, valueValue, timestampValue); + } + } + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = 
page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + ExponentialHistogramBlock values = (ExponentialHistogramBlock) valuesUncast; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == seen.getPositionCount(); + ExponentialHistogramScratch valuesScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + FirstExponentialHistogramByTimestampAggregator.combineIntermediate(state, groupId, timestamps.getLong(valuesPosition), values.getExponentialHistogram(values.getFirstValueIndex(valuesPosition), valuesScratch), seen.getBoolean(valuesPosition)); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, + ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + if (valueBlock.isNull(valuesPosition)) { + continue; + } + if (timestampBlock.isNull(valuesPosition)) { + continue; + } + int groupStart = 
groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valueStart = valueBlock.getFirstValueIndex(valuesPosition); + int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition); + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition); + int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition); + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + FirstExponentialHistogramByTimestampAggregator.combine(state, groupId, valueValue, timestampValue); + } + } + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + ExponentialHistogramBlock values = (ExponentialHistogramBlock) valuesUncast; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == seen.getPositionCount(); + ExponentialHistogramScratch valuesScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); 
groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + FirstExponentialHistogramByTimestampAggregator.combineIntermediate(state, groupId, timestamps.getLong(valuesPosition), values.getExponentialHistogram(values.getFirstValueIndex(valuesPosition), valuesScratch), seen.getBoolean(valuesPosition)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, + ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int valuesPosition = groupPosition + positionOffset; + if (valueBlock.isNull(valuesPosition)) { + continue; + } + if (timestampBlock.isNull(valuesPosition)) { + continue; + } + int groupId = groups.getInt(groupPosition); + int valueStart = valueBlock.getFirstValueIndex(valuesPosition); + int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition); + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition); + int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition); + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + FirstExponentialHistogramByTimestampAggregator.combine(state, groupId, valueValue, timestampValue); + } + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + 
state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + ExponentialHistogramBlock values = (ExponentialHistogramBlock) valuesUncast; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == seen.getPositionCount(); + ExponentialHistogramScratch valuesScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + FirstExponentialHistogramByTimestampAggregator.combineIntermediate(state, groupId, timestamps.getLong(valuesPosition), values.getExponentialHistogram(values.getFirstValueIndex(valuesPosition), valuesScratch), seen.getBoolean(valuesPosition)); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, + ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, 
IntVector selected, + GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = FirstExponentialHistogramByTimestampAggregator.evaluateFinal(state, selected, ctx); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..d6db913c806e1 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregatorFunction.java @@ -0,0 +1,187 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.ExponentialHistogramBlock; +import org.elasticsearch.compute.data.ExponentialHistogramScratch; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.exponentialhistogram.ExponentialHistogram; + +/** + * {@link AggregatorFunction} implementation for {@link LastExponentialHistogramByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorImplementer} instead. + */ +public final class LastExponentialHistogramByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.EXPONENTIAL_HISTOGRAM), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final ExponentialHistogramStates.WithLongSingleState state; + + private final List channels; + + public LastExponentialHistogramByTimestampAggregatorFunction(DriverContext driverContext, + List channels, ExponentialHistogramStates.WithLongSingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static LastExponentialHistogramByTimestampAggregatorFunction create( + DriverContext driverContext, List channels) { + return new LastExponentialHistogramByTimestampAggregatorFunction(driverContext, channels, 
LastExponentialHistogramByTimestampAggregator.initSingle(driverContext)); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + ExponentialHistogramBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + ExponentialHistogramBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + private void addRawBlock(ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + int valueValueCount = valueBlock.getValueCount(p); + if (valueValueCount == 0) { + continue; + } + int timestampValueCount = timestampBlock.getValueCount(p); + if (timestampValueCount == 0) { + continue; + } + int valueStart = valueBlock.getFirstValueIndex(p); + int valueEnd = valueStart + valueValueCount; + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(p); + int timestampEnd = timestampStart + timestampValueCount; + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + 
LastExponentialHistogramByTimestampAggregator.combine(state, valueValue, timestampValue); + } + } + } + } + + private void addRawBlock(ExponentialHistogramBlock valueBlock, LongBlock timestampBlock, + BooleanVector mask) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + int valueValueCount = valueBlock.getValueCount(p); + if (valueValueCount == 0) { + continue; + } + int timestampValueCount = timestampBlock.getValueCount(p); + if (timestampValueCount == 0) { + continue; + } + int valueStart = valueBlock.getFirstValueIndex(p); + int valueEnd = valueStart + valueValueCount; + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(p); + int timestampEnd = timestampStart + timestampValueCount; + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + LastExponentialHistogramByTimestampAggregator.combine(state, valueValue, timestampValue); + } + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + ExponentialHistogramBlock values = (ExponentialHistogramBlock) valuesUncast; + assert values.getPositionCount() == 1; + Block seenUncast = 
page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + ExponentialHistogramScratch valuesScratch = new ExponentialHistogramScratch(); + LastExponentialHistogramByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getExponentialHistogram(values.getFirstValueIndex(0), valuesScratch), seen.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = LastExponentialHistogramByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..917e49829a855 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link LastExponentialHistogramByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. + */ +public final class LastExponentialHistogramByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public LastExponentialHistogramByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return LastExponentialHistogramByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return LastExponentialHistogramByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public LastExponentialHistogramByTimestampAggregatorFunction aggregator( + DriverContext driverContext, List channels) { + return LastExponentialHistogramByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public LastExponentialHistogramByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return LastExponentialHistogramByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return LastExponentialHistogramByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..d24f9ae3afb18 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,321 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.ExponentialHistogramBlock; +import org.elasticsearch.compute.data.ExponentialHistogramScratch; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.exponentialhistogram.ExponentialHistogram; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link LastExponentialHistogramByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
+ */ +public final class LastExponentialHistogramByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.EXPONENTIAL_HISTOGRAM), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final ExponentialHistogramStates.WithLongGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public LastExponentialHistogramByTimestampGroupingAggregatorFunction(List channels, + ExponentialHistogramStates.WithLongGroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static LastExponentialHistogramByTimestampGroupingAggregatorFunction create( + List channels, DriverContext driverContext) { + return new LastExponentialHistogramByTimestampGroupingAggregatorFunction(channels, LastExponentialHistogramByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + ExponentialHistogramBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } 
+ + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, + ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + if (valueBlock.isNull(valuesPosition)) { + continue; + } + if (timestampBlock.isNull(valuesPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valueStart = valueBlock.getFirstValueIndex(valuesPosition); + int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition); + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition); + int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition); + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + LastExponentialHistogramByTimestampAggregator.combine(state, groupId, valueValue, timestampValue); + } + } + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if 
(timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + ExponentialHistogramBlock values = (ExponentialHistogramBlock) valuesUncast; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == seen.getPositionCount(); + ExponentialHistogramScratch valuesScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + LastExponentialHistogramByTimestampAggregator.combineIntermediate(state, groupId, timestamps.getLong(valuesPosition), values.getExponentialHistogram(values.getFirstValueIndex(valuesPosition), valuesScratch), seen.getBoolean(valuesPosition)); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, + ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + if (valueBlock.isNull(valuesPosition)) { + continue; + } + if (timestampBlock.isNull(valuesPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + 
groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valueStart = valueBlock.getFirstValueIndex(valuesPosition); + int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition); + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition); + int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition); + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + LastExponentialHistogramByTimestampAggregator.combine(state, groupId, valueValue, timestampValue); + } + } + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + ExponentialHistogramBlock values = (ExponentialHistogramBlock) valuesUncast; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == seen.getPositionCount(); + ExponentialHistogramScratch valuesScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + 
int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + LastExponentialHistogramByTimestampAggregator.combineIntermediate(state, groupId, timestamps.getLong(valuesPosition), values.getExponentialHistogram(values.getFirstValueIndex(valuesPosition), valuesScratch), seen.getBoolean(valuesPosition)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, + ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + ExponentialHistogramScratch valueScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int valuesPosition = groupPosition + positionOffset; + if (valueBlock.isNull(valuesPosition)) { + continue; + } + if (timestampBlock.isNull(valuesPosition)) { + continue; + } + int groupId = groups.getInt(groupPosition); + int valueStart = valueBlock.getFirstValueIndex(valuesPosition); + int valueEnd = valueStart + valueBlock.getValueCount(valuesPosition); + for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) { + ExponentialHistogram valueValue = valueBlock.getExponentialHistogram(valueOffset, valueScratch); + int timestampStart = timestampBlock.getFirstValueIndex(valuesPosition); + int timestampEnd = timestampStart + timestampBlock.getValueCount(valuesPosition); + for (int timestampOffset = timestampStart; timestampOffset < timestampEnd; timestampOffset++) { + long timestampValue = timestampBlock.getLong(timestampOffset); + LastExponentialHistogramByTimestampAggregator.combine(state, groupId, valueValue, timestampValue); + } + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == 
intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + ExponentialHistogramBlock values = (ExponentialHistogramBlock) valuesUncast; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == seen.getPositionCount(); + ExponentialHistogramScratch valuesScratch = new ExponentialHistogramScratch(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + LastExponentialHistogramByTimestampAggregator.combineIntermediate(state, groupId, timestamps.getLong(valuesPosition), values.getExponentialHistogram(values.getFirstValueIndex(valuesPosition), valuesScratch), seen.getBoolean(valuesPosition)); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, + ExponentialHistogramBlock valueBlock, LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = 
LastExponentialHistogramByTimestampAggregator.evaluateFinal(state, selected, ctx); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ExponentialHistogramStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ExponentialHistogramStates.java index 9269361a70535..ec18bed689da9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ExponentialHistogramStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ExponentialHistogramStates.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -18,6 +19,7 @@ import org.elasticsearch.exponentialhistogram.ExponentialHistogram; import org.elasticsearch.exponentialhistogram.ExponentialHistogramCircuitBreaker; import org.elasticsearch.exponentialhistogram.ExponentialHistogramMerger; +import org.elasticsearch.exponentialhistogram.ReleasableExponentialHistogram; public final class ExponentialHistogramStates { @@ -184,6 +186,187 @@ public void close() { public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` } + } + + /** + * A state consisting of a single {@code long} value with a {@link ExponentialHistogram}. 
+ * The intermediate state contains three values in order: the long, the histogram, and a boolean specifying if a value was set or not. + */ + public static final class WithLongSingleState implements AggregatorState { + + private final CircuitBreaker breaker; + private long longValue; + private ReleasableExponentialHistogram histogramValue; + + public WithLongSingleState(CircuitBreaker breaker) { + this.breaker = breaker; + } + + public boolean isSeen() { + return histogramValue != null; + } + + public long longValue() { + assert isSeen(); + return longValue; + } + + public ReleasableExponentialHistogram histogramValue() { + assert isSeen(); + return histogramValue; + } + + public void set(long longValue, ExponentialHistogram histogram) { + assert histogram != null; + this.longValue = longValue; + ReleasableExponentialHistogram newValue; + try (var copyBuilder = ExponentialHistogram.builder(histogram, new HistoBreaker(breaker))) { + newValue = copyBuilder.build(); + } + Releasables.close(histogramValue); + this.histogramValue = newValue; + } + + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 3; + BlockFactory blockFactory = driverContext.blockFactory(); + // in case of error, the blocks are closed by the caller + if (histogramValue == null) { + blocks[offset] = blockFactory.newConstantLongBlockWith(0L, 1); + blocks[offset + 1] = blockFactory.newConstantExponentialHistogramBlock(ExponentialHistogram.empty(), 1); + blocks[offset + 2] = blockFactory.newConstantBooleanBlockWith(false, 1); + } else { + blocks[offset] = blockFactory.newConstantLongBlockWith(longValue, 1); + blocks[offset + 1] = blockFactory.newConstantExponentialHistogramBlock(histogramValue, 1); + blocks[offset + 2] = blockFactory.newConstantBooleanBlockWith(true, 1); + } + } + + public Block evaluateFinalHistogram(DriverContext driverContext) { + BlockFactory blockFactory = driverContext.blockFactory(); + if 
(histogramValue == null) { + return blockFactory.newConstantNullBlock(1); + } else { + return blockFactory.newConstantExponentialHistogramBlock(histogramValue, 1); + } + } + + @Override + public void close() { + Releasables.close(histogramValue); + histogramValue = null; + } + } + + /** + * A grouping state consisting of a single {@code long} value with a {@link ExponentialHistogram} per group. + * The intermediate state contains three values in order: the long, the histogram, and a boolean specifying if a value was set or not. + */ + public static final class WithLongGroupingState implements GroupingAggregatorState { + + private LongArray longValues; + private ObjectArray histogramValues; + private final HistoBreaker breaker; + private final BigArrays bigArrays; + + WithLongGroupingState(BigArrays bigArrays, CircuitBreaker breaker) { + LongArray longValues = null; + ObjectArray histogramValues = null; + boolean success = false; + try { + longValues = bigArrays.newLongArray(1); + histogramValues = bigArrays.newObjectArray(1); + success = true; + } finally { + if (success == false) { + Releasables.close(histogramValues, longValues); + } + } + this.longValues = longValues; + this.histogramValues = histogramValues; + this.bigArrays = bigArrays; + this.breaker = new HistoBreaker(breaker); + } + + public void set(int groupId, long longValue, ExponentialHistogram histogramValue) { + assert histogramValue != null; + ensureCapacity(groupId); + try (var copyBuilder = ExponentialHistogram.builder(histogramValue, breaker)) { + ReleasableExponentialHistogram old = histogramValues.getAndSet(groupId, copyBuilder.build()); + Releasables.close(old); + } + longValues.set(groupId, longValue); + } + + private void ensureCapacity(int groupId) { + histogramValues = bigArrays.grow(histogramValues, groupId + 1); + longValues = bigArrays.grow(longValues, groupId + 1); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext 
driverContext) { + assert blocks.length >= offset + 3; + try ( + var longBuilder = driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount()); + var histoBuilder = driverContext.blockFactory().newExponentialHistogramBlockBuilder(selected.getPositionCount()); + var seenBuilder = driverContext.blockFactory().newBooleanVectorFixedBuilder(selected.getPositionCount()); + ) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int groupId = selected.getInt(i); + if (seen(groupId)) { + seenBuilder.appendBoolean(true); + longBuilder.appendLong(longValues.get(groupId)); + histoBuilder.append(histogramValues.get(groupId)); + } else { + seenBuilder.appendBoolean(false); + longBuilder.appendLong(0L); + histoBuilder.append(ExponentialHistogram.empty()); + } + } + blocks[offset] = longBuilder.build().asBlock(); + blocks[offset + 1] = histoBuilder.build(); + blocks[offset + 2] = seenBuilder.build().asBlock(); + } + } + + public boolean seen(int groupId) { + return groupId < histogramValues.size() && histogramValues.get(groupId) != null; + } + + public long longValue(int groupId) { + assert seen(groupId); + return longValues.get(groupId); + } + + @Override + public void close() { + for (int i = 0; i < histogramValues.size(); i++) { + Releasables.close(histogramValues.get(i)); + } + Releasables.close(histogramValues, longValues); + histogramValues = null; + longValues = null; + } + + public Block evaluateFinalHistograms(IntVector selected, DriverContext driverContext) { + try (var builder = driverContext.blockFactory().newExponentialHistogramBlockBuilder(selected.getPositionCount());) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int groupId = selected.getInt(i); + if (seen(groupId)) { + builder.append(histogramValues.get(groupId)); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + + @Override + public void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop + } } } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregator.java new file mode 100644 index 0000000000000..95f1feba45046 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FirstExponentialHistogramByTimestampAggregator.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.exponentialhistogram.ExponentialHistogram; + +/** + * A time-series aggregation function that collects the first occurrence exponential histogram of a time series in a specified interval. 
+ */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "EXPONENTIAL_HISTOGRAM"), + @IntermediateState(name = "seen", type = "BOOLEAN") } +) +@GroupingAggregator() +public class FirstExponentialHistogramByTimestampAggregator { + public static String describe() { + return "first_ExponentialHistogram_by_timestamp"; + } + + public static ExponentialHistogramStates.WithLongSingleState initSingle(DriverContext driverContext) { + return new ExponentialHistogramStates.WithLongSingleState(driverContext.breaker()); + } + + public static void combine(ExponentialHistogramStates.WithLongSingleState current, ExponentialHistogram value, long timestamp) { + if (timestamp < current.longValue()) { + current.set(timestamp, value); + } + } + + public static void combineIntermediate( + ExponentialHistogramStates.WithLongSingleState current, + long timestamp, + ExponentialHistogram value, + boolean seen + ) { + if (seen) { + if (current.isSeen()) { + combine(current, value, timestamp); + } else { + current.set(timestamp, value); + } + } + } + + public static Block evaluateFinal(ExponentialHistogramStates.WithLongSingleState current, DriverContext ctx) { + return current.evaluateFinalHistogram(ctx); + } + + public static ExponentialHistogramStates.WithLongGroupingState initGrouping(DriverContext driverContext) { + return new ExponentialHistogramStates.WithLongGroupingState(driverContext.bigArrays(), driverContext.breaker()); + } + + public static void combine( + ExponentialHistogramStates.WithLongGroupingState current, + int groupId, + ExponentialHistogram value, + long timestamp + ) { + if (current.seen(groupId) == false || timestamp < current.longValue(groupId)) { + current.set(groupId, timestamp, value); + } + } + + public static void combineIntermediate( + ExponentialHistogramStates.WithLongGroupingState current, + int groupId, + long timestamp, + ExponentialHistogram value, + boolean seen + ) { + if (seen) { + 
combine(current, groupId, value, timestamp); + } + } + + public static Block evaluateFinal( + ExponentialHistogramStates.WithLongGroupingState state, + IntVector selected, + GroupingAggregatorEvaluationContext ctx + ) { + return state.evaluateFinalHistograms(selected, ctx.driverContext()); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregator.java new file mode 100644 index 0000000000000..1be309147d450 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/LastExponentialHistogramByTimestampAggregator.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.exponentialhistogram.ExponentialHistogram; + +/** + * A time-series aggregation function that collects the Last occurrence exponential histogram of a time series in a specified interval. 
+ * + */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "EXPONENTIAL_HISTOGRAM"), + @IntermediateState(name = "seen", type = "BOOLEAN") } +) +@GroupingAggregator() +public class LastExponentialHistogramByTimestampAggregator { + public static String describe() { + return "last_ExponentialHistogram_by_timestamp"; + } + + public static ExponentialHistogramStates.WithLongSingleState initSingle(DriverContext driverContext) { + return new ExponentialHistogramStates.WithLongSingleState(driverContext.breaker()); + } + + public static void combine(ExponentialHistogramStates.WithLongSingleState current, ExponentialHistogram value, long timestamp) { + if (timestamp > current.longValue()) { + current.set(timestamp, value); + } + } + + public static void combineIntermediate( + ExponentialHistogramStates.WithLongSingleState current, + long timestamp, + ExponentialHistogram value, + boolean seen + ) { + if (seen) { + if (current.isSeen()) { + combine(current, value, timestamp); + } else { + current.set(timestamp, value); + } + } + } + + public static Block evaluateFinal(ExponentialHistogramStates.WithLongSingleState current, DriverContext ctx) { + return current.evaluateFinalHistogram(ctx); + } + + public static ExponentialHistogramStates.WithLongGroupingState initGrouping(DriverContext driverContext) { + return new ExponentialHistogramStates.WithLongGroupingState(driverContext.bigArrays(), driverContext.breaker()); + } + + public static void combine( + ExponentialHistogramStates.WithLongGroupingState current, + int groupId, + ExponentialHistogram value, + long timestamp + ) { + if (current.seen(groupId) == false || timestamp > current.longValue(groupId)) { + current.set(groupId, timestamp, value); + } + } + + public static void combineIntermediate( + ExponentialHistogramStates.WithLongGroupingState current, + int groupId, + long timestamp, + ExponentialHistogram value, + boolean seen + ) { + if (seen) { + 
combine(current, groupId, value, timestamp); + } + } + + public static Block evaluateFinal( + ExponentialHistogramStates.WithLongGroupingState state, + IntVector selected, + GroupingAggregatorEvaluationContext ctx + ) { + return state.evaluateFinalHistograms(selected, ctx.driverContext()); + } +} diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index f6e370a136a4c..ee869faf76c1f 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -430,11 +430,11 @@ protected boolean supportsExponentialHistograms() { try { return RestEsqlTestCase.hasCapabilities( client(), - List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.capabilityName()) + List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.capabilityName()) ) && RestEsqlTestCase.hasCapabilities( remoteClusterClient(), - List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.capabilityName()) + List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.capabilityName()) ); } catch (IOException e) { throw new RuntimeException(e); diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java index 6f3b275e1226c..0469741b935b0 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java +++ 
b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java @@ -58,7 +58,7 @@ protected boolean supportsSourceFieldMapping() { protected boolean supportsExponentialHistograms() { return RestEsqlTestCase.hasCapabilities( client(), - List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.capabilityName()) + List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.capabilityName()) ); } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index ed56b0db7dfe4..40cca58a481f8 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -289,7 +289,7 @@ protected boolean supportsSourceFieldMapping() throws IOException { protected boolean supportsExponentialHistograms() { return RestEsqlTestCase.hasCapabilities( client(), - List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.capabilityName()) + List.of(EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.capabilityName()) ); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exponential_histogram.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exponential_histogram.csv-spec index b4789b256dcbf..86f52c86d70d9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exponential_histogram.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/exponential_histogram.csv-spec @@ -1,5 +1,5 @@ loadFiltered -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE STARTS_WITH(instance, "dummy") | SORT instance | KEEP instance, 
responseTime ; @@ -15,7 +15,7 @@ dummy-zero_threshold_only | "{""scale"":0,""zero"":{""threshold"":2.0E-5}}" ; allAggsGrouped -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | EVAL instance = CASE(STARTS_WITH(instance, "dummy"), "dummy-grouped", instance) @@ -34,7 +34,7 @@ instance-2 | 2.2E-4 | 2.744054 | 6.469E-4 | 0.0016068 | 27.706 allAggsFiltered -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | STATS min = MIN(responseTime) WHERE instance == "instance-0", @@ -53,7 +53,7 @@ min:double | max:double | median:double | p75:double | sum:double | avg:double allAggsGroupedFiltered -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") @@ -77,7 +77,7 @@ instance-2 | null | null | 6.469E-4 | null | null allAggsGroupedEmptyGroups -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | STATS min = MIN(responseTime) WHERE instance == "idontexist", @@ -94,7 +94,7 @@ null | null | null | null | null | null ; allAggsInlineGrouped -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | INLINE STATS min = MIN(responseTime), max = MAX(responseTime), median = MEDIAN(responseTime), p75 = PERCENTILE(responseTime,75), sum = SUM(responseTime), avg = AVG(responseTime) BY instance @@ -125,7 +125,7 @@ instance-0 | 2.4E-4 | 6.786232 | 0.0211404 | 0.2608237 allAggsOnEmptyHistogram -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE instance == "dummy-empty" | 
STATS min = MIN(responseTime), max = MAX(responseTime), median = MEDIAN(responseTime), p75 = PERCENTILE(responseTime,75), sum = SUM(responseTime), avg = AVG(responseTime) @@ -138,7 +138,7 @@ NULL | NULL | NULL | NULL | NULL | NULL histoAsCaseValue -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | INLINE STATS p50 = PERCENTILE(responseTime, 50) BY instance, @timestamp @@ -152,7 +152,7 @@ filteredCount:long ; ungroupedPercentiles -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS p0 = PERCENTILE(responseTime,0), p50 = PERCENTILE(responseTime,50), p99 = PERCENTILE(responseTime, 99), p100 = PERCENTILE(responseTime,100) @@ -167,7 +167,7 @@ p0:double | p50:double | p99:double | p100:double groupedPercentiles -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS p0 = PERCENTILE(responseTime,0), p50 = PERCENTILE(responseTime,50), p99 = PERCENTILE(responseTime, 99), p100 = PERCENTILE(responseTime,100) BY instance @@ -185,7 +185,7 @@ instance-2 | 2.2E-4 | 6.469E-4 | 0.0857672 | 2.7059714542564097 percentileOnEmptyHistogram -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE instance == "dummy-empty" | STATS p50 = PERCENTILE(responseTime,50) @@ -199,7 +199,7 @@ NULL ungroupedMinMax -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS min = MIN(responseTime), max = MAX(responseTime) @@ -213,7 +213,7 @@ min:double | max:double 
groupedMinMax -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS min = MIN(responseTime), max = MAX(responseTime) BY instance @@ -230,7 +230,7 @@ instance-2 | 2.2E-4 | 2.744054 minMaxOnEmptyHistogram -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE instance == "dummy-empty" | STATS min = MIN(responseTime), max = MAX(responseTime) @@ -243,7 +243,7 @@ NULL | NULL ungroupedAvg -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS avg = ROUND(AVG(responseTime), 7) // rounding to avoid floating point precision issues @@ -256,7 +256,7 @@ avg:double groupedAvg -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS avg = ROUND(AVG(responseTime), 7) BY instance // rounding to avoid floating point precision issues @@ -272,7 +272,7 @@ instance-2 | 0.008197 avgOnEmptyHistogram -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE instance == "dummy-empty" | STATS avg = AVG(responseTime) @@ -285,7 +285,7 @@ NULL ungroupedSum -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS sum = ROUND(SUM(responseTime), 7) // rounding to avoid floating point precision issues @@ -298,7 +298,7 @@ sum:double groupedSum -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: 
exponential_histogram_pre_tech_preview_v7 FROM exp_histo_sample | WHERE NOT STARTS_WITH(instance, "dummy") | STATS sum = ROUND(SUM(responseTime), 7) BY instance // rounding to avoid floating point precision issues @@ -314,7 +314,7 @@ instance-2 | 27.706021 timeseriesAllAggsNoBucket -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 required_capability: ts_command_v0 TS exp_histo_sample @@ -333,7 +333,7 @@ instance-2 | 2.2E-4 | 2.744054 | 6.469E-4 | 0.0016068 | 27.706 timeseriesAllAggsFilteredAndBucketed -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 required_capability: ts_command_v0 TS exp_histo_sample @@ -362,7 +362,7 @@ instance-2 | 2025-09-25T01:00:00.000Z | 2.45E-4 | 0.008362 | 4.777E-4 timeseriesAllAggsUngrouped -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 required_capability: ts_command_v0 TS exp_histo_sample @@ -378,7 +378,7 @@ min:double | max:double | median:double | p75:double | sum:double | avg:double timseriesAllAggsGroupedFiltered -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 required_capability: ts_command_v0 TS exp_histo_sample @@ -402,8 +402,153 @@ instance-2 | null | null | 6.469E-4 | null | null ; +timeseriesAllAggsLastOverTimeNoBucket +required_capability: exponential_histogram_pre_tech_preview_v7 +required_capability: ts_command_v0 + + +TS exp_histo_sample + | WHERE instance == "instance-1" + | STATS min = MIN(last_over_time(responseTime)), + max = MAX(last_over_time(responseTime)), + median = MEDIAN(last_over_time(responseTime)), + p75 = PERCENTILE(last_over_time(responseTime), 75), + sum = SUM(last_over_time(responseTime)), + avg = AVG(last_over_time(responseTime)) + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum 
= ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP min, max, median, p75, sum, avg +; + +min:double | max:double | median:double | p75:double | sum:double | avg:double +2.22E-4 | 0.003494 | 5.621E-4 | 0.0014898 | 0.021901 | 9.522E-4 +; + + +timeseriesAllAggsLastOverTimeBucketed +required_capability: exponential_histogram_pre_tech_preview_v7 +required_capability: ts_command_v0 + +TS exp_histo_sample + | WHERE NOT STARTS_WITH(instance, "dummy") + | STATS min = MIN(last_over_time(responseTime)), + max = MAX(last_over_time(responseTime)), + median = MEDIAN(last_over_time(responseTime)), + p75 = PERCENTILE(last_over_time(responseTime), 75), + sum = SUM(last_over_time(responseTime)), + avg = AVG(last_over_time(responseTime)) + BY instance + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP instance, min, max, median, p75, sum, avg + | SORT instance +; + +instance:keyword | min:double | max:double | median:double | p75:double | sum:double | avg:double +instance-0 | 3.36E-4 | 3.252835 | 0.0355538 | 0.3688603 | 21.522568 | 0.2314255 +instance-1 | 2.22E-4 | 0.003494 | 5.621E-4 | 0.0014898 | 0.021901 | 9.522E-4 +instance-2 | 2.59E-4 | 0.068249 | 5.209E-4 | 0.0014578 | 0.097412 | 0.0037466 +; + + +timeseriesAllAggsLastOverTimeEmptyBuckets +required_capability: exponential_histogram_pre_tech_preview_v7 +required_capability: ts_command_v0 + + +TS exp_histo_sample + | WHERE NOT STARTS_WITH(instance, "dummy") + | STATS min = MIN(last_over_time(responseTime)) WHERE instance == "idontexist", + max = MAX(last_over_time(responseTime)) WHERE instance == "idontexist", + median = MEDIAN(last_over_time(responseTime)) WHERE instance == "idontexist", + p75 = PERCENTILE(last_over_time(responseTime), 75) WHERE instance == "idontexist", + sum = SUM(last_over_time(responseTime)) WHERE instance == "idontexist", + avg = 
AVG(last_over_time(responseTime)) WHERE instance == "idontexist" + BY instance + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP instance, min, max, median, p75, sum, avg + | SORT instance +; + +instance:keyword | min:double | max:double | median:double | p75:double | sum:double | avg:double +instance-0 | null | null | null | null | null | null +instance-1 | null | null | null | null | null | null +instance-2 | null | null | null | null | null | null +; + + +timeseriesAllAggsFirstOverTimeNoBucket +required_capability: exponential_histogram_pre_tech_preview_v7 +required_capability: ts_command_v0 + +TS exp_histo_sample + | WHERE instance == "instance-1" + | STATS min = MIN(first_over_time(responseTime)), + max = MAX(first_over_time(responseTime)), + median = MEDIAN(first_over_time(responseTime)), + p75 = PERCENTILE(first_over_time(responseTime), 75), + sum = SUM(first_over_time(responseTime)), + avg = AVG(first_over_time(responseTime)) + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP min, max, median, p75, sum, avg +; +min:double | max:double | median:double | p75:double | sum:double | avg:double +2.58E-4 | 0.061096 | 5.686E-4 | 0.0013817 | 0.149232 | 0.0046635 +; + + +timeseriesAllAggsFirstOverTimeBucketed +required_capability: exponential_histogram_pre_tech_preview_v7 +required_capability: ts_command_v0 + +TS exp_histo_sample + | WHERE NOT STARTS_WITH(instance, "dummy") + | STATS min = MIN(first_over_time(responseTime)), + max = MAX(first_over_time(responseTime)), + median = MEDIAN(first_over_time(responseTime)), + p75 = PERCENTILE(first_over_time(responseTime), 75), + sum = SUM(first_over_time(responseTime)), + avg = AVG(first_over_time(responseTime)) + BY instance + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = 
ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP instance, min, max, median, p75, sum, avg + | SORT instance +; + +instance:keyword | min:double | max:double | median:double | p75:double | sum:double | avg:double +instance-0 | 3.79E-4 | 0.873616 | 0.0125702 | 0.2391763 | 9.269671 | 0.1343431 +instance-1 | 2.58E-4 | 0.061096 | 5.686E-4 | 0.0013817 | 0.149232 | 0.0046635 +instance-2 | 2.65E-4 | 0.067933 | 5.571E-4 | 7.289E-4 | 0.108144 | 0.0031807 +; + + + +timeseriesAllAggsFirstOverTimeEmptyBuckets +required_capability: exponential_histogram_pre_tech_preview_v7 +required_capability: ts_command_v0 + + +TS exp_histo_sample + | WHERE NOT STARTS_WITH(instance, "dummy") + | STATS min = MIN(first_over_time(responseTime)) WHERE instance == "idontexist", + max = MAX(first_over_time(responseTime)) WHERE instance == "idontexist", + median = MEDIAN(first_over_time(responseTime)) WHERE instance == "idontexist", + p75 = PERCENTILE(first_over_time(responseTime), 75) WHERE instance == "idontexist", + sum = SUM(first_over_time(responseTime)) WHERE instance == "idontexist", + avg = AVG(first_over_time(responseTime)) WHERE instance == "idontexist" + BY instance + | EVAL median = ROUND(median, 7), p75 = ROUND(p75, 7), sum = ROUND(sum, 7), avg = ROUND(avg, 7) // rounding to avoid floating point precision issues + | KEEP instance, min, max, median, p75, sum, avg + | SORT instance +; + +instance:keyword | min:double | max:double | median:double | p75:double | sum:double | avg:double +instance-0 | null | null | null | null | null | null +instance-1 | null | null | null | null | null | null +instance-2 | null | null | null | null | null | null +; + + timeseriesAllMathOverTimeAggsGrouped -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 required_capability: ts_command_v0 TS exp_histo_sample @@ -428,7 +573,7 @@ instance-2 | 2.2E-4 | 2.744054 | 0.0016068 | 27.706021 | 0.008197 
timeseriesAllMathOverTimeAggsUngrouped -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 required_capability: ts_command_v0 TS exp_histo_sample @@ -449,7 +594,7 @@ min:double | max:double | p75:double | sum:double | avg:double timeseriesPresentAbsentOverTime -required_capability: exponential_histogram_pre_tech_preview_v6 +required_capability: exponential_histogram_pre_tech_preview_v7 required_capability: ts_command_v0 TS exp_histo_sample diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index fa6d565dc6432..851e1218140ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -1577,7 +1577,7 @@ public enum Cap { * When implementing changes on this type, we'll simply increment the version suffix at the end to prevent bwc tests from running. * As soon as we move into tech preview, we'll replace this capability with a "EXPONENTIAL_HISTOGRAM_TECH_PREVIEW" one. 
*/ - EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6(EXPONENTIAL_HISTOGRAM_FEATURE_FLAG), + EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7(EXPONENTIAL_HISTOGRAM_FEATURE_FLAG), /** * Create new block when filtering OrdinalBytesRefBlock diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FirstOverTime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FirstOverTime.java index d3bcf63489f73..9e1970346b6e9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FirstOverTime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FirstOverTime.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.FirstDoubleByTimestampAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.FirstExponentialHistogramByTimestampAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.FirstFloatByTimestampAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.FirstIntByTimestampAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.FirstLongByTimestampAggregatorFunctionSupplier; @@ -51,7 +52,7 @@ public class FirstOverTime extends TimeSeriesAggregateFunction implements Option // TODO: support all types @FunctionInfo( type = FunctionType.TIME_SERIES_AGGREGATE, - returnType = { "long", "integer", "double" }, + returnType = { "long", "integer", "double", "exponential_histogram" }, description = "Calculates the earliest value of a field, where recency determined by the `@timestamp` field.", appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.PREVIEW, version = "9.2.0") }, preview = true, @@ -61,7 +62,7 @@ public FirstOverTime( Source source, @Param( name = "field", - type = { 
"counter_long", "counter_integer", "counter_double", "long", "integer", "double" } + type = { "counter_long", "counter_integer", "counter_double", "long", "integer", "double", "exponential_histogram" } ) Expression field, @Param( name = "window", @@ -118,7 +119,9 @@ public DataType dataType() { protected TypeResolution resolveType() { return isType( field(), - dt -> (dt.noCounter().isNumeric() && dt != DataType.UNSIGNED_LONG) || dt == DataType.AGGREGATE_METRIC_DOUBLE, + dt -> (dt.noCounter().isNumeric() && dt != DataType.UNSIGNED_LONG) + || dt == DataType.AGGREGATE_METRIC_DOUBLE + || dt == DataType.EXPONENTIAL_HISTOGRAM, sourceText(), DEFAULT, "numeric except unsigned_long" @@ -137,6 +140,7 @@ public AggregatorFunctionSupplier supplier() { case INTEGER, COUNTER_INTEGER -> new FirstIntByTimestampAggregatorFunctionSupplier(); case DOUBLE, COUNTER_DOUBLE -> new FirstDoubleByTimestampAggregatorFunctionSupplier(); case FLOAT -> new FirstFloatByTimestampAggregatorFunctionSupplier(); + case EXPONENTIAL_HISTOGRAM -> new FirstExponentialHistogramByTimestampAggregatorFunctionSupplier(); default -> throw EsqlIllegalArgumentException.illegalDataType(type); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/LastOverTime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/LastOverTime.java index 7a0f43704190d..c280f68e10870 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/LastOverTime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/LastOverTime.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.LastBytesRefByTimestampAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.LastDoubleByTimestampAggregatorFunctionSupplier; +import 
org.elasticsearch.compute.aggregation.LastExponentialHistogramByTimestampAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.LastFloatByTimestampAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.LastIntByTimestampAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.LastLongByTimestampAggregatorFunctionSupplier; @@ -52,7 +53,7 @@ public class LastOverTime extends TimeSeriesAggregateFunction implements Optiona // TODO: support all types @FunctionInfo( type = FunctionType.TIME_SERIES_AGGREGATE, - returnType = { "long", "integer", "double", "_tsid" }, + returnType = { "long", "integer", "double", "_tsid", "exponential_histogram" }, description = "Calculates the latest value of a field, where recency determined by the `@timestamp` field.", appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.PREVIEW, version = "9.2.0") }, preview = true, @@ -62,7 +63,7 @@ public LastOverTime( Source source, @Param( name = "field", - type = { "counter_long", "counter_integer", "counter_double", "long", "integer", "double", "_tsid" }, + type = { "counter_long", "counter_integer", "counter_double", "long", "integer", "double", "_tsid", "exponential_histogram" }, description = "the field to calculate the latest value for" ) Expression field, @Param( @@ -120,7 +121,9 @@ public DataType dataType() { protected TypeResolution resolveType() { return isType( field(), - dt -> (dt.noCounter().isNumeric() && dt != DataType.UNSIGNED_LONG) || dt == DataType.TSID_DATA_TYPE, + dt -> (dt.noCounter().isNumeric() && dt != DataType.UNSIGNED_LONG) + || dt == DataType.TSID_DATA_TYPE + || dt == DataType.EXPONENTIAL_HISTOGRAM, sourceText(), DEFAULT, "numeric except unsigned_long" @@ -140,6 +143,7 @@ public AggregatorFunctionSupplier supplier() { case DOUBLE, COUNTER_DOUBLE -> new LastDoubleByTimestampAggregatorFunctionSupplier(); case FLOAT -> new LastFloatByTimestampAggregatorFunctionSupplier(); case TSID_DATA_TYPE -> new 
LastBytesRefByTimestampAggregatorFunctionSupplier(); + case EXPONENTIAL_HISTOGRAM -> new LastExponentialHistogramByTimestampAggregatorFunctionSupplier(); default -> throw EsqlIllegalArgumentException.illegalDataType(type); }; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FirstOverTimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FirstOverTimeTests.java index 7fe78e08f1271..ab3c443b7b836 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FirstOverTimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FirstOverTimeTests.java @@ -41,8 +41,8 @@ public static Iterable parameters() { var valuesSuppliers = List.of( MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), - MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) - + MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), + MultiRowTestCaseSupplier.exponentialHistogramCases(1, 100) ); for (List valuesSupplier : valuesSuppliers) { for (TestCaseSupplier.TypedDataSupplier fieldSupplier : valuesSupplier) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/LastOverTimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/LastOverTimeTests.java index 9bf6cef1de56f..94d52612eebfa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/LastOverTimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/LastOverTimeTests.java @@ -41,7 +41,8 @@ public static Iterable parameters() { 
MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), - MultiRowTestCaseSupplier.tsidCases(1, 1000) + MultiRowTestCaseSupplier.tsidCases(1, 1000), + MultiRowTestCaseSupplier.exponentialHistogramCases(1, 100) ); for (List valuesSupplier : valuesSuppliers) { for (TestCaseSupplier.TypedDataSupplier fieldSupplier : valuesSupplier) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java index a8bb4b46fdecd..3383deaff6933 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java @@ -172,7 +172,7 @@ public static void init() { ); List metricIndices = new ArrayList<>(); - if (EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.isEnabled()) { + if (EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.isEnabled()) { Map expHistoMetricMapping = loadMapping("exp_histo_sample-mappings.json"); metricIndices.add( EsIndexGenerator.esIndex("exp_histo_sample", expHistoMetricMapping, Map.of("exp_histo_sample", IndexMode.TIME_SERIES)) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 301950c19a0a4..937f1dc4a532b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -7847,7 +7847,7 @@ public void testTranslateWithInlineFilterWithImplicitLastOverTime() { } public void testTranslateHistogramSumWithImplicitMergeOverTime() { - assumeTrue("exponenial histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.isEnabled()); + assumeTrue("exponential histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.isEnabled()); var query = """ TS exp_histo_sample | STATS SUM(responseTime) BY bucket(@timestamp, 1 minute) | LIMIT 10 """; @@ -7879,7 +7879,7 @@ public void testTranslateHistogramSumWithImplicitMergeOverTime() { } public void testTranslateHistogramSumWithImplicitMergeOverTimeAndFilter() { - assumeTrue("exponenial histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.isEnabled()); + assumeTrue("exponential histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.isEnabled()); var query = """ TS exp_histo_sample | STATS SUM(responseTime) WHERE instance == "foobar" BY bucket(@timestamp, 1 minute) | LIMIT 10 """; @@ -7912,7 +7912,7 @@ public void testTranslateHistogramSumWithImplicitMergeOverTimeAndFilter() { } public void testTranslateHistogramPercentileWithImplicitMergeOverTime() { - assumeTrue("exponenial histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.isEnabled()); + assumeTrue("exponential histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.isEnabled()); var query = """ TS exp_histo_sample | STATS PERCENTILE(responseTime, 50) BY bucket(@timestamp, 1 minute) | LIMIT 10 """; @@ -7945,7 +7945,7 @@ public void testTranslateHistogramPercentileWithImplicitMergeOverTime() { } public void testTranslateHistogramPercentileWithImplicitMergeOverTimeAndFilter() { - assumeTrue("exponenial histogram support required", 
EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V6.isEnabled()); + assumeTrue("exponential histogram support required", EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V7.isEnabled()); var query = """ TS exp_histo_sample | STATS PERCENTILE(responseTime, 50) WHERE instance == "foobar" BY bucket(@timestamp, 1 minute) | LIMIT 10 """;