From 99e262b2d595468be4bc247097b9ed1478c0fb89 Mon Sep 17 00:00:00 2001 From: Mouhcine Aitounejjar Date: Thu, 13 Nov 2025 14:03:28 -0500 Subject: [PATCH 1/4] [ES|QL] Support for First/Last (#137408) - Minor refactoring in "build.gradle" to allow the generation of the two variants of state classes (e.g.: LongLongState and AllLongLongState) - Generate the new two state files using "All-X-2State.java.st", for all types. - Generate the new timestamp aggregator files using "All-X-ValueByTimestampAggregator.java.st", for all types. - Register ALL_LAST as snapshot function - Augment existing csv tests to exercise ALL_LAST as well, and to check for all supported types. --- x-pack/plugin/esql/compute/build.gradle | 81 +++-- .../gen/argument/StandardArgument.java | 2 + ...AllFirstBytesRefByTimestampAggregator.java | 262 +++++++++++++++ .../AllFirstDoubleByTimestampAggregator.java | 248 ++++++++++++++ .../AllFirstFloatByTimestampAggregator.java | 248 ++++++++++++++ .../AllFirstIntByTimestampAggregator.java | 248 ++++++++++++++ .../AllFirstLongByTimestampAggregator.java | 25 +- .../AllLastBytesRefByTimestampAggregator.java | 262 +++++++++++++++ .../AllLastDoubleByTimestampAggregator.java | 248 ++++++++++++++ .../AllLastFloatByTimestampAggregator.java | 248 ++++++++++++++ .../AllLastIntByTimestampAggregator.java | 248 ++++++++++++++ .../AllLastLongByTimestampAggregator.java | 248 ++++++++++++++ .../aggregation/AllLongBytesRefState.java | 87 +++++ .../aggregation/AllLongDoubleState.java | 85 +++++ .../aggregation/AllLongFloatState.java | 85 +++++ .../compute/aggregation/AllLongIntState.java | 85 +++++ .../compute/aggregation/AllLongLongState.java | 8 +- ...BytesRefByTimestampAggregatorFunction.java | 159 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ ...ByTimestampGroupingAggregatorFunction.java | 316 +++++++++++++++++ ...stDoubleByTimestampAggregatorFunction.java | 157 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ 
...ByTimestampGroupingAggregatorFunction.java | 312 +++++++++++++++++ ...rstFloatByTimestampAggregatorFunction.java | 157 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ ...ByTimestampGroupingAggregatorFunction.java | 312 +++++++++++++++++ ...FirstIntByTimestampAggregatorFunction.java | 157 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ ...ByTimestampGroupingAggregatorFunction.java | 311 +++++++++++++++++ ...BytesRefByTimestampAggregatorFunction.java | 159 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ ...ByTimestampGroupingAggregatorFunction.java | 316 +++++++++++++++++ ...stDoubleByTimestampAggregatorFunction.java | 157 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ ...ByTimestampGroupingAggregatorFunction.java | 312 +++++++++++++++++ ...astFloatByTimestampAggregatorFunction.java | 157 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ ...ByTimestampGroupingAggregatorFunction.java | 312 +++++++++++++++++ ...lLastIntByTimestampAggregatorFunction.java | 157 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ ...ByTimestampGroupingAggregatorFunction.java | 311 +++++++++++++++++ ...LastLongByTimestampAggregatorFunction.java | 155 +++++++++ ...ByTimestampAggregatorFunctionSupplier.java | 47 +++ ...ByTimestampGroupingAggregatorFunction.java | 310 +++++++++++++++++ .../compute/aggregation/All-X-2State.java.st | 104 ++++++ .../All-X-ValueByTimestampAggregator.java.st | 318 ++++++++++++++++++ .../stats_all_first_all_last.csv-spec | 85 +++++ .../src/main/resources/stats_first.csv-spec | 59 ---- .../xpack/esql/action/EsqlCapabilities.java | 2 + .../function/EsqlFunctionRegistry.java | 2 + .../function/aggregate/AllFirst.java | 22 +- .../function/aggregate/AllLast.java | 160 +++++++++ 52 files changed, 8001 insertions(+), 119 deletions(-) create mode 100644 
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java rename x-pack/plugin/esql/compute/src/main/{java => generated-src}/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java (92%) create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongBytesRefState.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongDoubleState.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongFloatState.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongIntState.java rename 
x-pack/plugin/esql/compute/src/main/{java => generated-src}/org/elasticsearch/compute/aggregation/AllLongLongState.java (86%) create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregatorFunctionSupplier.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregatorFunctionSupplier.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastIntByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-2State.java.st create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-ValueByTimestampAggregator.java.st create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_all_first_all_last.csv-spec create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllLast.java diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 2200d17e67d83..ab915d38ef153 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -496,22 +496,26 @@ tasks.named('stringTemplates').configure { * Generates pairwise states. We generate the ones that we need at the moment, * but add more if you need more. 
*/ - File twoStateInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-2State.java.st") - [longProperties].forEach { v1 -> - [intProperties, longProperties, floatProperties, doubleProperties, bytesRefProperties].forEach { v2 -> - { - var properties = [:] - v1.forEach { k, v -> properties["v1_" + k] = v} - v2.forEach { k, v -> properties["v2_" + k] = v} + def generateTwoStateFiles = { inputFile, prefix = "" -> + def v1Props = [longProperties] + def v2Props = [intProperties, longProperties, floatProperties, doubleProperties, bytesRefProperties] + v1Props.forEach { v1 -> + v2Props.forEach { v2 -> + def properties = [:] + v1.forEach { k, v -> properties["v1_" + k] = v } + v2.forEach { k, v -> properties["v2_" + k] = v } template { it.properties = properties - it.inputFile = twoStateInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/${v1.Type}${v2.Type}State.java" + it.inputFile = inputFile + it.outputFile = "org/elasticsearch/compute/aggregation/${prefix}${v1.Type}${v2.Type}State.java" } } } } + generateTwoStateFiles(file("src/main/java/org/elasticsearch/compute/aggregation/X-2State.java.st")) + generateTwoStateFiles(file("src/main/java/org/elasticsearch/compute/aggregation/All-X-2State.java.st"), "All") + File irateAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-IrateAggregator.java.st") template { it.properties = intProperties @@ -958,37 +962,42 @@ tasks.named('stringTemplates').configure { } // TODO: add {value}_over_time for other types: boolean, bytes_refs - File valueByTimestampAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-ValueByTimestampAggregator.java.st") - ["First", "Last"].forEach { Occurrence -> - { - template { - it.properties = addOccurrence(intProperties, Occurrence) - it.inputFile = valueByTimestampAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/${Occurrence}IntByTimestampAggregator.java" - } - template { - it.properties 
= addOccurrence(longProperties, Occurrence) - it.inputFile = valueByTimestampAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/${Occurrence}LongByTimestampAggregator.java" - } - template { - it.properties = addOccurrence(floatProperties, Occurrence) - it.inputFile = valueByTimestampAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/${Occurrence}FloatByTimestampAggregator.java" - } - template { - it.properties = addOccurrence(doubleProperties, Occurrence) - it.inputFile = valueByTimestampAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/${Occurrence}DoubleByTimestampAggregator.java" - } - template { - it.properties = addOccurrence(bytesRefProperties, Occurrence) - it.inputFile = valueByTimestampAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/${Occurrence}BytesRefByTimestampAggregator.java" + def generateTimestampAggregatorClasses = { inputFilename, prefix = "" -> + def inputFile = file(inputFilename) + ["First", "Last"].forEach { Occurrence -> + { + template { + it.properties = addOccurrence(intProperties, Occurrence) + [Prefix: prefix] + it.inputFile = inputFile + it.outputFile = "org/elasticsearch/compute/aggregation/${prefix}${Occurrence}IntByTimestampAggregator.java" + } + template { + it.properties = addOccurrence(longProperties, Occurrence) + [Prefix: prefix] + it.inputFile = inputFile + it.outputFile = "org/elasticsearch/compute/aggregation/${prefix}${Occurrence}LongByTimestampAggregator.java" + } + template { + it.properties = addOccurrence(floatProperties, Occurrence) + [Prefix: prefix] + it.inputFile = inputFile + it.outputFile = "org/elasticsearch/compute/aggregation/${prefix}${Occurrence}FloatByTimestampAggregator.java" + } + template { + it.properties = addOccurrence(doubleProperties, Occurrence) + [Prefix: prefix] + it.inputFile = inputFile + it.outputFile = 
"org/elasticsearch/compute/aggregation/${prefix}${Occurrence}DoubleByTimestampAggregator.java" + } + template { + it.properties = addOccurrence(bytesRefProperties, Occurrence) + [Prefix: prefix] + it.inputFile = inputFile + it.outputFile = "org/elasticsearch/compute/aggregation/${prefix}${Occurrence}BytesRefByTimestampAggregator.java" + } } } } + generateTimestampAggregatorClasses("src/main/java/org/elasticsearch/compute/aggregation/X-ValueByTimestampAggregator.java.st", "") + generateTimestampAggregatorClasses("src/main/java/org/elasticsearch/compute/aggregation/All-X-ValueByTimestampAggregator.java.st", "All") + File rateAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-RateGroupingAggregatorFunction.java.st") template { it.properties = intProperties diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/argument/StandardArgument.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/argument/StandardArgument.java index 1c57a8054cff3..0add5ec6a65a2 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/argument/StandardArgument.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/argument/StandardArgument.java @@ -21,6 +21,7 @@ import static org.elasticsearch.compute.gen.Types.DOUBLE_BLOCK; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR_FACTORY; +import static org.elasticsearch.compute.gen.Types.FLOAT_BLOCK; import static org.elasticsearch.compute.gen.Types.INT_BLOCK; import static org.elasticsearch.compute.gen.Types.LONG_BLOCK; import static org.elasticsearch.compute.gen.Types.blockType; @@ -120,6 +121,7 @@ static boolean isBlockType(TypeName type) { return type.equals(INT_BLOCK) || type.equals(LONG_BLOCK) || type.equals(DOUBLE_BLOCK) + || type.equals(FLOAT_BLOCK) || type.equals(BOOLEAN_BLOCK) || 
type.equals(BYTES_REF_BLOCK); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java new file mode 100644 index 0000000000000..8079cf86390e6 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java @@ -0,0 +1,262 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the First occurrence value of a time series in a 
specified interval. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "BYTES_REF"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "BYTES_REF_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllFirstBytesRefByTimestampAggregator { + public static String describe() { + return "first_BytesRef_by_timestamp"; + } + + public static AllLongBytesRefState initSingle(DriverContext driverContext) { + return new AllLongBytesRefState(0, new BytesRef(), driverContext.breaker(), describe()); + } + + private static void first(AllLongBytesRefState current, long timestamp, BytesRef value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : new BytesRef()); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongBytesRefState current, @Position int position, BytesRefBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ BytesRef bytesScratch = new BytesRef(); + first(current, timestamp.getLong(position), value.getBytesRef(position, bytesScratch), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts < current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + BytesRef bytesScratch = new BytesRef(); + current.v2(value.getBytesRef(position, bytesScratch)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongBytesRefState current, long timestamp, BytesRef value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp < current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongBytesRefState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantBytesRefBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays(), driverContext.breaker()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, BytesRefBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + BytesRef bytesScratch = new BytesRef(); + current.collectValue(groupId, timestamp.getLong(position), value.getBytesRef(position, bytesScratch), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + BytesRefBlock values, + 
BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long timestamp = timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + BytesRef bytesScratch = new BytesRef(); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getBytesRef(firstIndex + i, bytesScratch), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private ObjectArray values; + private final CircuitBreaker breaker; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays, CircuitBreaker breaker) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + this.breaker = breaker; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newObjectArray(1); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. 
+ */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, BytesRef value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? + if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) > timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + BreakingBytesRefBuilder builder = values.get(groupId); + if (builder == null) { + builder = new BreakingBytesRefBuilder(breaker, "First", value.length); + } + builder.copyBytes(value); + values.set(groupId, builder); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 
1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + for (long i = 0; i < values.size(); i++) { + Releasables.close(values.get(i)); + } + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendBytesRef(values.get(group).bytesRefView()); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendBytesRef(values.get(group).bytesRefView()); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java new file mode 100644 index 0000000000000..2dd466e2c48f1 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. + * This class is generated. 
Edit `All-X-ValueByTimestampAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "DOUBLE"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "DOUBLE_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllFirstDoubleByTimestampAggregator { + public static String describe() { + return "first_double_by_timestamp"; + } + + public static AllLongDoubleState initSingle(DriverContext driverContext) { + return new AllLongDoubleState(0, 0); + } + + private static void first(AllLongDoubleState current, long timestamp, double value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : 0); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongDoubleState current, @Position int position, DoubleBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ first(current, timestamp.getLong(position), value.getDouble(position), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts < current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + current.v2(value.getDouble(position)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongDoubleState current, long timestamp, double value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp < current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongDoubleState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantDoubleBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, DoubleBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + current.collectValue(groupId, timestamp.getLong(position), value.getDouble(position), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + DoubleBlock values, + BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long 
timestamp = timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getDouble(firstIndex + i), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private DoubleArray values; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newDoubleArray(1, false); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. + */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, double value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? 
+ if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) > timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + values.set(groupId, value); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendDouble(values.get(group)); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < 
selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendDouble(values.get(group)); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java new file mode 100644 index 0000000000000..c9defbaf37c36 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.FloatArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "FLOAT"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "FLOAT_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllFirstFloatByTimestampAggregator { + public static String describe() { + return "first_float_by_timestamp"; + } + + public static AllLongFloatState initSingle(DriverContext driverContext) { + return new AllLongFloatState(0, 0); + } + + private static void first(AllLongFloatState current, long timestamp, float value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : 0); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongFloatState current, @Position int position, FloatBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ first(current, timestamp.getLong(position), value.getFloat(position), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts < current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + current.v2(value.getFloat(position)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongFloatState current, long timestamp, float value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp < current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongFloatState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantFloatBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, FloatBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + current.collectValue(groupId, timestamp.getLong(position), value.getFloat(position), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + FloatBlock values, + BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long timestamp = 
timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getFloat(firstIndex + i), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private FloatArray values; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newFloatArray(1, false); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. + */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, float value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? 
+ if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) > timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + values.set(groupId, value); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newFloatBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendFloat(values.get(group)); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newFloatBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < 
selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendFloat(values.get(group)); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java new file mode 100644 index 0000000000000..374b5d1f556a6 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import 
org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "INT"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "INT_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllFirstIntByTimestampAggregator { + public static String describe() { + return "first_int_by_timestamp"; + } + + public static AllLongIntState initSingle(DriverContext driverContext) { + return new AllLongIntState(0, 0); + } + + private static void first(AllLongIntState current, long timestamp, int value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : 0); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongIntState current, @Position int position, IntBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ first(current, timestamp.getLong(position), value.getInt(position), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts < current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + current.v2(value.getInt(position)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongIntState current, long timestamp, int value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp < current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongIntState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantIntBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, IntBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + current.collectValue(groupId, timestamp.getLong(position), value.getInt(position), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + IntBlock values, + BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long timestamp = 
timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getInt(firstIndex + i), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private IntArray values; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newIntArray(1, false); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. + */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, int value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? 
+ if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) > timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + values.set(groupId, value); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendInt(values.get(group)); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newIntBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < 
selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendInt(values.get(group)); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java similarity index 92% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java index f30f0db366491..7c1f6fb34fb30 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java @@ -8,24 +8,30 @@ package org.elasticsearch.compute.aggregation; // begin generated imports -import org.elasticsearch.common.util.ByteArray; -import org.elasticsearch.compute.ann.Position; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.compute.ann.Aggregator; import org.elasticsearch.compute.ann.GroupingAggregator; import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; import org.elasticsearch.compute.data.Block; import 
org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasables; // end generated imports /** * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. - * This class is not generated yet, but will be eventually by something like `X-ValueByTimestampAggregator.java.st`. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. */ @Aggregator( { @@ -57,9 +63,8 @@ private static void first(AllLongLongState current, long timestamp, long value, } public static void combine(AllLongLongState current, @Position int position, LongBlock value, LongBlock timestamp) { - if (current.seen() == false) { - // we never observed a value before so we'll take this right in, no questions asked + // We never observed a value before so we'll take this right in, no questions asked. 
first(current, timestamp.getLong(position), value.getLong(position), value.isNull(position) == false); return; } @@ -71,8 +76,7 @@ public static void combine(AllLongLongState current, @Position int position, Lon current.seen(true); if (value.isNull(position) == false) { // non-null value - long earlierValue = value.getLong(position); - current.v2(earlierValue); + current.v2(value.getLong(position)); current.v2Seen(true); } else { // null value @@ -85,7 +89,7 @@ public static void combineIntermediate(AllLongLongState current, long timestamp, if (seen) { if (current.seen()) { if (timestamp < current.v1()) { - // an earlier timestamp has been observed in the reporting shard so we must update internal state + // A newer timestamp has been observed in the reporting shard so we must update internal state current.v1(timestamp); current.v2(value); current.v2Seen(v2Seen); @@ -119,7 +123,7 @@ public static void combine(GroupingState current, int groupId, @Position int pos public static void combineIntermediate( GroupingState current, int groupId, - LongBlock timestamps, // stylecheck + LongBlock timestamps, LongBlock values, BooleanBlock hasValues, int otherPosition @@ -189,7 +193,6 @@ void collectValue(int groupId, long timestamp, long value, boolean hasVal) { if (updated) { values = bigArrays.grow(values, groupId + 1); values.set(groupId, value); - hasValues = bigArrays.grow(hasValues, groupId + 1); hasValues.set(groupId, (byte) (hasVal ? 
1 : 0)); } @@ -204,7 +207,7 @@ public void close() { @Override public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { - // creates 3 intermediate state blocks (timestamps, values, hasValue) + // Creates 3 intermediate state blocks (timestamps, values, hasValue) try ( var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); var valuesBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java new file mode 100644 index 0000000000000..6ece7c3928b98 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java @@ -0,0 +1,262 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "BYTES_REF"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "BYTES_REF_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllLastBytesRefByTimestampAggregator { + public static String describe() { + return "last_BytesRef_by_timestamp"; + } + + public static AllLongBytesRefState initSingle(DriverContext driverContext) { + return new AllLongBytesRefState(0, new BytesRef(), driverContext.breaker(), describe()); + } + + private static void first(AllLongBytesRefState current, long timestamp, BytesRef value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : new BytesRef()); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongBytesRefState current, @Position int position, BytesRefBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ BytesRef bytesScratch = new BytesRef(); + first(current, timestamp.getLong(position), value.getBytesRef(position, bytesScratch), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts > current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + BytesRef bytesScratch = new BytesRef(); + current.v2(value.getBytesRef(position, bytesScratch)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongBytesRefState current, long timestamp, BytesRef value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp > current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongBytesRefState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantBytesRefBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays(), driverContext.breaker()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, BytesRefBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + BytesRef bytesScratch = new BytesRef(); + current.collectValue(groupId, timestamp.getLong(position), value.getBytesRef(position, bytesScratch), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + BytesRefBlock values, + 
BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long timestamp = timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + BytesRef bytesScratch = new BytesRef(); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getBytesRef(firstIndex + i, bytesScratch), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private ObjectArray values; + private final CircuitBreaker breaker; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays, CircuitBreaker breaker) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + this.breaker = breaker; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newObjectArray(1); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. 
+ */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, BytesRef value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? + if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) < timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + BreakingBytesRefBuilder builder = values.get(groupId); + if (builder == null) { + builder = new BreakingBytesRefBuilder(breaker, "Last", value.length); + } + builder.copyBytes(value); + values.set(groupId, builder); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 
1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + for (long i = 0; i < values.size(); i++) { + Releasables.close(values.get(i)); + } + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendBytesRef(values.get(group).bytesRefView()); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendBytesRef(values.get(group).bytesRefView()); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java new file mode 100644 index 0000000000000..2425b0d1aca50 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. + * This class is generated. 
Edit `All-X-ValueByTimestampAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "DOUBLE"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "DOUBLE_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllLastDoubleByTimestampAggregator { + public static String describe() { + return "last_double_by_timestamp"; + } + + public static AllLongDoubleState initSingle(DriverContext driverContext) { + return new AllLongDoubleState(0, 0); + } + + private static void first(AllLongDoubleState current, long timestamp, double value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : 0); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongDoubleState current, @Position int position, DoubleBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ first(current, timestamp.getLong(position), value.getDouble(position), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts > current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + current.v2(value.getDouble(position)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongDoubleState current, long timestamp, double value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp > current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongDoubleState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantDoubleBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, DoubleBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + current.collectValue(groupId, timestamp.getLong(position), value.getDouble(position), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + DoubleBlock values, + BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long 
timestamp = timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getDouble(firstIndex + i), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private DoubleArray values; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newDoubleArray(1, false); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. + */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, double value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? 
+ if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) < timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + values.set(groupId, value); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendDouble(values.get(group)); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < 
selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendDouble(values.get(group)); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java new file mode 100644 index 0000000000000..e5f5c907817bd --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.FloatArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "FLOAT"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "FLOAT_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllLastFloatByTimestampAggregator { + public static String describe() { + return "last_float_by_timestamp"; + } + + public static AllLongFloatState initSingle(DriverContext driverContext) { + return new AllLongFloatState(0, 0); + } + + private static void first(AllLongFloatState current, long timestamp, float value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : 0); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongFloatState current, @Position int position, FloatBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ first(current, timestamp.getLong(position), value.getFloat(position), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts > current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + current.v2(value.getFloat(position)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongFloatState current, long timestamp, float value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp > current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongFloatState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantFloatBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, FloatBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + current.collectValue(groupId, timestamp.getLong(position), value.getFloat(position), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + FloatBlock values, + BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long timestamp = 
timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getFloat(firstIndex + i), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private FloatArray values; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newFloatArray(1, false); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. + */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, float value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? 
+ if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) < timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + values.set(groupId, value); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newFloatBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendFloat(values.get(group)); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newFloatBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < 
selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendFloat(values.get(group)); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java new file mode 100644 index 0000000000000..bc767cea68957 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import 
org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "INT"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "INT_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllLastIntByTimestampAggregator { + public static String describe() { + return "last_int_by_timestamp"; + } + + public static AllLongIntState initSingle(DriverContext driverContext) { + return new AllLongIntState(0, 0); + } + + private static void first(AllLongIntState current, long timestamp, int value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : 0); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongIntState current, @Position int position, IntBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ first(current, timestamp.getLong(position), value.getInt(position), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts > current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + current.v2(value.getInt(position)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongIntState current, long timestamp, int value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp > current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongIntState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantIntBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, IntBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + current.collectValue(groupId, timestamp.getLong(position), value.getInt(position), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + IntBlock values, + BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long timestamp = 
timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getInt(firstIndex + i), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private IntArray values; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newIntArray(1, false); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. + */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, int value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? 
+ if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) < timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + values.set(groupId, value); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newIntBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendInt(values.get(group)); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newIntBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < 
selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendInt(values.get(group)); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java new file mode 100644 index 0000000000000..7d8dcbf0efd9f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import 
org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "LONG"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "LONG_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class AllLastLongByTimestampAggregator { + public static String describe() { + return "last_long_by_timestamp"; + } + + public static AllLongLongState initSingle(DriverContext driverContext) { + return new AllLongLongState(0, 0); + } + + private static void first(AllLongLongState current, long timestamp, long value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + current.v2(v2Seen ? value : 0); + current.v2Seen(v2Seen); + } + + public static void combine(AllLongLongState current, @Position int position, LongBlock value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ first(current, timestamp.getLong(position), value.getLong(position), value.isNull(position) == false); + return; + } + + long ts = timestamp.getLong(position); + if (ts > current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + current.v2(value.getLong(position)); + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate(AllLongLongState current, long timestamp, long value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp > current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal(AllLongLongState current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstantLongBlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + return new GroupingState(driverContext.bigArrays()); + } + + public static void combine(GroupingState current, int groupId, @Position int position, LongBlock value, LongBlock timestamp) { + boolean hasValue = value.isNull(position) == false; + current.collectValue(groupId, timestamp.getLong(position), value.getLong(position), hasValue); + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + LongBlock values, + BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long timestamp = 
timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + for (int i = 0; i < valueCount; i++) { + current.collectValue(groupId, timestamp, values.getLong(firstIndex + i), hasValueFlag); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + private LongArray values; + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + this.values = bigArrays.newLongArray(1, false); + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. + */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, long value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? 
+ if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) < timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + values.set(groupId, value); + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + valuesBuilder.appendLong(values.get(group)); + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().newLongBlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < 
selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + builder.appendLong(values.get(group)); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongBytesRefState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongBytesRefState.java new file mode 100644 index 0000000000000..b6942fffcfc95 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongBytesRefState.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * Aggregator state for a single {@code long} and a single {@code BytesRef}, with support for null v2 values. + * This class is generated. Edit {@code All-X-2State.java.st} instead. 
+ */ +final class AllLongBytesRefState implements AggregatorState { + // the timestamp + private long v1; + + // the value + private final BreakingBytesRefBuilder v2; + // whether we've seen a first/last timestamp + private boolean seen; + + // because we might observe a first/last timestamp without observing a value (e.g.: value was null) + private boolean v2Seen; + + AllLongBytesRefState(long v1, BytesRef v2, CircuitBreaker breaker, String label) { + this.v1 = v1; + this.v2 = new BreakingBytesRefBuilder(breaker, label, v2.length); + this.v2.copyBytes(v2); + } + + long v1() { + return v1; + } + + void v1(long v1) { + this.v1 = v1; + } + + BytesRef v2() { + return v2.bytesRefView(); + } + + void v2(BytesRef v2) { + this.v2.copyBytes(v2); + } + + boolean seen() { + return seen; + } + + void seen(boolean seen) { + this.seen = seen; + } + + boolean v2Seen() { + return v2Seen; + } + + void v2Seen(boolean v2Seen) { + this.v2Seen = v2Seen; + } + + /** Extracts an intermediate view of the contents of this state. 
*/ + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 4; + blocks[offset + 0] = driverContext.blockFactory().newConstantLongBlockWith(v1, 1); + blocks[offset + 1] = driverContext.blockFactory().newConstantBytesRefBlockWith(v2.bytesRefView(), 1); + blocks[offset + 2] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); + blocks[offset + 3] = driverContext.blockFactory().newConstantBooleanBlockWith(v2Seen, 1); + } + + @Override + public void close() { + Releasables.close(this.v2); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongDoubleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongDoubleState.java new file mode 100644 index 0000000000000..40ed40f216434 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongDoubleState.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * Aggregator state for a single {@code long} and a single {@code double}, with support for null v2 values. + * This class is generated. Edit {@code All-X-2State.java.st} instead. 
+ */ +final class AllLongDoubleState implements AggregatorState { + // the timestamp + private long v1; + + // the value + private double v2; + + // whether we've seen a first/last timestamp + private boolean seen; + + // because we might observe a first/last timestamp without observing a value (e.g.: value was null) + private boolean v2Seen; + + AllLongDoubleState(long v1, double v2) { + this.v1 = v1; + this.v2 = v2; + } + + long v1() { + return v1; + } + + void v1(long v1) { + this.v1 = v1; + } + + double v2() { + return v2; + } + + void v2(double v2) { + this.v2 = v2; + } + + boolean seen() { + return seen; + } + + void seen(boolean seen) { + this.seen = seen; + } + + boolean v2Seen() { + return v2Seen; + } + + void v2Seen(boolean v2Seen) { + this.v2Seen = v2Seen; + } + + /** Extracts an intermediate view of the contents of this state. */ + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 4; + blocks[offset + 0] = driverContext.blockFactory().newConstantLongBlockWith(v1, 1); + blocks[offset + 1] = driverContext.blockFactory().newConstantDoubleBlockWith(v2, 1); + blocks[offset + 2] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); + blocks[offset + 3] = driverContext.blockFactory().newConstantBooleanBlockWith(v2Seen, 1); + } + + @Override + public void close() {} +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongFloatState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongFloatState.java new file mode 100644 index 0000000000000..adc0d5eaebe07 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongFloatState.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * Aggregator state for a single {@code long} and a single {@code float}, with support for null v2 values. + * This class is generated. Edit {@code All-X-2State.java.st} instead. + */ +final class AllLongFloatState implements AggregatorState { + // the timestamp + private long v1; + + // the value + private float v2; + + // whether we've seen a first/last timestamp + private boolean seen; + + // because we might observe a first/last timestamp without observing a value (e.g.: value was null) + private boolean v2Seen; + + AllLongFloatState(long v1, float v2) { + this.v1 = v1; + this.v2 = v2; + } + + long v1() { + return v1; + } + + void v1(long v1) { + this.v1 = v1; + } + + float v2() { + return v2; + } + + void v2(float v2) { + this.v2 = v2; + } + + boolean seen() { + return seen; + } + + void seen(boolean seen) { + this.seen = seen; + } + + boolean v2Seen() { + return v2Seen; + } + + void v2Seen(boolean v2Seen) { + this.v2Seen = v2Seen; + } + + /** Extracts an intermediate view of the contents of this state. 
*/ + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 4; + blocks[offset + 0] = driverContext.blockFactory().newConstantLongBlockWith(v1, 1); + blocks[offset + 1] = driverContext.blockFactory().newConstantFloatBlockWith(v2, 1); + blocks[offset + 2] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); + blocks[offset + 3] = driverContext.blockFactory().newConstantBooleanBlockWith(v2Seen, 1); + } + + @Override + public void close() {} +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongIntState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongIntState.java new file mode 100644 index 0000000000000..bd8a8e618d937 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongIntState.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * Aggregator state for a single {@code long} and a single {@code int}, with support for null v2 values. + * This class is generated. Edit {@code All-X-2State.java.st} instead. 
+ */ +final class AllLongIntState implements AggregatorState { + // the timestamp + private long v1; + + // the value + private int v2; + + // whether we've seen a first/last timestamp + private boolean seen; + + // because we might observe a first/last timestamp without observing a value (e.g.: value was null) + private boolean v2Seen; + + AllLongIntState(long v1, int v2) { + this.v1 = v1; + this.v2 = v2; + } + + long v1() { + return v1; + } + + void v1(long v1) { + this.v1 = v1; + } + + int v2() { + return v2; + } + + void v2(int v2) { + this.v2 = v2; + } + + boolean seen() { + return seen; + } + + void seen(boolean seen) { + this.seen = seen; + } + + boolean v2Seen() { + return v2Seen; + } + + void v2Seen(boolean v2Seen) { + this.v2Seen = v2Seen; + } + + /** Extracts an intermediate view of the contents of this state. */ + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 4; + blocks[offset + 0] = driverContext.blockFactory().newConstantLongBlockWith(v1, 1); + blocks[offset + 1] = driverContext.blockFactory().newConstantIntBlockWith(v2, 1); + blocks[offset + 2] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); + blocks[offset + 3] = driverContext.blockFactory().newConstantBooleanBlockWith(v2Seen, 1); + } + + @Override + public void close() {} +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AllLongLongState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongLongState.java similarity index 86% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AllLongLongState.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongLongState.java index 66420bc35b633..4540799932c14 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AllLongLongState.java 
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongLongState.java @@ -8,13 +8,17 @@ package org.elasticsearch.compute.aggregation; // begin generated imports +import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.core.Releasables; // end generated imports /** - * Aggregator state for a single {@code long} and a single {@code long}. - * This class is not generated yet, but will be eventually by something like {@code X-2State.java.st}. + * Aggregator state for a single {@code long} and a single {@code long}, with support for null v2 values. + * This class is generated. Edit {@code All-X-2State.java.st} instead. */ final class AllLongLongState implements AggregatorState { // the timestamp diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..0b3b7cc321dd8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregatorFunction.java @@ -0,0 +1,159 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
package org.elasticsearch.compute.aggregation;

import java.util.List;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.BooleanVector;
import org.elasticsearch.compute.data.BytesRefBlock;
import org.elasticsearch.compute.data.BytesRefVector;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.LongVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;

/**
 * {@link AggregatorFunction} implementation for {@link AllFirstBytesRefByTimestampAggregator}.
 * This class is generated. Edit {@code AggregatorImplementer} instead.
 */
public final class AllFirstBytesRefByTimestampAggregatorFunction implements AggregatorFunction {
    private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
        new IntermediateStateDesc("timestamps", ElementType.LONG),
        new IntermediateStateDesc("values", ElementType.BYTES_REF),
        new IntermediateStateDesc("seen", ElementType.BOOLEAN),
        new IntermediateStateDesc("hasValue", ElementType.BOOLEAN)
    );

    private final DriverContext driverContext;

    private final AllLongBytesRefState state;

    private final List<Integer> channels;

    public AllFirstBytesRefByTimestampAggregatorFunction(
        DriverContext driverContext,
        List<Integer> channels,
        AllLongBytesRefState state
    ) {
        this.driverContext = driverContext;
        this.channels = channels;
        this.state = state;
    }

    public static AllFirstBytesRefByTimestampAggregatorFunction create(DriverContext driverContext, List<Integer> channels) {
        return new AllFirstBytesRefByTimestampAggregatorFunction(
            driverContext,
            channels,
            AllFirstBytesRefByTimestampAggregator.initSingle(driverContext)
        );
    }

    public static List<IntermediateStateDesc> intermediateStateDesc() {
        return INTERMEDIATE_STATE_DESC;
    }

    @Override
    public int intermediateBlockCount() {
        return INTERMEDIATE_STATE_DESC.size();
    }

    @Override
    public void addRawInput(Page page, BooleanVector mask) {
        if (mask.allFalse()) {
            // Entire page masked away.
            return;
        }
        BytesRefBlock valueBlock = page.getBlock(channels.get(0));
        LongBlock timestampBlock = page.getBlock(channels.get(1));
        if (mask.allTrue()) {
            addRawBlock(valueBlock, timestampBlock);
        } else {
            addRawBlock(valueBlock, timestampBlock, mask);
        }
    }

    private void addRawBlock(BytesRefBlock valueBlock, LongBlock timestampBlock) {
        for (int p = 0; p < valueBlock.getPositionCount(); p++) {
            AllFirstBytesRefByTimestampAggregator.combine(state, p, valueBlock, timestampBlock);
        }
    }

    private void addRawBlock(BytesRefBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) {
        for (int p = 0; p < valueBlock.getPositionCount(); p++) {
            if (mask.getBoolean(p)) {
                AllFirstBytesRefByTimestampAggregator.combine(state, p, valueBlock, timestampBlock);
            }
        }
    }

    @Override
    public void addIntermediateInput(Page page) {
        assert channels.size() == intermediateBlockCount();
        assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
        Block timestampsUncast = page.getBlock(channels.get(0));
        if (timestampsUncast.areAllValuesNull()) {
            return;
        }
        LongVector timestamps = ((LongBlock) timestampsUncast).asVector();
        assert timestamps.getPositionCount() == 1;
        Block valuesUncast = page.getBlock(channels.get(1));
        if (valuesUncast.areAllValuesNull()) {
            return;
        }
        BytesRefVector values = ((BytesRefBlock) valuesUncast).asVector();
        assert values.getPositionCount() == 1;
        Block seenUncast = page.getBlock(channels.get(2));
        if (seenUncast.areAllValuesNull()) {
            return;
        }
        BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
        assert seen.getPositionCount() == 1;
        Block hasValueUncast = page.getBlock(channels.get(3));
        if (hasValueUncast.areAllValuesNull()) {
            return;
        }
        BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector();
        assert hasValue.getPositionCount() == 1;
        BytesRef scratch = new BytesRef();
        AllFirstBytesRefByTimestampAggregator.combineIntermediate(
            state,
            timestamps.getLong(0),
            values.getBytesRef(0, scratch),
            seen.getBoolean(0),
            hasValue.getBoolean(0)
        );
    }

    @Override
    public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
        state.toIntermediate(blocks, offset, driverContext);
    }

    @Override
    public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
        blocks[offset] = state.seen()
            ? AllFirstBytesRefByTimestampAggregator.evaluateFinal(state, driverContext)
            : driverContext.blockFactory().newConstantNullBlock(1);
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + "[channels=" + channels + "]";
    }

    @Override
    public void close() {
        state.close();
    }
}
package org.elasticsearch.compute.aggregation;

import java.util.List;
import org.elasticsearch.compute.operator.DriverContext;

/**
 * {@link AggregatorFunctionSupplier} implementation for {@link AllFirstBytesRefByTimestampAggregator}.
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
 */
public final class AllFirstBytesRefByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
    public AllFirstBytesRefByTimestampAggregatorFunctionSupplier() {}

    @Override
    public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
        return AllFirstBytesRefByTimestampAggregatorFunction.intermediateStateDesc();
    }

    @Override
    public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
        return AllFirstBytesRefByTimestampGroupingAggregatorFunction.intermediateStateDesc();
    }

    @Override
    public AllFirstBytesRefByTimestampAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) {
        return AllFirstBytesRefByTimestampAggregatorFunction.create(driverContext, channels);
    }

    @Override
    public AllFirstBytesRefByTimestampGroupingAggregatorFunction groupingAggregator(
        DriverContext driverContext,
        List<Integer> channels
    ) {
        return AllFirstBytesRefByTimestampGroupingAggregatorFunction.create(channels, driverContext);
    }

    @Override
    public String describe() {
        return AllFirstBytesRefByTimestampAggregator.describe();
    }
}
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..49af4a6eb3c21 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,316 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link AllFirstBytesRefByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
+ */ +public final class AllFirstBytesRefByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.BYTES_REF), + new IntermediateStateDesc("hasValues", ElementType.BOOLEAN) ); + + private final AllFirstBytesRefByTimestampAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public AllFirstBytesRefByTimestampGroupingAggregatorFunction(List channels, + AllFirstBytesRefByTimestampAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static AllFirstBytesRefByTimestampGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new AllFirstBytesRefByTimestampGroupingAggregatorFunction(channels, AllFirstBytesRefByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + BytesRefVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, 
valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + LongVector timestampVector = timestampBlock.asVector(); + if (timestampVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, BytesRefBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = 
groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllFirstBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + BytesRef valuesScratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllFirstBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, BytesRefBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = 
groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllFirstBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + BytesRef valuesScratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllFirstBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int 
valuesPosition = groupPosition + positionOffset; + int groupId = groups.getInt(groupPosition); + AllFirstBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + BytesRef valuesScratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + AllFirstBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, BytesRefBlock valueBlock, + LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + 
state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = AllFirstBytesRefByTimestampAggregator.evaluateFinal(state, selected, ctx); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..4bd72249f455a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregatorFunction.java @@ -0,0 +1,157 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link AllFirstDoubleByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorImplementer} instead. + */ +public final class AllFirstDoubleByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.DOUBLE), + new IntermediateStateDesc("seen", ElementType.BOOLEAN), + new IntermediateStateDesc("hasValue", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final AllLongDoubleState state; + + private final List channels; + + public AllFirstDoubleByTimestampAggregatorFunction(DriverContext driverContext, + List channels, AllLongDoubleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static AllFirstDoubleByTimestampAggregatorFunction create(DriverContext driverContext, + List channels) { + return new AllFirstDoubleByTimestampAggregatorFunction(driverContext, channels, AllFirstDoubleByTimestampAggregator.initSingle(driverContext)); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + 
public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + DoubleBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + DoubleBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + private void addRawBlock(DoubleBlock valueBlock, LongBlock timestampBlock) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + AllFirstDoubleByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + private void addRawBlock(DoubleBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + AllFirstDoubleByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleVector values = ((DoubleBlock) valuesUncast).asVector(); + assert values.getPositionCount() == 1; + Block seenUncast = 
page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + Block hasValueUncast = page.getBlock(channels.get(3)); + if (hasValueUncast.areAllValuesNull()) { + return; + } + BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector(); + assert hasValue.getPositionCount() == 1; + AllFirstDoubleByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getDouble(0), seen.getBoolean(0), hasValue.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); + return; + } + blocks[offset] = AllFirstDoubleByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..1afdb82f8cc2d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link AllFirstDoubleByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. + */ +public final class AllFirstDoubleByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public AllFirstDoubleByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return AllFirstDoubleByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return AllFirstDoubleByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AllFirstDoubleByTimestampAggregatorFunction aggregator(DriverContext driverContext, + List channels) { + return AllFirstDoubleByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public AllFirstDoubleByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return AllFirstDoubleByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return AllFirstDoubleByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..a6dbaedcedbbf --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,312 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link AllFirstDoubleByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
+ */ +public final class AllFirstDoubleByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.DOUBLE), + new IntermediateStateDesc("hasValues", ElementType.BOOLEAN) ); + + private final AllFirstDoubleByTimestampAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public AllFirstDoubleByTimestampGroupingAggregatorFunction(List channels, + AllFirstDoubleByTimestampAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static AllFirstDoubleByTimestampGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new AllFirstDoubleByTimestampGroupingAggregatorFunction(channels, AllFirstDoubleByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + DoubleBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + DoubleVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, 
timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + LongVector timestampVector = timestampBlock.asVector(); + if (timestampVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, DoubleBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < 
groupEnd; g++) { + int groupId = groups.getInt(g); + AllFirstDoubleByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllFirstDoubleByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, DoubleBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + 
groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllFirstDoubleByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllFirstDoubleByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int valuesPosition = groupPosition + positionOffset; + int groupId = groups.getInt(groupPosition); + 
AllFirstDoubleByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + AllFirstDoubleByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, DoubleBlock valueBlock, + LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + 
GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = AllFirstDoubleByTimestampAggregator.evaluateFinal(state, selected, ctx); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..d1ad9a33cc6c9 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregatorFunction.java @@ -0,0 +1,157 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.FloatVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link AllFirstFloatByTimestampAggregator}. 
+ * This class is generated. Edit {@code AggregatorImplementer} instead. + */ +public final class AllFirstFloatByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.FLOAT), + new IntermediateStateDesc("seen", ElementType.BOOLEAN), + new IntermediateStateDesc("hasValue", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final AllLongFloatState state; + + private final List channels; + + public AllFirstFloatByTimestampAggregatorFunction(DriverContext driverContext, + List channels, AllLongFloatState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static AllFirstFloatByTimestampAggregatorFunction create(DriverContext driverContext, + List channels) { + return new AllFirstFloatByTimestampAggregatorFunction(driverContext, channels, AllFirstFloatByTimestampAggregator.initSingle(driverContext)); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + FloatBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + FloatBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + private void 
addRawBlock(FloatBlock valueBlock, LongBlock timestampBlock) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + AllFirstFloatByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + private void addRawBlock(FloatBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + AllFirstFloatByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + FloatVector values = ((FloatBlock) valuesUncast).asVector(); + assert values.getPositionCount() == 1; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + Block hasValueUncast = page.getBlock(channels.get(3)); + if (hasValueUncast.areAllValuesNull()) { + return; + } + BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector(); + assert hasValue.getPositionCount() == 1; + AllFirstFloatByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getFloat(0), seen.getBoolean(0), hasValue.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, 
DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); + return; + } + blocks[offset] = AllFirstFloatByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..f3f6bacbb7a3e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link AllFirstFloatByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
+ */ +public final class AllFirstFloatByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public AllFirstFloatByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return AllFirstFloatByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return AllFirstFloatByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AllFirstFloatByTimestampAggregatorFunction aggregator(DriverContext driverContext, + List channels) { + return AllFirstFloatByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public AllFirstFloatByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return AllFirstFloatByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return AllFirstFloatByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..f82354c8ef094 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,312 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.FloatVector; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link AllFirstFloatByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. + */ +public final class AllFirstFloatByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.FLOAT), + new IntermediateStateDesc("hasValues", ElementType.BOOLEAN) ); + + private final AllFirstFloatByTimestampAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public AllFirstFloatByTimestampGroupingAggregatorFunction(List channels, + AllFirstFloatByTimestampAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static AllFirstFloatByTimestampGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new AllFirstFloatByTimestampGroupingAggregatorFunction(channels, 
AllFirstFloatByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + FloatBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + FloatVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + LongVector timestampVector = timestampBlock.asVector(); + if (timestampVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + 
@Override + public void add(int positionOffset, IntArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, FloatBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllFirstFloatByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + FloatBlock values = (FloatBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == 
hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllFirstFloatByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, FloatBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllFirstFloatByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + FloatBlock values = (FloatBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == 
values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllFirstFloatByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, FloatBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int valuesPosition = groupPosition + positionOffset; + int groupId = groups.getInt(groupPosition); + AllFirstFloatByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + FloatBlock values = (FloatBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); 
groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + AllFirstFloatByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, FloatBlock valueBlock, + LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = AllFirstFloatByTimestampAggregator.evaluateFinal(state, selected, ctx); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..43a5f327ea510 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregatorFunction.java @@ -0,0 +1,157 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link AllFirstIntByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorImplementer} instead. + */ +public final class AllFirstIntByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.INT), + new IntermediateStateDesc("seen", ElementType.BOOLEAN), + new IntermediateStateDesc("hasValue", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final AllLongIntState state; + + private final List channels; + + public AllFirstIntByTimestampAggregatorFunction(DriverContext driverContext, + List channels, AllLongIntState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static AllFirstIntByTimestampAggregatorFunction create(DriverContext driverContext, + List channels) { + return new AllFirstIntByTimestampAggregatorFunction(driverContext, channels, AllFirstIntByTimestampAggregator.initSingle(driverContext)); + } + + public 
static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + IntBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + IntBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + private void addRawBlock(IntBlock valueBlock, LongBlock timestampBlock) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + AllFirstIntByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + private void addRawBlock(IntBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + AllFirstIntByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntVector values = ((IntBlock) valuesUncast).asVector(); + 
assert values.getPositionCount() == 1; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + Block hasValueUncast = page.getBlock(channels.get(3)); + if (hasValueUncast.areAllValuesNull()) { + return; + } + BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector(); + assert hasValue.getPositionCount() == 1; + AllFirstIntByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getInt(0), seen.getBoolean(0), hasValue.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); + return; + } + blocks[offset] = AllFirstIntByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..bc66069a04414 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link AllFirstIntByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. + */ +public final class AllFirstIntByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public AllFirstIntByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return AllFirstIntByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return AllFirstIntByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AllFirstIntByTimestampAggregatorFunction aggregator(DriverContext driverContext, + List channels) { + return AllFirstIntByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public AllFirstIntByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return AllFirstIntByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return AllFirstIntByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..bc7355b3cd909 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,311 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link AllFirstIntByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
+ */ +public final class AllFirstIntByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.INT), + new IntermediateStateDesc("hasValues", ElementType.BOOLEAN) ); + + private final AllFirstIntByTimestampAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public AllFirstIntByTimestampGroupingAggregatorFunction(List channels, + AllFirstIntByTimestampAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static AllFirstIntByTimestampGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new AllFirstIntByTimestampGroupingAggregatorFunction(channels, AllFirstIntByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + IntBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + IntVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + 
public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + LongVector timestampVector = timestampBlock.asVector(); + if (timestampVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, IntBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = 
groups.getInt(g); + AllFirstIntByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntBlock values = (IntBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllFirstIntByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, IntBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = 
groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllFirstIntByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntBlock values = (IntBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllFirstIntByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int valuesPosition = groupPosition + positionOffset; + int groupId = groups.getInt(groupPosition); + AllFirstIntByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } 
+ } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntBlock values = (IntBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + AllFirstIntByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, IntBlock valueBlock, + LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = AllFirstIntByTimestampAggregator.evaluateFinal(state, selected, ctx); 
+ } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..c28e758010bc8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregatorFunction.java @@ -0,0 +1,159 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link AllLastBytesRefByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorImplementer} instead. 
+ */ +public final class AllLastBytesRefByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.BYTES_REF), + new IntermediateStateDesc("seen", ElementType.BOOLEAN), + new IntermediateStateDesc("hasValue", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final AllLongBytesRefState state; + + private final List channels; + + public AllLastBytesRefByTimestampAggregatorFunction(DriverContext driverContext, + List channels, AllLongBytesRefState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static AllLastBytesRefByTimestampAggregatorFunction create(DriverContext driverContext, + List channels) { + return new AllLastBytesRefByTimestampAggregatorFunction(driverContext, channels, AllLastBytesRefByTimestampAggregator.initSingle(driverContext)); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + BytesRefBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + BytesRefBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + private void addRawBlock(BytesRefBlock valueBlock, LongBlock timestampBlock) 
{ + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + AllLastBytesRefByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + private void addRawBlock(BytesRefBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + AllLastBytesRefByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefVector values = ((BytesRefBlock) valuesUncast).asVector(); + assert values.getPositionCount() == 1; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + Block hasValueUncast = page.getBlock(channels.get(3)); + if (hasValueUncast.areAllValuesNull()) { + return; + } + BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector(); + assert hasValue.getPositionCount() == 1; + BytesRef valuesScratch = new BytesRef(); + AllLastBytesRefByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getBytesRef(0, valuesScratch), seen.getBoolean(0), hasValue.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, 
int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); + return; + } + blocks[offset] = AllLastBytesRefByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..836293ff44c30 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link AllLastBytesRefByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
+ */ +public final class AllLastBytesRefByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public AllLastBytesRefByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return AllLastBytesRefByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return AllLastBytesRefByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AllLastBytesRefByTimestampAggregatorFunction aggregator(DriverContext driverContext, + List channels) { + return AllLastBytesRefByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public AllLastBytesRefByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return AllLastBytesRefByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return AllLastBytesRefByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..d47c77a4575e8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,316 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link AllLastBytesRefByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
+ */ +public final class AllLastBytesRefByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.BYTES_REF), + new IntermediateStateDesc("hasValues", ElementType.BOOLEAN) ); + + private final AllLastBytesRefByTimestampAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public AllLastBytesRefByTimestampGroupingAggregatorFunction(List channels, + AllLastBytesRefByTimestampAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static AllLastBytesRefByTimestampGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new AllLastBytesRefByTimestampGroupingAggregatorFunction(channels, AllLastBytesRefByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + BytesRefVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, 
valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + LongVector timestampVector = timestampBlock.asVector(); + if (timestampVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, BytesRefBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = 
groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllLastBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + BytesRef valuesScratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllLastBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, BytesRefBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = 
groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllLastBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + BytesRef valuesScratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllLastBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int 
valuesPosition = groupPosition + positionOffset; + int groupId = groups.getInt(groupPosition); + AllLastBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + BytesRef valuesScratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + AllLastBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, BytesRefBlock valueBlock, + LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + 
state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = AllLastBytesRefByTimestampAggregator.evaluateFinal(state, selected, ctx); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..5f239208be503 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregatorFunction.java @@ -0,0 +1,157 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link AllLastDoubleByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorImplementer} instead. + */ +public final class AllLastDoubleByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.DOUBLE), + new IntermediateStateDesc("seen", ElementType.BOOLEAN), + new IntermediateStateDesc("hasValue", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final AllLongDoubleState state; + + private final List channels; + + public AllLastDoubleByTimestampAggregatorFunction(DriverContext driverContext, + List channels, AllLongDoubleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static AllLastDoubleByTimestampAggregatorFunction create(DriverContext driverContext, + List channels) { + return new AllLastDoubleByTimestampAggregatorFunction(driverContext, channels, AllLastDoubleByTimestampAggregator.initSingle(driverContext)); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public 
int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + DoubleBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + DoubleBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + private void addRawBlock(DoubleBlock valueBlock, LongBlock timestampBlock) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + AllLastDoubleByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + private void addRawBlock(DoubleBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + AllLastDoubleByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleVector values = ((DoubleBlock) valuesUncast).asVector(); + assert values.getPositionCount() == 1; + Block seenUncast = 
page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + Block hasValueUncast = page.getBlock(channels.get(3)); + if (hasValueUncast.areAllValuesNull()) { + return; + } + BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector(); + assert hasValue.getPositionCount() == 1; + AllLastDoubleByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getDouble(0), seen.getBoolean(0), hasValue.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); + return; + } + blocks[offset] = AllLastDoubleByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..a1dce2583a363 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link AllLastDoubleByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. + */ +public final class AllLastDoubleByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public AllLastDoubleByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return AllLastDoubleByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return AllLastDoubleByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AllLastDoubleByTimestampAggregatorFunction aggregator(DriverContext driverContext, + List channels) { + return AllLastDoubleByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public AllLastDoubleByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return AllLastDoubleByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return AllLastDoubleByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..9a8c846a318f2 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,312 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link AllLastDoubleByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. 
+ */ +public final class AllLastDoubleByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.DOUBLE), + new IntermediateStateDesc("hasValues", ElementType.BOOLEAN) ); + + private final AllLastDoubleByTimestampAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public AllLastDoubleByTimestampGroupingAggregatorFunction(List channels, + AllLastDoubleByTimestampAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static AllLastDoubleByTimestampGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new AllLastDoubleByTimestampGroupingAggregatorFunction(channels, AllLastDoubleByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + DoubleBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + DoubleVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + 
} + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + LongVector timestampVector = timestampBlock.asVector(); + if (timestampVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, DoubleBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int 
groupId = groups.getInt(g); + AllLastDoubleByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllLastDoubleByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, DoubleBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + 
groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllLastDoubleByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllLastDoubleByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int valuesPosition = groupPosition + positionOffset; + int groupId = groups.getInt(groupPosition); + 
AllLastDoubleByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + AllLastDoubleByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, DoubleBlock valueBlock, + LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + 
GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = AllLastDoubleByTimestampAggregator.evaluateFinal(state, selected, ctx); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregatorFunction.java new file mode 100644 index 0000000000000..548ff1e5087ac --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregatorFunction.java @@ -0,0 +1,157 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.FloatVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link AllLastFloatByTimestampAggregator}. 
+ * This class is generated. Edit {@code AggregatorImplementer} instead. + */ +public final class AllLastFloatByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.FLOAT), + new IntermediateStateDesc("seen", ElementType.BOOLEAN), + new IntermediateStateDesc("hasValue", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final AllLongFloatState state; + + private final List channels; + + public AllLastFloatByTimestampAggregatorFunction(DriverContext driverContext, + List channels, AllLongFloatState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static AllLastFloatByTimestampAggregatorFunction create(DriverContext driverContext, + List channels) { + return new AllLastFloatByTimestampAggregatorFunction(driverContext, channels, AllLastFloatByTimestampAggregator.initSingle(driverContext)); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + FloatBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + FloatBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + private void 
addRawBlock(FloatBlock valueBlock, LongBlock timestampBlock) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + AllLastFloatByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + private void addRawBlock(FloatBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + AllLastFloatByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + FloatVector values = ((FloatBlock) valuesUncast).asVector(); + assert values.getPositionCount() == 1; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + Block hasValueUncast = page.getBlock(channels.get(3)); + if (hasValueUncast.areAllValuesNull()) { + return; + } + BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector(); + assert hasValue.getPositionCount() == 1; + AllLastFloatByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getFloat(0), seen.getBoolean(0), hasValue.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, 
DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); + return; + } + blocks[offset] = AllLastFloatByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..408678bacf23f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link AllLastFloatByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
+ */ +public final class AllLastFloatByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public AllLastFloatByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return AllLastFloatByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return AllLastFloatByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AllLastFloatByTimestampAggregatorFunction aggregator(DriverContext driverContext, + List channels) { + return AllLastFloatByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public AllLastFloatByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return AllLastFloatByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return AllLastFloatByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..e72baf5e6ebc1 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,312 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
package org.elasticsearch.compute.aggregation;

import java.lang.Integer;
import java.lang.Override;
import java.lang.String;
import java.lang.StringBuilder;
import java.util.List;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.FloatBlock;
import org.elasticsearch.compute.data.FloatVector;
import org.elasticsearch.compute.data.IntArrayBlock;
import org.elasticsearch.compute.data.IntBigArrayBlock;
import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.LongVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;

/**
 * {@link GroupingAggregatorFunction} implementation for {@link AllLastFloatByTimestampAggregator}.
 * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
 */
public final class AllLastFloatByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction {
  // Shape of the intermediate (partial) state: per-group timestamps, values and a seen flag.
  private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
      new IntermediateStateDesc("timestamps", ElementType.LONG),
      new IntermediateStateDesc("values", ElementType.FLOAT),
      new IntermediateStateDesc("hasValues", ElementType.BOOLEAN)  );

  private final AllLastFloatByTimestampAggregator.GroupingState state;

  private final List<Integer> channels;

  private final DriverContext driverContext;

  public AllLastFloatByTimestampGroupingAggregatorFunction(List<Integer> channels,
      AllLastFloatByTimestampAggregator.GroupingState state, DriverContext driverContext) {
    this.channels = channels;
    this.state = state;
    this.driverContext = driverContext;
  }

  /** Builds a function with freshly initialized grouping state. */
  public static AllLastFloatByTimestampGroupingAggregatorFunction create(List<Integer> channels,
      DriverContext driverContext) {
    return new AllLastFloatByTimestampGroupingAggregatorFunction(channels, AllLastFloatByTimestampAggregator.initGrouping(driverContext), driverContext);
  }

  public static List<IntermediateStateDesc> intermediateStateDesc() {
    return INTERMEDIATE_STATE_DESC;
  }

  @Override
  public int intermediateBlockCount() {
    return INTERMEDIATE_STATE_DESC.size();
  }

  /**
   * Picks the raw-input path for {@code page}: block-based paths when either the value
   * or timestamp column has multi-values/nulls; no vector fast path exists for this type.
   */
  @Override
  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
      Page page) {
    FloatBlock valueBlock = page.getBlock(channels.get(0));
    LongBlock timestampBlock = page.getBlock(channels.get(1));
    FloatVector valueVector = valueBlock.asVector();
    if (valueVector == null) {
      maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock);
      return new GroupingAggregatorFunction.AddInput() {
        @Override
        public void add(int positionOffset, IntArrayBlock groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void add(int positionOffset, IntBigArrayBlock groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void add(int positionOffset, IntVector groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void close() {
        }
      };
    }
    LongVector timestampVector = timestampBlock.asVector();
    if (timestampVector == null) {
      maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock);
      return new GroupingAggregatorFunction.AddInput() {
        @Override
        public void add(int positionOffset, IntArrayBlock groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void add(int positionOffset, IntBigArrayBlock groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void add(int positionOffset, IntVector groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void close() {
        }
      };
    }
    return new GroupingAggregatorFunction.AddInput() {
      @Override
      public void add(int positionOffset, IntArrayBlock groupIds) {
        // This type does not support vectors because all values are multi-valued
      }

      @Override
      public void add(int positionOffset, IntBigArrayBlock groupIds) {
        // This type does not support vectors because all values are multi-valued
      }

      @Override
      public void add(int positionOffset, IntVector groupIds) {
        // This type does not support vectors because all values are multi-valued
      }

      @Override
      public void close() {
      }
    };
  }

  private void addRawInput(int positionOffset, IntArrayBlock groups, FloatBlock valueBlock,
      LongBlock timestampBlock) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int valuesPosition = groupPosition + positionOffset;
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        AllLastFloatByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
      }
    }
  }

  @Override
  public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) {
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block timestampsUncast = page.getBlock(channels.get(0));
    if (timestampsUncast.areAllValuesNull()) {
      return;
    }
    LongBlock timestamps = (LongBlock) timestampsUncast;
    Block valuesUncast = page.getBlock(channels.get(1));
    if (valuesUncast.areAllValuesNull()) {
      return;
    }
    FloatBlock values = (FloatBlock) valuesUncast;
    Block hasValuesUncast = page.getBlock(channels.get(2));
    if (hasValuesUncast.areAllValuesNull()) {
      return;
    }
    BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
    assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        int valuesPosition = groupPosition + positionOffset;
        AllLastFloatByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
      }
    }
  }

  private void addRawInput(int positionOffset, IntBigArrayBlock groups, FloatBlock valueBlock,
      LongBlock timestampBlock) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int valuesPosition = groupPosition + positionOffset;
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        AllLastFloatByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
      }
    }
  }

  @Override
  public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) {
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block timestampsUncast = page.getBlock(channels.get(0));
    if (timestampsUncast.areAllValuesNull()) {
      return;
    }
    LongBlock timestamps = (LongBlock) timestampsUncast;
    Block valuesUncast = page.getBlock(channels.get(1));
    if (valuesUncast.areAllValuesNull()) {
      return;
    }
    FloatBlock values = (FloatBlock) valuesUncast;
    Block hasValuesUncast = page.getBlock(channels.get(2));
    if (hasValuesUncast.areAllValuesNull()) {
      return;
    }
    BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
    assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        int valuesPosition = groupPosition + positionOffset;
        AllLastFloatByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
      }
    }
  }

  private void addRawInput(int positionOffset, IntVector groups, FloatBlock valueBlock,
      LongBlock timestampBlock) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      int valuesPosition = groupPosition + positionOffset;
      int groupId = groups.getInt(groupPosition);
      AllLastFloatByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
    }
  }

  @Override
  public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block timestampsUncast = page.getBlock(channels.get(0));
    if (timestampsUncast.areAllValuesNull()) {
      return;
    }
    LongBlock timestamps = (LongBlock) timestampsUncast;
    Block valuesUncast = page.getBlock(channels.get(1));
    if (valuesUncast.areAllValuesNull()) {
      return;
    }
    FloatBlock values = (FloatBlock) valuesUncast;
    Block hasValuesUncast = page.getBlock(channels.get(2));
    if (hasValuesUncast.areAllValuesNull()) {
      return;
    }
    BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
    assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      int groupId = groups.getInt(groupPosition);
      int valuesPosition = groupPosition + positionOffset;
      AllLastFloatByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
    }
  }

  private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, FloatBlock valueBlock,
      LongBlock timestampBlock) {
    if (valueBlock.mayHaveNulls()) {
      state.enableGroupIdTracking(seenGroupIds);
    }
    if (timestampBlock.mayHaveNulls()) {
      state.enableGroupIdTracking(seenGroupIds);
    }
  }

  @Override
  public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) {
    state.enableGroupIdTracking(seenGroupIds);
  }

  @Override
  public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
    state.toIntermediate(blocks, offset, selected, driverContext);
  }

  @Override
  public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
      GroupingAggregatorEvaluationContext ctx) {
    blocks[offset] = AllLastFloatByTimestampAggregator.evaluateFinal(state, selected, ctx);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName()).append("[");
    sb.append("channels=").append(channels);
    sb.append("]");
    return sb.toString();
  }

  @Override
  public void close() {
    state.close();
  }
}
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License
// 2.0; you may not use this file except in compliance with the Elastic License
// 2.0.
package org.elasticsearch.compute.aggregation;

import java.lang.Integer;
import java.lang.Override;
import java.lang.String;
import java.lang.StringBuilder;
import java.util.List;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.BooleanVector;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.LongVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;

/**
 * {@link AggregatorFunction} implementation for {@link AllLastIntByTimestampAggregator}.
 * This class is generated. Edit {@code AggregatorImplementer} instead.
 */
public final class AllLastIntByTimestampAggregatorFunction implements AggregatorFunction {
  // Single-valued intermediate state: timestamp, value, seen flag and has-value flag.
  private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
      new IntermediateStateDesc("timestamps", ElementType.LONG),
      new IntermediateStateDesc("values", ElementType.INT),
      new IntermediateStateDesc("seen", ElementType.BOOLEAN),
      new IntermediateStateDesc("hasValue", ElementType.BOOLEAN)  );

  private final DriverContext driverContext;

  private final AllLongIntState state;

  private final List<Integer> channels;

  public AllLastIntByTimestampAggregatorFunction(DriverContext driverContext,
      List<Integer> channels, AllLongIntState state) {
    this.driverContext = driverContext;
    this.channels = channels;
    this.state = state;
  }

  /** Builds a function with freshly initialized single-group state. */
  public static AllLastIntByTimestampAggregatorFunction create(DriverContext driverContext,
      List<Integer> channels) {
    return new AllLastIntByTimestampAggregatorFunction(driverContext, channels, AllLastIntByTimestampAggregator.initSingle(driverContext));
  }

  public static List<IntermediateStateDesc> intermediateStateDesc() {
    return INTERMEDIATE_STATE_DESC;
  }

  @Override
  public int intermediateBlockCount() {
    return INTERMEDIATE_STATE_DESC.size();
  }

  @Override
  public void addRawInput(Page page, BooleanVector mask) {
    if (mask.allFalse()) {
      // Entire page masked away
    } else if (mask.allTrue()) {
      addRawInputNotMasked(page);
    } else {
      addRawInputMasked(page, mask);
    }
  }

  private void addRawInputMasked(Page page, BooleanVector mask) {
    IntBlock valueBlock = page.getBlock(channels.get(0));
    LongBlock timestampBlock = page.getBlock(channels.get(1));
    addRawBlock(valueBlock, timestampBlock, mask);
  }

  private void addRawInputNotMasked(Page page) {
    IntBlock valueBlock = page.getBlock(channels.get(0));
    LongBlock timestampBlock = page.getBlock(channels.get(1));
    addRawBlock(valueBlock, timestampBlock);
  }

  private void addRawBlock(IntBlock valueBlock, LongBlock timestampBlock) {
    for (int p = 0; p < valueBlock.getPositionCount(); p++) {
      AllLastIntByTimestampAggregator.combine(state, p, valueBlock, timestampBlock);
    }
  }

  private void addRawBlock(IntBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) {
    for (int p = 0; p < valueBlock.getPositionCount(); p++) {
      if (mask.getBoolean(p) == false) {
        continue;
      }
      AllLastIntByTimestampAggregator.combine(state, p, valueBlock, timestampBlock);
    }
  }

  @Override
  public void addIntermediateInput(Page page) {
    assert channels.size() == intermediateBlockCount();
    assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
    Block timestampsUncast = page.getBlock(channels.get(0));
    if (timestampsUncast.areAllValuesNull()) {
      return;
    }
    LongVector timestamps = ((LongBlock) timestampsUncast).asVector();
    assert timestamps.getPositionCount() == 1;
    Block valuesUncast = page.getBlock(channels.get(1));
    if (valuesUncast.areAllValuesNull()) {
      return;
    }
    IntVector values = ((IntBlock) valuesUncast).asVector();
    assert values.getPositionCount() == 1;
    Block seenUncast = page.getBlock(channels.get(2));
    if (seenUncast.areAllValuesNull()) {
      return;
    }
    BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
    assert seen.getPositionCount() == 1;
    Block hasValueUncast = page.getBlock(channels.get(3));
    if (hasValueUncast.areAllValuesNull()) {
      return;
    }
    BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector();
    assert hasValue.getPositionCount() == 1;
    AllLastIntByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getInt(0), seen.getBoolean(0), hasValue.getBoolean(0));
  }

  @Override
  public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
    state.toIntermediate(blocks, offset, driverContext);
  }

  @Override
  public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
    if (state.seen() == false) {
      // No input rows were seen; the result is null.
      blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1);
      return;
    }
    blocks[offset] = AllLastIntByTimestampAggregator.evaluateFinal(state, driverContext);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName()).append("[");
    sb.append("channels=").append(channels);
    sb.append("]");
    return sb.toString();
  }

  @Override
  public void close() {
    state.close();
  }
}
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License
// 2.0; you may not use this file except in compliance with the Elastic License
// 2.0.
package org.elasticsearch.compute.aggregation;

import java.lang.Integer;
import java.lang.Override;
import java.lang.String;
import java.util.List;
import org.elasticsearch.compute.operator.DriverContext;

/**
 * {@link AggregatorFunctionSupplier} implementation for {@link AllLastIntByTimestampAggregator}.
 * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead.
 */
public final class AllLastIntByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
  public AllLastIntByTimestampAggregatorFunctionSupplier() {
  }

  @Override
  public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
    return AllLastIntByTimestampAggregatorFunction.intermediateStateDesc();
  }

  @Override
  public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
    return AllLastIntByTimestampGroupingAggregatorFunction.intermediateStateDesc();
  }

  @Override
  public AllLastIntByTimestampAggregatorFunction aggregator(DriverContext driverContext,
      List<Integer> channels) {
    return AllLastIntByTimestampAggregatorFunction.create(driverContext, channels);
  }

  @Override
  public AllLastIntByTimestampGroupingAggregatorFunction groupingAggregator(
      DriverContext driverContext, List<Integer> channels) {
    return AllLastIntByTimestampGroupingAggregatorFunction.create(channels, driverContext);
  }

  @Override
  public String describe() {
    return AllLastIntByTimestampAggregator.describe();
  }
}
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License
// 2.0; you may not use this file except in compliance with the Elastic License
// 2.0.
package org.elasticsearch.compute.aggregation;

import java.lang.Integer;
import java.lang.Override;
import java.lang.String;
import java.lang.StringBuilder;
import java.util.List;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.IntArrayBlock;
import org.elasticsearch.compute.data.IntBigArrayBlock;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.LongVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;

/**
 * {@link GroupingAggregatorFunction} implementation for {@link AllLastIntByTimestampAggregator}.
 * This class is generated. Edit {@code GroupingAggregatorImplementer} instead.
 */
public final class AllLastIntByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction {
  // Shape of the intermediate (partial) state: per-group timestamps, values and a seen flag.
  private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
      new IntermediateStateDesc("timestamps", ElementType.LONG),
      new IntermediateStateDesc("values", ElementType.INT),
      new IntermediateStateDesc("hasValues", ElementType.BOOLEAN)  );

  private final AllLastIntByTimestampAggregator.GroupingState state;

  private final List<Integer> channels;

  private final DriverContext driverContext;

  public AllLastIntByTimestampGroupingAggregatorFunction(List<Integer> channels,
      AllLastIntByTimestampAggregator.GroupingState state, DriverContext driverContext) {
    this.channels = channels;
    this.state = state;
    this.driverContext = driverContext;
  }

  /** Builds a function with freshly initialized grouping state. */
  public static AllLastIntByTimestampGroupingAggregatorFunction create(List<Integer> channels,
      DriverContext driverContext) {
    return new AllLastIntByTimestampGroupingAggregatorFunction(channels, AllLastIntByTimestampAggregator.initGrouping(driverContext), driverContext);
  }

  public static List<IntermediateStateDesc> intermediateStateDesc() {
    return INTERMEDIATE_STATE_DESC;
  }

  @Override
  public int intermediateBlockCount() {
    return INTERMEDIATE_STATE_DESC.size();
  }

  /**
   * Picks the raw-input path for {@code page}: block-based paths when either the value
   * or timestamp column has multi-values/nulls; no vector fast path exists for this type.
   */
  @Override
  public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
      Page page) {
    IntBlock valueBlock = page.getBlock(channels.get(0));
    LongBlock timestampBlock = page.getBlock(channels.get(1));
    IntVector valueVector = valueBlock.asVector();
    if (valueVector == null) {
      maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock);
      return new GroupingAggregatorFunction.AddInput() {
        @Override
        public void add(int positionOffset, IntArrayBlock groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void add(int positionOffset, IntBigArrayBlock groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void add(int positionOffset, IntVector groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void close() {
        }
      };
    }
    LongVector timestampVector = timestampBlock.asVector();
    if (timestampVector == null) {
      maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock);
      return new GroupingAggregatorFunction.AddInput() {
        @Override
        public void add(int positionOffset, IntArrayBlock groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void add(int positionOffset, IntBigArrayBlock groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void add(int positionOffset, IntVector groupIds) {
          addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
        }

        @Override
        public void close() {
        }
      };
    }
    return new GroupingAggregatorFunction.AddInput() {
      @Override
      public void add(int positionOffset, IntArrayBlock groupIds) {
        // This type does not support vectors because all values are multi-valued
      }

      @Override
      public void add(int positionOffset, IntBigArrayBlock groupIds) {
        // This type does not support vectors because all values are multi-valued
      }

      @Override
      public void add(int positionOffset, IntVector groupIds) {
        // This type does not support vectors because all values are multi-valued
      }

      @Override
      public void close() {
      }
    };
  }

  private void addRawInput(int positionOffset, IntArrayBlock groups, IntBlock valueBlock,
      LongBlock timestampBlock) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int valuesPosition = groupPosition + positionOffset;
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        AllLastIntByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
      }
    }
  }

  @Override
  public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) {
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block timestampsUncast = page.getBlock(channels.get(0));
    if (timestampsUncast.areAllValuesNull()) {
      return;
    }
    LongBlock timestamps = (LongBlock) timestampsUncast;
    Block valuesUncast = page.getBlock(channels.get(1));
    if (valuesUncast.areAllValuesNull()) {
      return;
    }
    IntBlock values = (IntBlock) valuesUncast;
    Block hasValuesUncast = page.getBlock(channels.get(2));
    if (hasValuesUncast.areAllValuesNull()) {
      return;
    }
    BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
    assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        int valuesPosition = groupPosition + positionOffset;
        AllLastIntByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
      }
    }
  }

  private void addRawInput(int positionOffset, IntBigArrayBlock groups, IntBlock valueBlock,
      LongBlock timestampBlock) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int valuesPosition = groupPosition + positionOffset;
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        AllLastIntByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
      }
    }
  }

  @Override
  public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) {
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block timestampsUncast = page.getBlock(channels.get(0));
    if (timestampsUncast.areAllValuesNull()) {
      return;
    }
    LongBlock timestamps = (LongBlock) timestampsUncast;
    Block valuesUncast = page.getBlock(channels.get(1));
    if (valuesUncast.areAllValuesNull()) {
      return;
    }
    IntBlock values = (IntBlock) valuesUncast;
    Block hasValuesUncast = page.getBlock(channels.get(2));
    if (hasValuesUncast.areAllValuesNull()) {
      return;
    }
    BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
    assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        int valuesPosition = groupPosition + positionOffset;
        AllLastIntByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
      }
    }
  }

  private void addRawInput(int positionOffset, IntVector groups, IntBlock valueBlock,
      LongBlock timestampBlock) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      int valuesPosition = groupPosition + positionOffset;
      int groupId = groups.getInt(groupPosition);
      AllLastIntByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
    }
  }

  @Override
  public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block timestampsUncast = page.getBlock(channels.get(0));
    if (timestampsUncast.areAllValuesNull()) {
      return;
    }
    LongBlock timestamps = (LongBlock) timestampsUncast;
    Block valuesUncast = page.getBlock(channels.get(1));
    if (valuesUncast.areAllValuesNull()) {
      return;
    }
    IntBlock values = (IntBlock) valuesUncast;
    Block hasValuesUncast = page.getBlock(channels.get(2));
    if (hasValuesUncast.areAllValuesNull()) {
      return;
    }
    BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
    assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      int groupId = groups.getInt(groupPosition);
      int valuesPosition = groupPosition + positionOffset;
      AllLastIntByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
    }
  }

  private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, IntBlock valueBlock,
      LongBlock timestampBlock) {
    if (valueBlock.mayHaveNulls()) {
      state.enableGroupIdTracking(seenGroupIds);
    }
    if (timestampBlock.mayHaveNulls()) {
      state.enableGroupIdTracking(seenGroupIds);
    }
  }

  @Override
  public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) {
    state.enableGroupIdTracking(seenGroupIds);
  }

  @Override
  public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
    state.toIntermediate(blocks, offset, selected, driverContext);
  }

  @Override
  public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
      GroupingAggregatorEvaluationContext ctx) {
    blocks[offset] = AllLastIntByTimestampAggregator.evaluateFinal(state, selected, ctx);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName()).append("[");
    sb.append("channels=").append(channels);
    sb.append("]");
    return sb.toString();
  }

  @Override
  public void close() {
    state.close();
  }
}
+ */ +public final class AllLastLongByTimestampAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.LONG), + new IntermediateStateDesc("seen", ElementType.BOOLEAN), + new IntermediateStateDesc("hasValue", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final AllLongLongState state; + + private final List channels; + + public AllLastLongByTimestampAggregatorFunction(DriverContext driverContext, + List channels, AllLongLongState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static AllLastLongByTimestampAggregatorFunction create(DriverContext driverContext, + List channels) { + return new AllLastLongByTimestampAggregatorFunction(driverContext, channels, AllLastLongByTimestampAggregator.initSingle(driverContext)); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + } else if (mask.allTrue()) { + addRawInputNotMasked(page); + } else { + addRawInputMasked(page, mask); + } + } + + private void addRawInputMasked(Page page, BooleanVector mask) { + LongBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock, mask); + } + + private void addRawInputNotMasked(Page page) { + LongBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + addRawBlock(valueBlock, timestampBlock); + } + + private void addRawBlock(LongBlock valueBlock, LongBlock timestampBlock) { + for (int p = 0; p < 
valueBlock.getPositionCount(); p++) { + AllLastLongByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + private void addRawBlock(LongBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) { + for (int p = 0; p < valueBlock.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + AllLastLongByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); + assert timestamps.getPositionCount() == 1; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongVector values = ((LongBlock) valuesUncast).asVector(); + assert values.getPositionCount() == 1; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; + Block hasValueUncast = page.getBlock(channels.get(3)); + if (hasValueUncast.areAllValuesNull()) { + return; + } + BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector(); + assert hasValue.getPositionCount() == 1; + AllLastLongByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getLong(0), seen.getBoolean(0), hasValue.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = 
driverContext.blockFactory().newConstantNullBlock(1); + return; + } + blocks[offset] = AllLastLongByTimestampAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..5a5193028ecea --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link AllLastLongByTimestampAggregator}. + * This class is generated. Edit {@code AggregatorFunctionSupplierImplementer} instead. 
+ */ +public final class AllLastLongByTimestampAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + public AllLastLongByTimestampAggregatorFunctionSupplier() { + } + + @Override + public List nonGroupingIntermediateStateDesc() { + return AllLastLongByTimestampAggregatorFunction.intermediateStateDesc(); + } + + @Override + public List groupingIntermediateStateDesc() { + return AllLastLongByTimestampGroupingAggregatorFunction.intermediateStateDesc(); + } + + @Override + public AllLastLongByTimestampAggregatorFunction aggregator(DriverContext driverContext, + List channels) { + return AllLastLongByTimestampAggregatorFunction.create(driverContext, channels); + } + + @Override + public AllLastLongByTimestampGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext, List channels) { + return AllLastLongByTimestampGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return AllLastLongByTimestampAggregator.describe(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..6c5283cfddac9 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampGroupingAggregatorFunction.java @@ -0,0 +1,310 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link AllLastLongByTimestampAggregator}. + * This class is generated. Edit {@code GroupingAggregatorImplementer} instead. + */ +public final class AllLastLongByTimestampGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.LONG), + new IntermediateStateDesc("hasValues", ElementType.BOOLEAN) ); + + private final AllLastLongByTimestampAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public AllLastLongByTimestampGroupingAggregatorFunction(List channels, + AllLastLongByTimestampAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static AllLastLongByTimestampGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new AllLastLongByTimestampGroupingAggregatorFunction(channels, AllLastLongByTimestampAggregator.initGrouping(driverContext), driverContext); + } + + public static List intermediateStateDesc() { + return 
INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valueBlock = page.getBlock(channels.get(0)); + LongBlock timestampBlock = page.getBlock(channels.get(1)); + LongVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + LongVector timestampVector = timestampBlock.asVector(); + if (timestampVector == null) { + maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valueBlock, timestampBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntArrayBlock groupIds) { + // This type does not support vectors because all values are 
multi-valued + } + + @Override + public void add(int positionOffset, IntBigArrayBlock groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + // This type does not support vectors because all values are multi-valued + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntArrayBlock groups, LongBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllLastLongByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if 
(groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllLastLongByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntBigArrayBlock groups, LongBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int valuesPosition = groupPosition + positionOffset; + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + AllLastLongByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition 
< groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + int valuesPosition = groupPosition + positionOffset; + AllLastLongByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock valueBlock, + LongBlock timestampBlock) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int valuesPosition = groupPosition + positionOffset; + int groupId = groups.getInt(groupPosition); + AllLastLongByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock); + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + Block hasValuesUncast = page.getBlock(channels.get(2)); + if (hasValuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock hasValues = (BooleanBlock) hasValuesUncast; + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + int valuesPosition = groupPosition + positionOffset; + 
AllLastLongByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition); + } + } + + private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, LongBlock valueBlock, + LongBlock timestampBlock) { + if (valueBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + if (timestampBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + GroupingAggregatorEvaluationContext ctx) { + blocks[offset] = AllLastLongByTimestampAggregator.evaluateFinal(state, selected, ctx); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-2State.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-2State.java.st new file mode 100644 index 0000000000000..9905f90f1ab55 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-2State.java.st @@ -0,0 +1,104 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * Aggregator state for a single {@code $v1_type$} and a single {@code $v2_type$}, with support for null v2 values. + * This class is generated. Edit {@code All-X-2State.java.st} instead. + */ +final class All$v1_Type$$v2_Type$State implements AggregatorState { + // the timestamp + private $v1_type$ v1; + + // the value + $if(v2_BytesRef)$ + private final BreakingBytesRefBuilder v2; + $else$ + private $v2_type$ v2; + $endif$ + + // whether we've seen a first/last timestamp + private boolean seen; + + // because we might observe a first/last timestamp without observing a value (e.g.: value was null) + private boolean v2Seen; + + All$v1_Type$$v2_Type$State($v1_type$ v1, $v2_type$ v2$if(v2_BytesRef)$, CircuitBreaker breaker, String label$endif$) { + this.v1 = v1; + $if(v2_BytesRef)$ + this.v2 = new BreakingBytesRefBuilder(breaker, label, v2.length); + this.v2.copyBytes(v2); + $else$ + this.v2 = v2; + $endif$ + } + + $v1_type$ v1() { + return v1; + } + + void v1($v1_type$ v1) { + this.v1 = v1; + } + + $v2_type$ v2() { + return v2$if(v2_BytesRef)$.bytesRefView()$endif$; + } + + void v2($v2_type$ v2) { + $if(v2_BytesRef)$ + this.v2.copyBytes(v2); + $else$ + this.v2 = v2; + $endif$ + } + + boolean seen() { + return seen; + } + + void seen(boolean seen) { + this.seen = seen; + } + + boolean v2Seen() { + return v2Seen; + } + + void v2Seen(boolean v2Seen) { + this.v2Seen = v2Seen; + } + + /** Extracts an intermediate view of the contents of this state. 
*/ + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 4; + blocks[offset + 0] = driverContext.blockFactory().newConstant$v1_Type$BlockWith(v1, 1); + blocks[offset + 1] = driverContext.blockFactory().newConstant$v2_Type$BlockWith(v2$if(v2_BytesRef)$.bytesRefView()$endif$, 1); + blocks[offset + 2] = driverContext.blockFactory().newConstantBooleanBlockWith(seen, 1); + blocks[offset + 3] = driverContext.blockFactory().newConstantBooleanBlockWith(v2Seen, 1); + } + + @Override + $if(v2_BytesRef)$ + public void close() { + Releasables.close(this.v2); + } + $else$ + public void close() {} + $endif$ +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-ValueByTimestampAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-ValueByTimestampAggregator.java.st new file mode 100644 index 0000000000000..51b7cfaae5045 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-ValueByTimestampAggregator.java.st @@ -0,0 +1,318 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +// begin generated imports +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.common.util.$Type$Array; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.ann.Position; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; +// end generated imports + +/** + * A time-series aggregation function that collects the $Occurrence$ occurrence value of a time series in a specified interval. + * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "timestamps", type = "LONG"), + @IntermediateState(name = "values", type = "$TYPE$"), + @IntermediateState(name = "seen", type = "BOOLEAN"), + @IntermediateState(name = "hasValue", type = "BOOLEAN") } +) +@GroupingAggregator( + { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "$TYPE$_BLOCK"), + @IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") } +) +public class $Prefix$$Occurrence$$Type$ByTimestampAggregator { + public static String describe() { + return "$occurrence$_$type$_by_timestamp"; + } + + public static $Prefix$Long$Type$State initSingle(DriverContext driverContext) { + $if(BytesRef)$ + return new $Prefix$LongBytesRefState(0, new BytesRef(), driverContext.breaker(), describe()); + $else$ + return new $Prefix$Long$Type$State(0, 0); + $endif$ + } + + private static void first($Prefix$Long$Type$State current, long timestamp, $type$ value, boolean v2Seen) { + current.seen(true); + current.v1(timestamp); + $if(BytesRef)$ + current.v2(v2Seen ? value : new BytesRef()); + $else$ + current.v2(v2Seen ? value : 0); + $endif$ + current.v2Seen(v2Seen); + } + + public static void combine($Prefix$Long$Type$State current, @Position int position, $Type$Block value, LongBlock timestamp) { + if (current.seen() == false) { + // We never observed a value before so we'll take this right in, no questions asked. 
+ $if(BytesRef)$ + BytesRef bytesScratch = new BytesRef(); + first(current, timestamp.getLong(position), value.getBytesRef(position, bytesScratch), value.isNull(position) == false); + $else$ + first(current, timestamp.getLong(position), value.get$Type$(position), value.isNull(position) == false); + $endif$ + return; + } + + long ts = timestamp.getLong(position); + if (ts $if(First)$<$else$>$endif$ current.v1()) { + // timestamp and seen flag are updated in all cases + current.v1(ts); + current.seen(true); + if (value.isNull(position) == false) { + // non-null value + $if(BytesRef)$ + BytesRef bytesScratch = new BytesRef(); + current.v2(value.getBytesRef(position, bytesScratch)); + $else$ + current.v2(value.get$Type$(position)); + $endif$ + current.v2Seen(true); + } else { + // null value + current.v2Seen(false); + } + } + } + + public static void combineIntermediate($Prefix$Long$Type$State current, long timestamp, $type$ value, boolean seen, boolean v2Seen) { + if (seen) { + if (current.seen()) { + if (timestamp $if(First)$<$else$>$endif$ current.v1()) { + // A newer timestamp has been observed in the reporting shard so we must update internal state + current.v1(timestamp); + current.v2(value); + current.v2Seen(v2Seen); + } + } else { + current.v1(timestamp); + current.v2(value); + current.seen(true); + current.v2Seen(v2Seen); + } + } + } + + public static Block evaluateFinal($Prefix$Long$Type$State current, DriverContext ctx) { + if (current.v2Seen()) { + return ctx.blockFactory().newConstant$Type$BlockWith(current.v2(), 1); + } else { + return ctx.blockFactory().newConstantNullBlock(1); + } + } + + public static GroupingState initGrouping(DriverContext driverContext) { + $if(BytesRef)$ + return new GroupingState(driverContext.bigArrays(), driverContext.breaker()); + $else$ + return new GroupingState(driverContext.bigArrays()); + $endif$ + } + + public static void combine(GroupingState current, int groupId, @Position int position, $Type$Block value, LongBlock 
timestamp) { + boolean hasValue = value.isNull(position) == false; + $if(BytesRef)$ + BytesRef bytesScratch = new BytesRef(); + current.collectValue(groupId, timestamp.getLong(position), value.getBytesRef(position, bytesScratch), hasValue); + $else$ + current.collectValue(groupId, timestamp.getLong(position), value.get$Type$(position), hasValue); + $endif$ + } + + public static void combineIntermediate( + GroupingState current, + int groupId, + LongBlock timestamps, + $Type$Block values, + BooleanBlock hasValues, + int otherPosition + ) { + // TODO seen should probably be part of the intermediate representation + int valueCount = values.getValueCount(otherPosition); + if (valueCount > 0) { + long timestamp = timestamps.getLong(timestamps.getFirstValueIndex(otherPosition)); + int firstIndex = values.getFirstValueIndex(otherPosition); + boolean hasValueFlag = hasValues.getBoolean(otherPosition); + $if(BytesRef)$ + BytesRef bytesScratch = new BytesRef(); + $endif$ + for (int i = 0; i < valueCount; i++) { + $if(BytesRef)$ + current.collectValue(groupId, timestamp, values.getBytesRef(firstIndex + i, bytesScratch), hasValueFlag); + $else$ + current.collectValue(groupId, timestamp, values.get$Type$(firstIndex + i), hasValueFlag); + $endif$ + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { + return state.evaluateFinal(selected, ctx); + } + + public static final class GroupingState extends AbstractArrayState { + private final BigArrays bigArrays; + private LongArray timestamps; + $if(BytesRef)$ + private ObjectArray values; + $else$ + private $Type$Array values; + $endif$ + $if(BytesRef)$ + private final CircuitBreaker breaker; + $endif$ + private ByteArray hasValues; + private int maxGroupId = -1; + + GroupingState(BigArrays bigArrays$if(BytesRef)$, CircuitBreaker breaker$endif$) { + super(bigArrays); + this.bigArrays = bigArrays; + boolean success = false; + $if(BytesRef)$ + this.breaker = 
breaker; + $endif$ + LongArray timestamps = null; + ByteArray hasValues = null; + try { + timestamps = bigArrays.newLongArray(1, false); + this.timestamps = timestamps; + $if(BytesRef)$ + this.values = bigArrays.newObjectArray(1); + $else$ + this.values = bigArrays.new$Type$Array(1, false); + $endif$ + hasValues = bigArrays.newByteArray(1, false); + this.hasValues = hasValues; + + /* + * Enable group id tracking because we use has hasValue in the + * collection itself to detect the when a value first arrives. + */ + enableGroupIdTracking(new SeenGroupIds.Empty()); + success = true; + } finally { + if (success == false) { + Releasables.close(timestamps, values, hasValues, super::close); + } + } + } + + void collectValue(int groupId, long timestamp, $type$ value, boolean hasVal) { + boolean updated = false; + if (groupId < timestamps.size()) { + // TODO: handle multiple values? + if (groupId > maxGroupId || hasValue(groupId) == false || timestamps.get(groupId) $if(Last)$<$else$>$endif$ timestamp) { + timestamps.set(groupId, timestamp); + updated = true; + } + } else { + timestamps = bigArrays.grow(timestamps, groupId + 1); + timestamps.set(groupId, timestamp); + updated = true; + } + if (updated) { + values = bigArrays.grow(values, groupId + 1); + $if(BytesRef)$ + BreakingBytesRefBuilder builder = values.get(groupId); + if (builder == null) { + builder = new BreakingBytesRefBuilder(breaker, "$Occurrence$", value.length); + } + builder.copyBytes(value); + values.set(groupId, builder); + $else$ + values.set(groupId, value); + $endif$ + hasValues = bigArrays.grow(hasValues, groupId + 1); + hasValues.set(groupId, (byte) (hasVal ? 
1 : 0)); + } + maxGroupId = Math.max(maxGroupId, groupId); + trackGroupId(groupId); + } + + @Override + public void close() { + $if(BytesRef)$ + for (long i = 0; i < values.size(); i++) { + Releasables.close(values.get(i)); + } + $endif$ + Releasables.close(timestamps, values, hasValues, super::close); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + // Creates 3 intermediate state blocks (timestamps, values, hasValue) + try ( + var timestampsBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount()); + var valuesBuilder = driverContext.blockFactory().new$Type$BlockBuilder(selected.getPositionCount()); + var hasValuesBuilder = driverContext.blockFactory().newBooleanBlockBuilder(selected.getPositionCount()) + ) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + timestampsBuilder.appendLong(timestamps.get(group)); + $if(BytesRef)$ + valuesBuilder.append$Type$(values.get(group).bytesRefView()); + $else$ + valuesBuilder.append$Type$(values.get(group)); + $endif$ + hasValuesBuilder.appendBoolean(true); + } else { + timestampsBuilder.appendNull(); + valuesBuilder.appendNull(); + hasValuesBuilder.appendBoolean(false); + } + } + blocks[offset] = timestampsBuilder.build(); + blocks[offset + 1] = valuesBuilder.build(); + blocks[offset + 2] = hasValuesBuilder.build(); + } + } + + Block evaluateFinal(IntVector selected, GroupingAggregatorEvaluationContext evalContext) { + try (var builder = evalContext.blockFactory().new$Type$BlockBuilder(selected.getPositionCount())) { + for (int p = 0; p < selected.getPositionCount(); p++) { + int group = selected.getInt(p); + if (group < timestamps.size() && hasValues.get(group) == 1) { + $if(BytesRef)$ + builder.append$Type$(values.get(group).bytesRefView()); + $else$ + builder.append$Type$(values.get(group)); + $endif$ + } 
else { + builder.appendNull(); + } + } + return builder.build(); + } + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_all_first_all_last.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_all_first_all_last.csv-spec new file mode 100644 index 0000000000000..5ee6458680315 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_all_first_all_last.csv-spec @@ -0,0 +1,85 @@ +first nullable value by timestamp +required_capability: all_first +required_capability: all_last + +ROW row = [ + # @timestamp | name | number + "2023-01-23T00:00:00.000Z | alpha | ", + "2023-01-23T00:00:01.000Z | alpha | 2", + "2023-01-23T00:00:02.000Z | bravo | ", + "2023-01-23T00:00:03.000Z | alpha | 4", + "2023-01-23T00:00:04.000Z | bravo | 5", + "2023-01-23T00:00:05.000Z | charlie | 6", + "2023-01-23T00:00:06.000Z | delta | " +] +| MV_EXPAND row +| DISSECT row """%{@timestamp} | %{name} | %{number}""" +| KEEP @timestamp, name, number +| EVAL @timestamp = TO_DATETIME(@timestamp), + name = TRIM(name), + v1 = TO_LONG(number), + v2 = v1::integer, + v3 = v1::double, + v4 = v1::keyword + +| STATS first_l = ALL_FIRST(v1, @timestamp), + first_i = ALL_FIRST(v2, @timestamp), + first_d = ALL_FIRST(v3, @timestamp), + first_k = ALL_FIRST(v4, @timestamp), + last_l = ALL_LAST(v1, @timestamp), + last_i = ALL_LAST(v2, @timestamp), + last_d = ALL_LAST(v3, @timestamp), + last_k = ALL_LAST(v4, @timestamp) +; + +warning:Line 15:6: evaluation of [TO_LONG(number)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 15:6: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [] + +first_l:long | first_i:integer | first_d:double | first_k:keyword | last_l:long | last_i:integer | last_d:double | last_k:keyword +null | null | null | null | null | null | null | null +; + +first nullable value by timestamp on group +required_capability: all_first +required_capability: all_last + +ROW row = [ + # @timestamp | name | number + "2023-01-23T00:00:00.000Z | alpha | ", + "2023-01-23T00:00:01.000Z | alpha | 2", + "2023-01-23T00:00:02.000Z | bravo | ", + "2023-01-23T00:00:03.000Z | alpha | 4", + "2023-01-23T00:00:04.000Z | bravo | 5", + "2023-01-23T00:00:05.000Z | charlie | 6", + "2023-01-23T00:00:06.000Z | delta | " +] +| MV_EXPAND row +| DISSECT row """%{@timestamp} | %{name} | %{number}""" +| KEEP @timestamp, name, number +| EVAL @timestamp = TO_DATETIME(@timestamp), + name = TRIM(name), + v1 = TO_LONG(number), + v2 = v1::integer, + v3 = v1::double, + v4 = v1::keyword +| STATS + first_l = ALL_FIRST(v1, @timestamp), + first_i = ALL_FIRST(v2, @timestamp), + first_d = ALL_FIRST(v3, @timestamp), + first_k = ALL_FIRST(v4, @timestamp), + last_l = ALL_LAST(v1, @timestamp), + last_i = ALL_LAST(v2, @timestamp), + last_d = ALL_LAST(v3, @timestamp), + last_k = ALL_LAST(v4, @timestamp) BY name +| SORT name ASC +; + +warning:Line 15:6: evaluation of [TO_LONG(number)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 15:6: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [] + +first_l:long | first_i:integer | first_d:double | first_k:keyword | last_l:long | last_i:integer | last_d:double | last_k:keyword | name:keyword +null | null | null | null | 4 | 4 | 4.0 | 4 | alpha +null | null | null | null | 5 | 5 | 5.0 | 5 | bravo +6 | 6 | 6.0 | 6 | 6 | 6 | 6.0 | 6 | charlie +null | null | null | null | null | null | null | null | delta +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_first.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_first.csv-spec index d58537f550e64..149aa668f11ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_first.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_first.csv-spec @@ -140,62 +140,3 @@ Running cats (cycle 1) | cron Doing java stuff for 192.168.86.038 | java Pinging 192.168.86.046 | ping ; - -first nullable value by timestamp -required_capability: all_first - -ROW row = [ - # @timestamp | name | number - "2023-01-23T00:00:00.000Z | alpha | ", - "2023-01-23T00:00:01.000Z | alpha | 2", - "2023-01-23T00:00:02.000Z | bravo | ", - "2023-01-23T00:00:03.000Z | alpha | 4", - "2023-01-23T00:00:04.000Z | bravo | 5", - "2023-01-23T00:00:05.000Z | charlie | 6" -] -| MV_EXPAND row -| DISSECT row """%{@timestamp} | %{name} | %{number}""" -| KEEP @timestamp, name, number -| EVAL number = TO_LONG(number), - name = TRIM(name), - @timestamp = TO_DATETIME(@timestamp) -| STATS number = ALL_FIRST(number, @timestamp) -; - -warning:Line 12:17: evaluation of [TO_LONG(number)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 12:17: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [] - -number:long -null -; - -first nullable value by timestamp on group -required_capability: all_first - -ROW row = [ - # @timestamp | name | number - "2023-01-23T00:00:00.000Z | alpha | ", - "2023-01-23T00:00:01.000Z | alpha | 2", - "2023-01-23T00:00:02.000Z | bravo | ", - "2023-01-23T00:00:03.000Z | alpha | 4", - "2023-01-23T00:00:04.000Z | bravo | 5", - "2023-01-23T00:00:05.000Z | charlie | 6" -] -| MV_EXPAND row -| DISSECT row """%{@timestamp} | %{name} | %{number}""" -| KEEP @timestamp, name, number -| EVAL number = TO_LONG(number), - name = TRIM(name), - @timestamp = TO_DATETIME(@timestamp) -| STATS number = ALL_FIRST(number, @timestamp) BY name -| SORT name ASC -; - -warning:Line 12:17: evaluation of [TO_LONG(number)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 12:17: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [] - -number:long | name:keyword -null | alpha -null | bravo -6 | charlie -; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index d900e92de0c3f..4458955fcd634 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -1662,6 +1662,8 @@ public enum Cap { */ ALL_FIRST(Build.current().isSnapshot()), + ALL_LAST(Build.current().isSnapshot()), + /** * Allow ST_EXTENT_AGG to gracefully handle missing spatial shapes */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index f95b8ec48daac..a33bd9b4e0a1c 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Absent; import org.elasticsearch.xpack.esql.expression.function.aggregate.AbsentOverTime; import org.elasticsearch.xpack.esql.expression.function.aggregate.AllFirst; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AllLast; import org.elasticsearch.xpack.esql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.esql.expression.function.aggregate.AvgOverTime; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; @@ -561,6 +562,7 @@ private static FunctionDefinition[][] snapshotFunctions() { def(Delay.class, Delay::new, "delay"), def(First.class, bi(First::new), "first"), def(AllFirst.class, bi(AllFirst::new), "all_first"), + def(AllLast.class, bi(AllLast::new), "all_last"), def(Last.class, bi(Last::new), "last"), def(Term.class, bi(Term::new), "term"), def(CosineSimilarity.class, CosineSimilarity::new, "v_cosine"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllFirst.java index 18b5aef2ca588..05783bc3ff8c0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllFirst.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllFirst.java @@ -10,6 +10,10 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AllFirstBytesRefByTimestampAggregatorFunctionSupplier; +import 
org.elasticsearch.compute.aggregation.AllFirstDoubleByTimestampAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AllFirstFloatByTimestampAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AllFirstIntByTimestampAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.AllFirstLongByTimestampAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -33,9 +37,6 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -/** - * This class only supports the long type for now, but that'll change after templating kicks in. - */ public class AllFirst extends AggregateFunction implements ToAggregator { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, @@ -45,20 +46,19 @@ public class AllFirst extends AggregateFunction implements ToAggregator { private final Expression sort; - // TODO: support all types of values @FunctionInfo( type = FunctionType.AGGREGATE, preview = true, - returnType = { "long"/*, "integer", "double", "keyword" */ }, + returnType = { "long", "integer", "double", "keyword" }, description = "Calculates the earliest value of a field, and can operate on null values.", appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.DEVELOPMENT) }, - examples = @Example(file = "all_first", tag = "all_first") + examples = @Example(file = "stats_all_first_all_last", tag = "all_first") ) public AllFirst( Source source, @Param( name = "value", - type = { "long"/*, "integer", "double", "keyword", "text" */ }, + type = { "long", "integer", "double", "keyword", "text" }, description = "Values to return" ) Expression field, @Param(name = "sort", type = { "date", "date_nanos" }, description = "Sort key") Expression sort 
@@ -145,10 +145,10 @@ public AggregatorFunctionSupplier supplier() { final DataType type = field().dataType(); return switch (type) { case LONG -> new AllFirstLongByTimestampAggregatorFunctionSupplier(); - // case INTEGER -> new AllFirstIntByTimestampAggregatorFunctionSupplier(); - // case DOUBLE -> new AllFirstDoubleByTimestampAggregatorFunctionSupplier(); - // case FLOAT -> new AllFirstFloatByTimestampAggregatorFunctionSupplier(); - // case KEYWORD, TEXT -> new AllFirstBytesRefByTimestampAggregatorFunctionSupplier(); + case INTEGER -> new AllFirstIntByTimestampAggregatorFunctionSupplier(); + case DOUBLE -> new AllFirstDoubleByTimestampAggregatorFunctionSupplier(); + case FLOAT -> new AllFirstFloatByTimestampAggregatorFunctionSupplier(); + case KEYWORD, TEXT -> new AllFirstBytesRefByTimestampAggregatorFunctionSupplier(); default -> throw EsqlIllegalArgumentException.illegalDataType(type); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllLast.java new file mode 100644 index 0000000000000..3939ad9f85e79 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllLast.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AllLastBytesRefByTimestampAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AllLastDoubleByTimestampAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AllLastFloatByTimestampAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AllLastIntByTimestampAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AllLastLongByTimestampAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionAppliesTo; +import org.elasticsearch.xpack.esql.expression.function.FunctionAppliesToLifecycle; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.FunctionType; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.planner.ToAggregator; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; + +public class AllLast extends AggregateFunction implements ToAggregator { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "AllLast", + AllLast::readFrom + ); + + private final Expression sort; + + @FunctionInfo( + type = FunctionType.AGGREGATE, + preview = true, + returnType = { "long", "integer", "double", "keyword" }, + description = "Calculates the latest value of a field, and can operate on null values.", + appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.DEVELOPMENT) }, + examples = @Example(file = "stats_all_first_all_last", tag = "all_last") + ) + public AllLast( + Source source, + @Param( + name = "value", + type = { "long", "integer", "double", "keyword", "text" }, + description = "Values to return" + ) Expression field, + @Param(name = "sort", type = { "date", "date_nanos" }, description = "Sort key") Expression sort + ) { + this(source, field, Literal.TRUE, NO_WINDOW, sort); + } + + private AllLast(Source source, Expression field, Expression filter, Expression window, Expression sort) { + super(source, field, filter, window, List.of(sort)); + this.sort = sort; + } + + private static AllLast readFrom(StreamInput in) throws IOException { + Source source = Source.readFrom((PlanStreamInput) in); + Expression field = in.readNamedWriteable(Expression.class); + Expression filter = in.readNamedWriteable(Expression.class); + Expression window = readWindow(in); + List params = in.readNamedWriteableCollectionAsList(Expression.class); + Expression sort = params.getFirst(); + return new AllLast(source, field, filter, window, sort); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, AllLast::new, field(), sort); + } + + @Override + public AllLast replaceChildren(List newChildren) { + return new 
AllLast(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); + } + + @Override + public AllLast withFilter(Expression filter) { + return new AllLast(source(), field(), filter, window(), sort); + } + + public Expression sort() { + return sort; + } + + @Override + public DataType dataType() { + return field().dataType().noText(); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + return isType( + field(), + dt -> dt == DataType.BOOLEAN + || dt == DataType.DATETIME + || DataType.isString(dt) + || (dt.isNumeric() && dt != DataType.UNSIGNED_LONG), + sourceText(), + FIRST, + "boolean", + "date", + "ip", + "string", + "numeric except unsigned_long or counter types" + ).and( + isType( + sort, + dt -> dt == DataType.LONG || dt == DataType.DATETIME || dt == DataType.DATE_NANOS, + sourceText(), + SECOND, + "long or date_nanos or datetime" + ) + ); + } + + @Override + public AggregatorFunctionSupplier supplier() { + final DataType type = field().dataType(); + return switch (type) { + case LONG -> new AllLastLongByTimestampAggregatorFunctionSupplier(); + case INTEGER -> new AllLastIntByTimestampAggregatorFunctionSupplier(); + case DOUBLE -> new AllLastDoubleByTimestampAggregatorFunctionSupplier(); + case FLOAT -> new AllLastFloatByTimestampAggregatorFunctionSupplier(); + case KEYWORD, TEXT -> new AllLastBytesRefByTimestampAggregatorFunctionSupplier(); + default -> throw EsqlIllegalArgumentException.illegalDataType(type); + }; + } + + @Override + public String toString() { + return "all_last(" + field() + ", " + sort + ")"; + } +} From 95f8ec1cacc6b1a068742a480c7f8305bbc617d4 Mon Sep 17 00:00:00 2001 From: Mouhcine Aitounejjar Date: Mon, 17 Nov 2025 09:27:33 -0500 Subject: [PATCH 2/4] Changes: - Rename templates to start with "X-" --- x-pack/plugin/esql/compute/build.gradle | 4 ++-- .../aggregation/{All-X-2State.java.st => 
X-All2State.java.st} | 0 ...egator.java.st => X-AllValueByTimestampAggregator.java.st} | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/{All-X-2State.java.st => X-All2State.java.st} (100%) rename x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/{All-X-ValueByTimestampAggregator.java.st => X-AllValueByTimestampAggregator.java.st} (100%) diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index ab915d38ef153..3622971d457b6 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -514,7 +514,7 @@ tasks.named('stringTemplates').configure { } generateTwoStateFiles(file("src/main/java/org/elasticsearch/compute/aggregation/X-2State.java.st")) - generateTwoStateFiles(file("src/main/java/org/elasticsearch/compute/aggregation/All-X-2State.java.st"), "All") + generateTwoStateFiles(file("src/main/java/org/elasticsearch/compute/aggregation/X-All2State.java.st"), "All") File irateAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-IrateAggregator.java.st") template { @@ -996,7 +996,7 @@ tasks.named('stringTemplates').configure { } generateTimestampAggregatorClasses("src/main/java/org/elasticsearch/compute/aggregation/X-ValueByTimestampAggregator.java.st", "") - generateTimestampAggregatorClasses("src/main/java/org/elasticsearch/compute/aggregation/All-X-ValueByTimestampAggregator.java.st", "All") + generateTimestampAggregatorClasses("src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st", "All") File rateAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-RateGroupingAggregatorFunction.java.st") template { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-2State.java.st 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-All2State.java.st similarity index 100% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-2State.java.st rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-All2State.java.st diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-ValueByTimestampAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st similarity index 100% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/All-X-ValueByTimestampAggregator.java.st rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st From 2e401ffeda7cdf0a49b1f1a844c68c60f7dbae1c Mon Sep 17 00:00:00 2001 From: Mouhcine Aitounejjar Date: Tue, 18 Nov 2025 09:57:59 -0500 Subject: [PATCH 3/4] Changes: - Rename templates to start with "X-" --- .../aggregation/AllFirstBytesRefByTimestampAggregator.java | 2 +- .../aggregation/AllFirstDoubleByTimestampAggregator.java | 2 +- .../compute/aggregation/AllFirstFloatByTimestampAggregator.java | 2 +- .../compute/aggregation/AllFirstIntByTimestampAggregator.java | 2 +- .../compute/aggregation/AllFirstLongByTimestampAggregator.java | 2 +- .../aggregation/AllLastBytesRefByTimestampAggregator.java | 2 +- .../compute/aggregation/AllLastDoubleByTimestampAggregator.java | 2 +- .../compute/aggregation/AllLastFloatByTimestampAggregator.java | 2 +- .../compute/aggregation/AllLastIntByTimestampAggregator.java | 2 +- .../compute/aggregation/AllLastLongByTimestampAggregator.java | 2 +- .../elasticsearch/compute/aggregation/AllLongBytesRefState.java | 2 +- .../elasticsearch/compute/aggregation/AllLongDoubleState.java | 2 +- .../elasticsearch/compute/aggregation/AllLongFloatState.java | 2 +- 
.../org/elasticsearch/compute/aggregation/AllLongIntState.java | 2 +- .../org/elasticsearch/compute/aggregation/AllLongLongState.java | 2 +- .../org/elasticsearch/compute/aggregation/X-All2State.java.st | 2 +- .../compute/aggregation/X-AllValueByTimestampAggregator.java.st | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java index 8079cf86390e6..6eefc0d45a582 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. */ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java index 2dd466e2c48f1..11f3f82409124 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. 
- * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. */ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java index c9defbaf37c36..9c68d7643287b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. */ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java index 374b5d1f556a6..2c4770aff29a1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. 
*/ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java index 7c1f6fb34fb30..b4a4f39afc54c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the First occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. */ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java index 6ece7c3928b98..39a20e0924269 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. 
*/ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java index 2425b0d1aca50..2d4f20daf70e7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. */ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java index e5f5c907817bd..e25b6fb68aaeb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. 
*/ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java index bc767cea68957..3b5ce9dd6a07a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. */ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java index 7d8dcbf0efd9f..9b480fcf493d9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java @@ -31,7 +31,7 @@ /** * A time-series aggregation function that collects the Last occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. 
*/ @Aggregator( { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongBytesRefState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongBytesRefState.java index b6942fffcfc95..d1917a52dd6b0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongBytesRefState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongBytesRefState.java @@ -18,7 +18,7 @@ /** * Aggregator state for a single {@code long} and a single {@code BytesRef}, with support for null v2 values. - * This class is generated. Edit {@code All-X-2State.java.st} instead. + * This class is generated. Edit {@code X-All2State.java.st} instead. */ final class AllLongBytesRefState implements AggregatorState { // the timestamp diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongDoubleState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongDoubleState.java index 40ed40f216434..2f824a74feaaa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongDoubleState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongDoubleState.java @@ -18,7 +18,7 @@ /** * Aggregator state for a single {@code long} and a single {@code double}, with support for null v2 values. - * This class is generated. Edit {@code All-X-2State.java.st} instead. + * This class is generated. Edit {@code X-All2State.java.st} instead. 
*/ final class AllLongDoubleState implements AggregatorState { // the timestamp diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongFloatState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongFloatState.java index adc0d5eaebe07..fb211816fb083 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongFloatState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongFloatState.java @@ -18,7 +18,7 @@ /** * Aggregator state for a single {@code long} and a single {@code float}, with support for null v2 values. - * This class is generated. Edit {@code All-X-2State.java.st} instead. + * This class is generated. Edit {@code X-All2State.java.st} instead. */ final class AllLongFloatState implements AggregatorState { // the timestamp diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongIntState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongIntState.java index bd8a8e618d937..a3ccf24e14606 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongIntState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongIntState.java @@ -18,7 +18,7 @@ /** * Aggregator state for a single {@code long} and a single {@code int}, with support for null v2 values. - * This class is generated. Edit {@code All-X-2State.java.st} instead. + * This class is generated. Edit {@code X-All2State.java.st} instead. 
*/ final class AllLongIntState implements AggregatorState { // the timestamp diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongLongState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongLongState.java index 4540799932c14..8fa28722ede4e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongLongState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLongLongState.java @@ -18,7 +18,7 @@ /** * Aggregator state for a single {@code long} and a single {@code long}, with support for null v2 values. - * This class is generated. Edit {@code All-X-2State.java.st} instead. + * This class is generated. Edit {@code X-All2State.java.st} instead. */ final class AllLongLongState implements AggregatorState { // the timestamp diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-All2State.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-All2State.java.st index 9905f90f1ab55..d69602a753355 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-All2State.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-All2State.java.st @@ -18,7 +18,7 @@ import org.elasticsearch.core.Releasables; /** * Aggregator state for a single {@code $v1_type$} and a single {@code $v2_type$}, with support for null v2 values. - * This class is generated. Edit {@code All-X-2State.java.st} instead. + * This class is generated. Edit {@code X-All2State.java.st} instead. 
*/ final class All$v1_Type$$v2_Type$State implements AggregatorState { // the timestamp diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st index 51b7cfaae5045..535b060cba6bd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st @@ -31,7 +31,7 @@ import org.elasticsearch.core.Releasables; /** * A time-series aggregation function that collects the $Occurrence$ occurrence value of a time series in a specified interval. - * This class is generated. Edit `All-X-ValueByTimestampAggregator.java.st` instead. + * This class is generated. Edit `X-AllValueByTimestampAggregator.java.st` instead. */ @Aggregator( { From fa6c5ea3792ede6fa9c4acf1b0b72df6cc582e50 Mon Sep 17 00:00:00 2001 From: Mouhcine Aitounejjar Date: Tue, 18 Nov 2025 15:13:14 -0500 Subject: [PATCH 4/4] Changes: - Include the "all" prefix in the describe() method. - Add an entry for ALL_LAST in agg writables. 
--- .../aggregation/AllFirstBytesRefByTimestampAggregator.java | 2 +- .../aggregation/AllFirstDoubleByTimestampAggregator.java | 2 +- .../compute/aggregation/AllFirstFloatByTimestampAggregator.java | 2 +- .../compute/aggregation/AllFirstIntByTimestampAggregator.java | 2 +- .../compute/aggregation/AllFirstLongByTimestampAggregator.java | 2 +- .../aggregation/AllLastBytesRefByTimestampAggregator.java | 2 +- .../compute/aggregation/AllLastDoubleByTimestampAggregator.java | 2 +- .../compute/aggregation/AllLastFloatByTimestampAggregator.java | 2 +- .../compute/aggregation/AllLastIntByTimestampAggregator.java | 2 +- .../compute/aggregation/AllLastLongByTimestampAggregator.java | 2 +- .../compute/aggregation/X-AllValueByTimestampAggregator.java.st | 2 +- .../esql/expression/function/aggregate/AggregateWritables.java | 1 + 12 files changed, 12 insertions(+), 11 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java index 6eefc0d45a582..03ee63777cf80 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllFirstBytesRefByTimestampAggregator { public static String describe() { - return "first_BytesRef_by_timestamp"; + return "all_first_BytesRef_by_timestamp"; } public static AllLongBytesRefState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java index 11f3f82409124..b80ba198a2ed1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllFirstDoubleByTimestampAggregator { public static String describe() { - return "first_double_by_timestamp"; + return "all_first_double_by_timestamp"; } public static AllLongDoubleState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java index 9c68d7643287b..81a2be76a7275 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstFloatByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllFirstFloatByTimestampAggregator { public static String describe() { - return "first_float_by_timestamp"; + return "all_first_float_by_timestamp"; } public static AllLongFloatState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java index 2c4770aff29a1..fc9b56740e848 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstIntByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllFirstIntByTimestampAggregator { public static String describe() { - return "first_int_by_timestamp"; + return "all_first_int_by_timestamp"; } public static AllLongIntState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java index b4a4f39afc54c..edc6a49bac1b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstLongByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllFirstLongByTimestampAggregator { public static String describe() { - return "first_long_by_timestamp"; + return "all_first_long_by_timestamp"; } public static AllLongLongState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java index 39a20e0924269..e8fe7e1d14923 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllLastBytesRefByTimestampAggregator { public static String describe() { - return "last_BytesRef_by_timestamp"; + return "all_last_BytesRef_by_timestamp"; } public 
static AllLongBytesRefState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java index 2d4f20daf70e7..bc7c6919f6bbd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastDoubleByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllLastDoubleByTimestampAggregator { public static String describe() { - return "last_double_by_timestamp"; + return "all_last_double_by_timestamp"; } public static AllLongDoubleState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java index e25b6fb68aaeb..1f73b96ca98dc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastFloatByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllLastFloatByTimestampAggregator { public static String describe() { - return "last_float_by_timestamp"; + return "all_last_float_by_timestamp"; } public static AllLongFloatState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java index 3b5ce9dd6a07a..2c811f7d62f13 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastIntByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllLastIntByTimestampAggregator { public static String describe() { - return "last_int_by_timestamp"; + return "all_last_int_by_timestamp"; } public static AllLongIntState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java index 9b480fcf493d9..7bfd55c35ac88 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregator.java @@ -48,7 +48,7 @@ ) public class AllLastLongByTimestampAggregator { public static String describe() { - return "last_long_by_timestamp"; + return "all_last_long_by_timestamp"; } public static AllLongLongState initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st index 535b060cba6bd..1c3bc7b58ef83 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-AllValueByTimestampAggregator.java.st @@ -48,7 +48,7 @@ import org.elasticsearch.core.Releasables; ) public class $Prefix$$Occurrence$$Type$ByTimestampAggregator { public static String describe() { - return "$occurrence$_$type$_by_timestamp"; + return "all_$occurrence$_$type$_by_timestamp"; } public static $Prefix$Long$Type$State initSingle(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java index d486c1054b0f8..9867539dd88a4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java @@ -42,6 +42,7 @@ public static List getNamedWriteables() { MaxOverTime.ENTRY, AvgOverTime.ENTRY, Last.ENTRY, + AllLast.ENTRY, LastOverTime.ENTRY, FirstOverTime.ENTRY, SumOverTime.ENTRY,