Serialize top-level pipeline aggs as part of InternalAggregations (#40177)

We currently convert pipeline aggregators to their corresponding
InternalAggregation instance as part of the final reduction phase.
They arrive at the coordinating node as part of QuerySearchResult
objects from the shards and, although we may incrementally reduce
aggs (hence we may have some non-final reduces before the final
one), all the reduction phases happen on the same node.

With CCS minimizing roundtrips, though, each cluster performs its
own non-final reduction and then serializes the results back to
the CCS coordinating node, which performs the final reduction.
This breaks the assumption made up until now that all reductions
happen on the same node.

With #40101 we have made sure that top-level pipeline aggs are not
reduced as part of the non-final reduction. The next step is to make
sure that they don't get lost, meaning that each coordinating node
needs to send them back to the CCS coordinating node as part of
the top-level `InternalAggregations` object.
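For illustration, here is a minimal sketch of the two reduction phases after this
change, written in the style of the new InternalAggregationsTests added below. The
ReductionPhasesSketch class and twoPhaseReduce helper are made up for the example,
and the null ReduceContext arguments stand in for the BigArrays and ScriptService
dependencies, which are not exercised here:

import java.util.Collections;
import java.util.List;

import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator;

class ReductionPhasesSketch {
    static InternalAggregations twoPhaseReduce(List<InternalAggregations> clusterResults,
                                               List<SiblingPipelineAggregator> topLevelPipelineAggs) {
        // Non-final reduction, as each cluster performs it when CCS minimizes roundtrips:
        // top-level pipeline aggregators are left untouched and carried along, so they can
        // be serialized back to the CCS coordinating node with the partial result.
        InternalAggregation.ReduceContext nonFinalContext = new InternalAggregation.ReduceContext(null, null, false);
        InternalAggregations partiallyReduced = InternalAggregations.reduce(clusterResults, topLevelPipelineAggs, nonFinalContext);

        // Final reduction on the CCS coordinating node: the pipeline aggregators extracted
        // from the first InternalAggregations object are executed, and their output joins
        // the list of InternalAggregation results.
        InternalAggregation.ReduceContext finalContext = new InternalAggregation.ReduceContext(null, null, true);
        return InternalAggregations.reduce(Collections.singletonList(partiallyReduced), finalContext);
    }
}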

Closes #40059
javanna committed Mar 19, 2019
1 parent 7759144 commit 6356644
Showing 4 changed files with 235 additions and 19 deletions.
server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java
@@ -18,18 +18,22 @@
*/
package org.elasticsearch.search.aggregations;

+import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator;

import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;

import static java.util.Collections.emptyMap;

@@ -49,6 +53,8 @@ public final class InternalAggregations extends Aggregations implements Streamable
}
};

+private List<SiblingPipelineAggregator> topLevelPipelineAggregators = Collections.emptyList();

private InternalAggregations() {
}

@@ -60,18 +66,42 @@ public InternalAggregations(List<InternalAggregation> aggregations) {
}

/**
-* Reduces the given list of aggregations
+* Constructs a new aggregation providing its {@link InternalAggregation}s and {@link SiblingPipelineAggregator}s
*/
+public InternalAggregations(List<InternalAggregation> aggregations, List<SiblingPipelineAggregator> topLevelPipelineAggregators) {
+    super(aggregations);
+    this.topLevelPipelineAggregators = Objects.requireNonNull(topLevelPipelineAggregators);
+}

+/**
+ * Returns the top-level pipeline aggregators.
+ * Note that top-level pipeline aggregators become normal aggregations once the final reduction has been performed, after which
+ * they become part of the list of {@link InternalAggregation}s.
+ */
+List<SiblingPipelineAggregator> getTopLevelPipelineAggregators() {
+    return topLevelPipelineAggregators;
+}

+/**
+ * Reduces the given list of aggregations as well as the top-level pipeline aggregators extracted from the first
+ * {@link InternalAggregations} object found in the list.
+ * Note that top-level pipeline aggregators are reduced only as part of the final reduction phase, otherwise they are left untouched.
+ */
-public static InternalAggregations reduce(List<InternalAggregations> aggregationsList, ReduceContext context) {
-    return reduce(aggregationsList, null, context);
+public static InternalAggregations reduce(List<InternalAggregations> aggregationsList,
+                                          ReduceContext context) {
+    if (aggregationsList.isEmpty()) {
+        return null;
+    }
+    InternalAggregations first = aggregationsList.get(0);
+    return reduce(aggregationsList, first.topLevelPipelineAggregators, context);
}

/**
-* Reduces the given list of aggregations as well as the provided sibling pipeline aggregators.
-* Note that sibling pipeline aggregators are ignored when non final reduction is performed.
+* Reduces the given list of aggregations as well as the provided top-level pipeline aggregators.
+* Note that top-level pipeline aggregators are reduced only as part of the final reduction phase, otherwise they are left untouched.
*/
public static InternalAggregations reduce(List<InternalAggregations> aggregationsList,
-List<SiblingPipelineAggregator> siblingPipelineAggregators,
+List<SiblingPipelineAggregator> topLevelPipelineAggregators,
ReduceContext context) {
if (aggregationsList.isEmpty()) {
return null;
@@ -98,15 +128,14 @@ public static InternalAggregations reduce(List<InternalAggregations> aggregationsList,
reducedAggregations.add(first.reduce(aggregations, context));
}

-if (siblingPipelineAggregators != null) {
-    if (context.isFinalReduce()) {
-        for (SiblingPipelineAggregator pipelineAggregator : siblingPipelineAggregators) {
-            InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(reducedAggregations), context);
-            reducedAggregations.add(newAgg);
-        }
-return new InternalAggregations(reducedAggregations);
+if (context.isFinalReduce()) {
+    for (SiblingPipelineAggregator pipelineAggregator : topLevelPipelineAggregators) {
+        InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(reducedAggregations), context);
+        reducedAggregations.add(newAgg);
+    }
+    return new InternalAggregations(reducedAggregations);
+}
+return new InternalAggregations(reducedAggregations, topLevelPipelineAggregators);
}

public static InternalAggregations readAggregations(StreamInput in) throws IOException {
@@ -121,11 +150,20 @@ public void readFrom(StreamInput in) throws IOException {
if (aggregations.isEmpty()) {
aggregationsAsMap = emptyMap();
}
+if (in.getVersion().onOrAfter(Version.V_6_7_0)) {
+    this.topLevelPipelineAggregators = in.readList(
+        stream -> (SiblingPipelineAggregator) in.readNamedWriteable(PipelineAggregator.class));
+} else {
+    this.topLevelPipelineAggregators = Collections.emptyList();
+}
}

@Override
@SuppressWarnings("unchecked")
public void writeTo(StreamOutput out) throws IOException {
out.writeNamedWriteableList((List<InternalAggregation>)aggregations);
+if (out.getVersion().onOrAfter(Version.V_6_7_0)) {
+    out.writeNamedWriteableList(topLevelPipelineAggregators);
+}
}
}
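To make the version gate above concrete, here is an illustrative round-trip in the
spirit of writeToAndReadFrom in the new InternalAggregationsTests further below; the
aggregations instance and the registry variable are assumptions, set up as in that test:

// Assumed: `aggregations` carries top-level pipeline aggregators and `registry` is a
// NamedWriteableRegistry built from SearchModule#getNamedWriteables(), as in the test.
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(Version.V_6_6_0); // pre-6.7 wire format: the onOrAfter(V_6_7_0) branch is skipped
aggregations.writeTo(out);       // top-level pipeline aggregators are not written

StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytesRef().bytes), registry);
in.setVersion(Version.V_6_6_0);
InternalAggregations deserialized = InternalAggregations.readAggregations(in);
// deserialized.getTopLevelPipelineAggregators() is empty: a pre-6.7 stream can neither
// carry nor deliver them, which is why readFrom falls back to Collections.emptyList().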
server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java
@@ -35,10 +35,11 @@
import org.elasticsearch.search.suggest.Suggest;

import java.io.IOException;
+import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import java.util.stream.Collectors;

-import static java.util.Collections.emptyList;
import static org.elasticsearch.common.lucene.Lucene.readTopDocs;
import static org.elasticsearch.common.lucene.Lucene.writeTopDocs;

@@ -50,7 +51,7 @@ public final class QuerySearchResult extends SearchPhaseResult {
private DocValueFormat[] sortValueFormats;
private InternalAggregations aggregations;
private boolean hasAggs;
-private List<SiblingPipelineAggregator> pipelineAggregators;
+private List<SiblingPipelineAggregator> pipelineAggregators = Collections.emptyList();
private Suggest suggest;
private boolean searchTimedOut;
private Boolean terminatedEarly = null;
@@ -79,7 +80,6 @@ public QuerySearchResult queryResult() {
return this;
}


public void searchTimedOut(boolean searchTimedOut) {
this.searchTimedOut = searchTimedOut;
}
@@ -203,7 +203,7 @@ public List<SiblingPipelineAggregator> pipelineAggregators() {
}

public void pipelineAggregators(List<SiblingPipelineAggregator> pipelineAggregators) {
-this.pipelineAggregators = pipelineAggregators;
+this.pipelineAggregators = Objects.requireNonNull(pipelineAggregators);
}

public Suggest suggest() {
@@ -337,7 +337,7 @@ public void writeToNoId(StreamOutput out) throws IOException {
out.writeBoolean(true);
aggregations.writeTo(out);
}
-out.writeNamedWriteableList(pipelineAggregators == null ? emptyList() : pipelineAggregators);
+out.writeNamedWriteableList(pipelineAggregators);
if (suggest == null) {
out.writeBoolean(false);
} else {
server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
@@ -331,7 +331,7 @@ public void testBuildClusters() {
assertEquals(successful, clusters.getSuccessful());
assertEquals(skipped, clusters.getSkipped());
}

private static OriginalIndices randomOriginalIndices() {
int numLocalIndices = randomIntBetween(0, 5);
String[] localIndices = new String[numLocalIndices];
server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java (new file)
@@ -0,0 +1,178 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogramTests;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTermsTests;
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValueTests;
import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregationBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.hamcrest.Matchers;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.List;

public class InternalAggregationsTests extends ESTestCase {

private final NamedWriteableRegistry registry = new NamedWriteableRegistry(
new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedWriteables());

public void testReduceEmptyAggs() {
List<InternalAggregations> aggs = Collections.emptyList();
InternalAggregation.ReduceContext reduceContext = new InternalAggregation.ReduceContext(null, null, randomBoolean());
assertNull(InternalAggregations.reduce(aggs, Collections.emptyList(), reduceContext));
}

public void testNonFinalReduceTopLevelPipelineAggs() throws IOException {
InternalAggregation terms = new StringTerms("name", BucketOrder.key(true),
10, 1, Collections.emptyList(), Collections.emptyMap(), DocValueFormat.RAW, 25, false, 10, Collections.emptyList(), 0);
List<InternalAggregations> aggs = Collections.singletonList(new InternalAggregations(Collections.singletonList(terms)));
List<SiblingPipelineAggregator> topLevelPipelineAggs = new ArrayList<>();
MaxBucketPipelineAggregationBuilder maxBucketPipelineAggregationBuilder = new MaxBucketPipelineAggregationBuilder("test", "test");
topLevelPipelineAggs.add((SiblingPipelineAggregator)maxBucketPipelineAggregationBuilder.create());
InternalAggregation.ReduceContext reduceContext = new InternalAggregation.ReduceContext(null, null, false);
InternalAggregations reducedAggs = InternalAggregations.reduce(aggs, topLevelPipelineAggs, reduceContext);
assertEquals(1, reducedAggs.getTopLevelPipelineAggregators().size());
assertEquals(1, reducedAggs.aggregations.size());
}

public void testFinalReduceTopLevelPipelineAggs() throws IOException {
InternalAggregation terms = new StringTerms("name", BucketOrder.key(true),
10, 1, Collections.emptyList(), Collections.emptyMap(), DocValueFormat.RAW, 25, false, 10, Collections.emptyList(), 0);

MaxBucketPipelineAggregationBuilder maxBucketPipelineAggregationBuilder = new MaxBucketPipelineAggregationBuilder("test", "test");
SiblingPipelineAggregator siblingPipelineAggregator = (SiblingPipelineAggregator) maxBucketPipelineAggregationBuilder.create();
InternalAggregation.ReduceContext reduceContext = new InternalAggregation.ReduceContext(null, null, true);
final InternalAggregations reducedAggs;
if (randomBoolean()) {
InternalAggregations aggs = new InternalAggregations(Collections.singletonList(terms),
Collections.singletonList(siblingPipelineAggregator));
reducedAggs = InternalAggregations.reduce(Collections.singletonList(aggs), reduceContext);
} else {
InternalAggregations aggs = new InternalAggregations(Collections.singletonList(terms));
List<SiblingPipelineAggregator> topLevelPipelineAggs = Collections.singletonList(siblingPipelineAggregator);
reducedAggs = InternalAggregations.reduce(Collections.singletonList(aggs), topLevelPipelineAggs, reduceContext);
}
assertEquals(0, reducedAggs.getTopLevelPipelineAggregators().size());
assertEquals(2, reducedAggs.aggregations.size());
}

public void testSerialization() throws Exception {
List<InternalAggregation> aggsList = new ArrayList<>();
if (randomBoolean()) {
StringTermsTests stringTermsTests = new StringTermsTests();
stringTermsTests.init();
stringTermsTests.setUp();
aggsList.add(stringTermsTests.createTestInstance());
}
if (randomBoolean()) {
InternalDateHistogramTests dateHistogramTests = new InternalDateHistogramTests();
dateHistogramTests.setUp();
aggsList.add(dateHistogramTests.createTestInstance());
}
if (randomBoolean()) {
InternalSimpleValueTests simpleValueTests = new InternalSimpleValueTests();
aggsList.add(simpleValueTests.createTestInstance());
}
List<SiblingPipelineAggregator> topLevelPipelineAggs = new ArrayList<>();
if (randomBoolean()) {
if (randomBoolean()) {
topLevelPipelineAggs.add((SiblingPipelineAggregator)new MaxBucketPipelineAggregationBuilder("name1", "bucket1").create());
}
if (randomBoolean()) {
topLevelPipelineAggs.add((SiblingPipelineAggregator)new AvgBucketPipelineAggregationBuilder("name2", "bucket2").create());
}
if (randomBoolean()) {
topLevelPipelineAggs.add((SiblingPipelineAggregator)new SumBucketPipelineAggregationBuilder("name3", "bucket3").create());
}
}
InternalAggregations aggregations = new InternalAggregations(aggsList, topLevelPipelineAggs);
writeToAndReadFrom(aggregations, 0);
}

private void writeToAndReadFrom(InternalAggregations aggregations, int iteration) throws IOException {
Version version = VersionUtils.randomVersion(random());
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(version);
aggregations.writeTo(out);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytesRef().bytes), registry)) {
in.setVersion(version);
InternalAggregations deserialized = InternalAggregations.readAggregations(in);
assertEquals(aggregations.aggregations, deserialized.aggregations);
if (aggregations.getTopLevelPipelineAggregators() == null) {
assertEquals(0, deserialized.getTopLevelPipelineAggregators().size());
} else {
if (version.before(Version.V_6_7_0)) {
assertEquals(0, deserialized.getTopLevelPipelineAggregators().size());
} else {
assertEquals(aggregations.getTopLevelPipelineAggregators().size(),
deserialized.getTopLevelPipelineAggregators().size());
for (int i = 0; i < aggregations.getTopLevelPipelineAggregators().size(); i++) {
SiblingPipelineAggregator siblingPipelineAggregator1 = aggregations.getTopLevelPipelineAggregators().get(i);
SiblingPipelineAggregator siblingPipelineAggregator2 = deserialized.getTopLevelPipelineAggregators().get(i);
assertArrayEquals(siblingPipelineAggregator1.bucketsPaths(), siblingPipelineAggregator2.bucketsPaths());
assertEquals(siblingPipelineAggregator1.name(), siblingPipelineAggregator2.name());
}
}
}
if (iteration < 2) {
//serialize this enough times to make sure that we are able to write again what we read
writeToAndReadFrom(deserialized, iteration + 1);
}
}
}
}

public void testSerializationFromPre_6_7_0() throws IOException {
String aggsString = "AwZzdGVybXMFb0F0Q0EKCQVsZG5ncgAFeG56RWcFeUFxVmcABXBhQVVpBUtYc2VIAAVaclRESwVqUkxySAAFelp5d1AFRUREcEYABW1" +
"sckF0BU5wWWVFAAVJYVJmZgVURlJVbgAFT0RiU04FUWNwSVoABU1sb09HBUNzZHFlAAVWWmJHaQABAwGIDgNyYXcFAQAADmRhdGVfaGlzdG9ncmFt" +
"BVhHbVl4/wADAAKAurcDA1VUQwABAQAAAWmOhukAAQAAAWmR9dEAAAAAAAAAAAAAAANyYXcACAAAAWmQrDoAUQAAAAFpkRoXAEMAAAABaZGH9AAtA" +
"AAAAWmR9dEAJwAAAAFpkmOuAFwAAAABaZLRiwAYAAAAAWmTP2gAKgAAAAFpk61FABsADHNpbXBsZV92YWx1ZQVsWVNLVv8AB2RlY2ltYWwGIyMjLi" +
"MjQLZWZVy5zBYAAAAAAAAAAAAAAAAAAAAAAAAA";

byte[] aggsBytes = Base64.getDecoder().decode(aggsString);
try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(aggsBytes), registry)) {
in.setVersion(VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(),
Version.max(Version.CURRENT.minimumCompatibilityVersion(), VersionUtils.getPreviousVersion(Version.CURRENT))));
InternalAggregations deserialized = InternalAggregations.readAggregations(in);
assertEquals(3, deserialized.aggregations.size());
assertThat(deserialized.aggregations.get(0), Matchers.instanceOf(StringTerms.class));
assertThat(deserialized.aggregations.get(1), Matchers.instanceOf(InternalDateHistogram.class));
assertThat(deserialized.aggregations.get(2), Matchers.instanceOf(InternalSimpleValue.class));
}
}
}
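As an aside, a fixture like the aggsString above can be regenerated along these lines
(a sketch, assuming an InternalAggregations instance populated as in testSerialization;
BytesReference is org.elasticsearch.common.bytes.BytesReference):

// Sketch: serialize on a pre-6.7 stream and Base64-encode the exact bytes; the
// resulting string can then be pasted into a test like testSerializationFromPre_6_7_0.
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(Version.V_6_6_0);
aggregations.writeTo(out);
String aggsString = Base64.getEncoder().encodeToString(BytesReference.toBytes(out.bytes()));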
