Skip to content

Commit

Permalink
review comments
Browse files Browse the repository at this point in the history
  • Loading branch information
colings86 committed Nov 10, 2015
1 parent 95e8d66 commit 4a4be91
Show file tree
Hide file tree
Showing 5 changed files with 36 additions and 21 deletions.
Expand Up @@ -32,6 +32,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
Expand Down Expand Up @@ -147,7 +148,7 @@ public Builder addPipelineAggregator(PipelineAggregatorFactory pipelineAggregato
/**
 * Marks this builder so that the resolve-order step is skipped when
 * building — presumably the pipeline-aggregator resolve-order validation;
 * confirm against the build() implementation.
 * FOR TESTING ONLY
 *
 * @return this builder, for method chaining
 */
// NOTE(review): the diff narrows visibility from public to package-private;
// both the removed and the added signature line are shown below.
public Builder skipResolveOrder() {
Builder skipResolveOrder() {
this.skipResolveOrder = true;
return this;
}
Expand Down Expand Up @@ -276,7 +277,8 @@ public AggregatorFactories readFrom(StreamInput in) throws IOException {
PipelineAggregatorFactory factory = in.readPipelineAggregatorFactory();
pipelineAggregatorFactoriesList.add(factory);
}
AggregatorFactories aggregatorFactories = new AggregatorFactories(factoriesList, pipelineAggregatorFactoriesList);
AggregatorFactories aggregatorFactories = new AggregatorFactories(factoriesList,
Collections.unmodifiableList(pipelineAggregatorFactoriesList));
return aggregatorFactories;
}

Expand Down
Expand Up @@ -71,6 +71,13 @@ public final void init(AggregationContext context) {
this.factories.init(context);
}

/**
 * Allows the {@link AggregatorFactory} to initialize any state prior to
 * using it to create {@link Aggregator}s.
 * <p>
 * The default implementation is a no-op; subclasses override this when
 * they need to compute factory state once before aggregator creation.
 *
 * @param context
 *            the {@link AggregationContext} to use during initialization.
 */
protected void doInit(AggregationContext context) {
}

Expand Down
Expand Up @@ -38,7 +38,6 @@
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter.DateTime;
import org.joda.time.DateTimeZone;

import java.io.IOException;
Expand Down Expand Up @@ -182,13 +181,16 @@ protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggre

@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, aggregationContext, parent);
}
// we need to round the bounds given by the user and we have to do it for every aggregator we create
// we need to round the bounds given by the user and we have to do
// it for every aggregator we create
// as the rounding is not necessarily an idempotent operation.
// TODO we need to think of a better structure to the factory/aggregator code so we won't need to do that
// TODO we need to think of a better structure to the factory/aggregator
// code so we won't need to do that
ExtendedBounds roundedBounds = null;
if (extendedBounds != null) {
// we need to process & validate here using the parser
Expand Down
Expand Up @@ -155,6 +155,12 @@ public SignificantTermsAggregatorFactory(String name, ValuesSourceParser.Input v
this.filter = filter;
}

/**
 * Resolves field information once at initialization time (via
 * {@code setFieldInfo()}, which reads the indexed field name from the
 * values-source config) instead of on every aggregator creation.
 *
 * @param context
 *            the {@link AggregationContext} to use during initialization.
 */
@Override
public void doInit(AggregationContext context) {
super.doInit(context);
setFieldInfo();
}

private void setFieldInfo() {
if (!config.unmapped()) {
this.indexedFieldName = config.fieldContext().field();
Expand All @@ -166,7 +172,6 @@ private void setFieldInfo() {
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
setFieldInfo();
final InternalAggregation aggregation = new UnmappedSignificantTerms(name, bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(), pipelineAggregators, metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, pipelineAggregators, metaData) {
Expand All @@ -181,7 +186,6 @@ public InternalAggregation buildEmptyAggregation() {
protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
setFieldInfo();
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, aggregationContext, parent);
}
Expand Down
Expand Up @@ -50,16 +50,16 @@ public AbstractPercentilesParser(boolean formattable) {

@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {

ValuesSourceParser<ValuesSource.Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalTDigestPercentiles.TYPE, context)
.formattable(formattable).build();

double[] keys = null;
boolean keyed = true;
Double compression = null;
Integer numberOfSignificantValueDigits = null;
PercentilesMethod method = null;

XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
Expand Down Expand Up @@ -103,17 +103,17 @@ public AggregatorFactory parse(String aggregationName, XContentParser parser, Se
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (context.parseFieldMatcher().match(currentFieldName, COMPRESSION_FIELD)) {
compression = parser.doubleValue();
} else {
compression = parser.doubleValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName
+ "]: [" + currentFieldName + "].", parser.getTokenLocation());
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
}
break;
case HDR:
Expand All @@ -123,14 +123,14 @@ public AggregatorFactory parse(String aggregationName, XContentParser parser, Se
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (context.parseFieldMatcher().match(currentFieldName, NUMBER_SIGNIFICANT_DIGITS_FIELD)) {
numberOfSignificantValueDigits = parser.intValue();
} else {
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName
+ "]: [" + currentFieldName + "].", parser.getTokenLocation());
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
}
break;
}
Expand Down

0 comments on commit 4a4be91

Please sign in to comment.