Skip to content

Commit

Permalink
Cleanup dead code in o.e.s.aggregations (#101806)
Browse files Browse the repository at this point in the history
Just a few mostly automated obvious dead code removals.
  • Loading branch information
original-brownbear committed Nov 4, 2023
1 parent b620c5c commit 4dff9cd
Show file tree
Hide file tree
Showing 69 changed files with 11 additions and 653 deletions.
Expand Up @@ -130,10 +130,6 @@ public void collectDebugInfo(BiConsumer<String, Object> add) {
add.accept("delegate_debug", delegateDebug);
}

/**
 * Returns the wrapped delegate {@link Aggregator}.
 */
public Aggregator delegate() {
return delegate;
}

@Override
public String toString() {
return name();
Expand Down
Expand Up @@ -75,7 +75,6 @@
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ValueCount;
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder;

import java.util.List;

Expand All @@ -100,13 +99,6 @@ public static AvgAggregationBuilder avg(String name) {
return new AvgAggregationBuilder(name);
}

/**
 * Create a new {@link WeightedAvg} aggregation with the given name.
 */
public static WeightedAvgAggregationBuilder weightedAvg(String name) {
return new WeightedAvgAggregationBuilder(name);
}

/**
* Create a new {@link Max} aggregation with the given name.
*/
Expand Down
Expand Up @@ -64,8 +64,6 @@ public String name() {
return name;
}

public void doValidate() {}

protected abstract Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
throws IOException;

Expand Down
Expand Up @@ -112,11 +112,6 @@ public Collection<AggregationBuilder> getSiblingAggregations() {
return siblingAggregations;
}

/**
 * Returns the sibling pipeline aggregations recorded for this context.
 */
@Override
public Collection<PipelineAggregationBuilder> getSiblingPipelineAggregations() {
return siblingPipelineAggregations;
}

@Override
public void validateHasParent(String type, String name) {
addValidationError(type + " aggregation [" + name + "] must be declared inside of another aggregation");
Expand Down Expand Up @@ -155,11 +150,6 @@ public Collection<AggregationBuilder> getSiblingAggregations() {
return parent.getSubAggregations();
}

/**
 * Returns the sibling pipeline aggregations, i.e. the pipeline aggregations
 * attached to the parent aggregation.
 */
@Override
public Collection<PipelineAggregationBuilder> getSiblingPipelineAggregations() {
return parent.getPipelineAggregations();
}

@Override
public void validateHasParent(String type, String name) {
// There is a parent inside the tree.
Expand All @@ -181,11 +171,6 @@ public void validateParentAggSequentiallyOrderedWithoutSkips(String type, String
*/
public abstract Collection<AggregationBuilder> getSiblingAggregations();

/**
 * Pipeline aggregations that are siblings to the aggregation being validated.
 *
 * @return the sibling pipeline aggregation builders
 */
public abstract Collection<PipelineAggregationBuilder> getSiblingPipelineAggregations();

/**
* Add a validation error to this context. All validation errors
* are accumulated in a list and, if there are any, the request
Expand Down
Expand Up @@ -11,7 +11,6 @@
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.CumulativeSumPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregationBuilder;
Expand Down Expand Up @@ -62,10 +61,6 @@ public static BucketScriptPipelineAggregationBuilder bucketScript(String name, S
return new BucketScriptPipelineAggregationBuilder(name, script, bucketsPaths);
}

/**
 * Create a new {@link CumulativeSumPipelineAggregationBuilder cumulative sum} pipeline
 * aggregation with the given name, reading values from the given buckets path.
 */
public static CumulativeSumPipelineAggregationBuilder cumulativeSum(String name, String bucketsPath) {
return new CumulativeSumPipelineAggregationBuilder(name, bucketsPath);
}

public static SerialDiffPipelineAggregationBuilder diff(String name, String bucketsPath) {
return new SerialDiffPipelineAggregationBuilder(name, bucketsPath);
}
Expand Down
Expand Up @@ -21,10 +21,6 @@ public IteratorAndCurrent(Iterator<B> iterator) {
this.current = iterator.next();
}

/**
 * Returns the underlying iterator.
 */
public Iterator<B> getIterator() {
return iterator;
}

/**
 * Returns the element most recently read from the underlying iterator.
 */
public B current() {
return current;
}
Expand Down
Expand Up @@ -48,7 +48,7 @@ class BinaryValuesSource extends SingleDimensionValuesSource<BytesRef> {
int size,
int reverseMul
) {
super(bigArrays, format, fieldType, missingBucket, missingOrder, size, reverseMul);
super(bigArrays, format, fieldType, missingBucket, missingOrder, reverseMul);
this.breakerConsumer = breakerConsumer;
this.docValuesFunc = docValuesFunc;
this.values = bigArrays.newObjectArray(Math.min(size, 100));
Expand Down
Expand Up @@ -212,13 +212,6 @@ public ZoneId timeZone() {
return timeZone;
}

/**
 * Get the offset to use when rounding, which is a number of milliseconds.
 *
 * @return the rounding offset in milliseconds
 */
public long offset() {
return offset;
}

/**
* Set the offset on this builder, which is a number of milliseconds.
* @return this for chaining
Expand Down
Expand Up @@ -43,7 +43,7 @@ class DoubleValuesSource extends SingleDimensionValuesSource<Double> {
int size,
int reverseMul
) {
super(bigArrays, format, fieldType, missingBucket, missingOrder, size, reverseMul);
super(bigArrays, format, fieldType, missingBucket, missingOrder, reverseMul);
this.docValuesFunc = docValuesFunc;
this.bits = this.missingBucket ? new BitArray(100, bigArrays) : null;
boolean success = false;
Expand Down
Expand Up @@ -76,7 +76,7 @@ class GlobalOrdinalValuesSource extends SingleDimensionValuesSource<BytesRef> {
int size,
int reverseMul
) {
super(bigArrays, format, type, missingBucket, missingOrder, size, reverseMul);
super(bigArrays, format, type, missingBucket, missingOrder, reverseMul);
this.uniqueValueCount = uniqueValueCount;
this.docValuesFunc = docValuesFunc;
this.values = bigArrays.newLongArray(Math.min(size, 100), false);
Expand Down
Expand Up @@ -147,13 +147,6 @@ public String type() {
return TYPE;
}

/**
 * Returns the interval that is set on this source.
 *
 * @return the configured interval
 **/
public double interval() {
return interval;
}

/**
* Sets the interval on this source.
**/
Expand Down
Expand Up @@ -59,7 +59,7 @@ class LongValuesSource extends SingleDimensionValuesSource<Long> {
int size,
int reverseMul
) {
super(bigArrays, format, fieldType, missingBucket, missingOrder, size, reverseMul);
super(bigArrays, format, fieldType, missingBucket, missingOrder, reverseMul);
this.bigArrays = bigArrays;
this.docValuesFunc = docValuesFunc;
this.rounding = rounding;
Expand Down
Expand Up @@ -31,8 +31,6 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements R
protected final MappedFieldType fieldType;
protected final boolean missingBucket;
protected final MissingOrder missingOrder;

protected final int size;
protected final int reverseMul;

protected T afterValue;
Expand All @@ -45,7 +43,6 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements R
* @param fieldType The field type or null if the source is a script.
* @param missingBucket If true, an explicit `null bucket represents documents with missing values.
* @param missingOrder How to order missing buckets if missingBucket is <code>true</code>.
* @param size The number of values to record.
* @param reverseMul -1 if the natural order ({@link SortOrder#ASC} should be reversed.
*/
SingleDimensionValuesSource(
Expand All @@ -54,15 +51,13 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements R
@Nullable MappedFieldType fieldType,
boolean missingBucket,
MissingOrder missingOrder,
int size,
int reverseMul
) {
this.bigArrays = bigArrays;
this.format = format;
this.fieldType = fieldType;
this.missingBucket = missingBucket;
this.missingOrder = missingOrder;
this.size = size;
this.reverseMul = reverseMul;
this.afterValue = null;
}
Expand Down
Expand Up @@ -198,13 +198,6 @@ public FiltersAggregationBuilder keyedBucket(boolean keyedBucket) {
return this;
}

/**
 * Get whether keyed buckets are returned.
 *
 * @return {@code true} if buckets are returned keyed rather than as an array
 */
public boolean keyedBucket() {
return keyedBucket;
}

@Override
public BucketCardinality bucketCardinality() {
return BucketCardinality.MANY;
Expand Down
Expand Up @@ -151,10 +151,6 @@ public GeoGridAggregationBuilder size(int size) {
return this;
}

/**
 * Returns the required number of buckets, as configured via {@code size(int)}.
 */
public int size() {
return requiredSize;
}

public GeoGridAggregationBuilder shardSize(int shardSize) {
if (shardSize <= 0) {
throw new IllegalArgumentException("[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]");
Expand All @@ -163,10 +159,6 @@ public GeoGridAggregationBuilder shardSize(int shardSize) {
return this;
}

/**
 * Returns the configured shard size, as set via {@code shardSize(int)}.
 */
public int shardSize() {
return shardSize;
}

public GeoGridAggregationBuilder setGeoBoundingBox(GeoBoundingBox geoBoundingBox) {
this.geoBoundingBox = geoBoundingBox;
// no validation done here, similar to geo_bounding_box query behavior.
Expand Down
Expand Up @@ -290,11 +290,6 @@ public DateHistogramAggregationBuilder extendedBounds(LongBounds extendedBounds)
return this;
}

/**
 * Return hard bounds for this histogram, or {@code null} if none are set.
 *
 * @return the hard bounds, or {@code null} when unset
 */
public LongBounds hardBounds() {
return hardBounds;
}

/** Set hard bounds on this histogram, specifying boundaries outside which buckets cannot be created. */
public DateHistogramAggregationBuilder hardBounds(LongBounds hardBounds) {
if (hardBounds == null) {
Expand Down
Expand Up @@ -117,10 +117,6 @@ public DateHistogramAggregatorFactory(
this.rounding = rounding;
}

/**
 * Returns the configured {@code minDocCount} for this factory.
 */
public long minDocCount() {
return minDocCount;
}

@Override
protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
throws IOException {
Expand Down
Expand Up @@ -26,7 +26,6 @@

import java.io.IOException;
import java.time.ZoneId;
import java.util.Locale;
import java.util.Objects;

import static org.elasticsearch.core.RestApiVersion.equalTo;
Expand Down Expand Up @@ -59,10 +58,6 @@ public enum IntervalTypeEnum implements Writeable {
@Deprecated
LEGACY_DATE_HISTO(null);

/**
 * Resolves an {@link IntervalTypeEnum} from its (case-insensitive) string name.
 * Leading/trailing whitespace is ignored; the lookup is locale-independent.
 */
public static IntervalTypeEnum fromString(String name) {
    // Normalize user input before resolving the enum constant.
    final String normalized = name.trim().toUpperCase(Locale.ROOT);
    return valueOf(normalized);
}

/**
 * Reads an {@link IntervalTypeEnum} from the given stream.
 *
 * @throws IOException if reading from the stream fails
 */
public static IntervalTypeEnum fromStream(StreamInput in) throws IOException {
return in.readEnum(IntervalTypeEnum.class);
}
Expand All @@ -72,10 +67,6 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(this);
}

/**
 * Returns the lowercase (locale-independent) form of this constant's name.
 */
public String value() {
return name().toLowerCase(Locale.ROOT);
}

public boolean isValid() {
// I'm being a little cheeky here and just reusing the name for signaling invalid choices too
return this.preferredName != null;
Expand Down
Expand Up @@ -216,10 +216,6 @@ public double maxBound() {
return DoubleBounds.getEffectiveMax(extendedBounds);
}

/**
 * Returns the extended bounds set on this builder.
 */
protected DoubleBounds extendedBounds() {
return extendedBounds;
}

/**
* Set extended bounds on this builder: buckets between {@code minBound} and
* {@code maxBound} will be created even if no documents fell into these
Expand Down
Expand Up @@ -84,10 +84,6 @@ public HistogramAggregatorFactory(
this.hardBounds = hardBounds;
}

/**
 * Returns the configured {@code minDocCount} for this factory.
 */
public long minDocCount() {
return minDocCount;
}

@Override
protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
throws IOException {
Expand Down
Expand Up @@ -269,10 +269,6 @@ public List<InternalDateHistogram.Bucket> getBuckets() {
return Collections.unmodifiableList(buckets);
}

/**
 * Returns the {@link DocValueFormat} of this aggregation.
 */
DocValueFormat getFormatter() {
return format;
}

/**
 * Returns the configured {@code minDocCount}.
 */
long getMinDocCount() {
return minDocCount;
}
Expand Down
Expand Up @@ -187,10 +187,6 @@ public int compareKey(InternalVariableWidthHistogram.Bucket other) {
return Double.compare(centroid, other.centroid); // Use centroid for bucket ordering
}

/**
 * Returns the {@link DocValueFormat} of this bucket.
 */
public DocValueFormat getFormatter() {
return format;
}

Bucket finalizeSampling(SamplingContext samplingContext) {
return new Bucket(
centroid,
Expand Down Expand Up @@ -282,10 +278,6 @@ public List<Bucket> getBuckets() {
return Collections.unmodifiableList(buckets);
}

/**
 * Returns the {@link DocValueFormat} of this aggregation.
 */
DocValueFormat getFormatter() {
return format;
}

/**
 * Returns the target number of buckets for this variable-width histogram.
 */
public int getTargetBuckets() {
return targetNumBuckets;
}
Expand Down Expand Up @@ -525,7 +517,7 @@ private void mergeBucketsWithSameMin(List<Bucket> buckets, AggregationReduceCont
*
* After this adjustment, A will contain more values than indicated and B will have less.
*/
private static void adjustBoundsForOverlappingBuckets(List<Bucket> buckets, AggregationReduceContext reduceContext) {
private static void adjustBoundsForOverlappingBuckets(List<Bucket> buckets) {
for (int i = 1; i < buckets.size(); i++) {
Bucket curBucket = buckets.get(i);
Bucket prevBucket = buckets.get(i - 1);
Expand All @@ -545,7 +537,7 @@ public InternalAggregation reduce(List<InternalAggregation> aggregations, Aggreg
if (reduceContext.isFinalReduce()) {
buckets.sort(Comparator.comparing(Bucket::min));
mergeBucketsWithSameMin(reducedBuckets, reduceContext);
adjustBoundsForOverlappingBuckets(reducedBuckets, reduceContext);
adjustBoundsForOverlappingBuckets(reducedBuckets);
}
return new InternalVariableWidthHistogram(getName(), reducedBuckets, emptyBucketInfo, targetNumBuckets, format, metadata);
}
Expand Down

0 comments on commit 4dff9cd

Please sign in to comment.