Skip to content

Commit

Permalink
add debug info
Browse files · Browse the repository at this point in the history
Signed-off-by: bowenlan-amzn <bowenlan23@gmail.com>
  • Loading branch information
bowenlan-amzn committed Apr 23, 2024
1 parent ba7c549 commit 50126be
Show file tree
Hide file tree
Showing 5 changed files with 27 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -99,3 +99,7 @@ setup:
- length: { aggregations.histo.buckets: 2 }
- match: { profile.shards.0.aggregations.0.type: AutoDateHistogramAggregator.FromSingle }
- match: { profile.shards.0.aggregations.0.debug.surviving_buckets: 4 }
- match: { profile.shards.0.aggregations.0.debug.optimized_segments: 1 }
- match: { profile.shards.0.aggregations.0.debug.unoptimized_segments: 0 }
- match: { profile.shards.0.aggregations.0.debug.leaf_visited: 1 }
- match: { profile.shards.0.aggregations.0.debug.inner_visited: 0 }
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.LongUnaryOperator;
import java.util.stream.Collectors;

Expand Down Expand Up @@ -171,7 +172,7 @@ final class CompositeAggregator extends BucketsAggregator {
// bucketOrds is used for saving date histogram results
bucketOrds = LongKeyedBucketOrds.build(context.bigArrays(), CardinalityUpperBound.ONE);
preparedRounding = ((CompositeAggregationType) fastFilterContext.getAggregationType()).getRoundingPrepared();
fastFilterContext.setFieldName(sourceConfigs[0].name());
fastFilterContext.setFieldName(sourceConfigs[0].fieldType().name());
fastFilterContext.buildRanges();
}
}
Expand Down Expand Up @@ -707,4 +708,14 @@ private static class Entry {
this.docIdSet = docIdSet;
}
}

@Override
public void collectDebugInfo(BiConsumer<String, Object> add) {
    // Delegate first so the base aggregator's debug entries (e.g. bucket counts
    // from BucketsAggregator) are not dropped by this override — this matches
    // the AutoDateHistogramAggregator override added in the same change.
    super.collectDebugInfo(add);
    // Only emit filter-rewrite stats when the optimization actually ran on at
    // least one segment; otherwise the profile stays free of zero-value noise.
    if (fastFilterContext.optimizedSegments > 0) {
        add.accept("optimized_segments", fastFilterContext.optimizedSegments);
        add.accept("unoptimized_segments", fastFilterContext.segments - fastFilterContext.optimizedSegments);
        add.accept("leaf_visited", fastFilterContext.leaf);
        add.accept("inner_visited", fastFilterContext.inner);
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -308,6 +308,17 @@ protected final void merge(long[] mergeMap, long newNumBuckets) {
}
}

@Override
public void collectDebugInfo(BiConsumer<String, Object> add) {
    // Keep whatever the superclass already reports.
    super.collectDebugInfo(add);
    // Filter-rewrite stats are only meaningful if the optimization fired on
    // at least one segment; skip them entirely otherwise.
    final var optimized = fastFilterContext.optimizedSegments;
    if (optimized > 0) {
        add.accept("optimized_segments", optimized);
        add.accept("unoptimized_segments", fastFilterContext.segments - optimized);
        add.accept("leaf_visited", fastFilterContext.leaf);
        add.accept("inner_visited", fastFilterContext.inner);
    }
}

/**
* Initially it uses the most fine grained rounding configuration possible
* but as more data arrives it rebuckets the data until it "fits" in the
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,8 +171,6 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol
)
);
if (optimized) throw new CollectionTerminatedException();
// we will return the debug info for each segment
// or we should just cache it in the fast filter context

SortedNumericDocValues values = valuesSource.longValues(ctx);
return new LeafBucketCollectorBase(sub, values) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,6 @@
import org.opensearch.search.aggregations.AggregatorFactories.Builder;
import org.opensearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer;
import org.opensearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
import org.opensearch.search.aggregations.bucket.terms.TermsAggregator;
import org.opensearch.search.aggregations.metrics.MetricsAggregator;
import org.opensearch.search.aggregations.pipeline.PipelineAggregator;
import org.opensearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
Expand Down

0 comments on commit 50126be

Please sign in to comment.