Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,12 @@

package org.elasticsearch.tdigest;

import static org.hamcrest.Matchers.lessThanOrEqualTo;

public abstract class BigCountTests extends TDigestTestCase {

public void testBigMerge() {
try (TDigest digest = createDigest()) {
try (TDigest digest = createDigest(100)) {
for (int i = 0; i < 5; i++) {
try (TDigest digestToMerge = getDigest()) {
digest.add(digestToMerge);
Expand All @@ -35,13 +37,25 @@ public void testBigMerge() {
}
}

/**
* Verify that, at a range of compression values, the size of the produced digest is not much larger than 10 times the compression
*/
public void testCompression() {
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is tangential to the main point of this PR, but part of the exploratory work and seems like a reasonable test in general.

for (int compression : new int[] { 100, 500, 1000, 10000 }) {
try (TDigest digest = createDigest(compression)) {
addData(digest);
assertThat("Compression = " + compression, digest.centroidCount(), lessThanOrEqualTo(compression * 10));
}
}
}

/**
 * Builds a digest at the default test compression (100) and fills it with the
 * shared test data set. Caller is responsible for closing the returned digest.
 */
private TDigest getDigest() {
    TDigest digest = createDigest(100);
    addData(digest);
    return digest;
}

public TDigest createDigest() {
public TDigest createDigest(int compression) {
throw new IllegalStateException("Should have over-ridden createDigest");
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

public class BigCountTestsMergingDigestTests extends BigCountTests {
@Override
public TDigest createDigest() {
return TDigest.createMergingDigest(arrays(), 100);
public TDigest createDigest(int compression) {
return TDigest.createMergingDigest(arrays(), compression);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

public class BigCountTestsTreeDigestTests extends BigCountTests {
@Override
public TDigest createDigest() {
return TDigest.createAvlTreeDigest(arrays(), 100);
public TDigest createDigest(int compression) {
return TDigest.createAvlTreeDigest(arrays(), compression);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ public class TDigestState implements Releasable, Accountable {
private final TDigest tdigest;

// Supported tdigest types.
protected enum Type {
public enum Type {
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Expanding the visibility here so we can use it as a parameter to the field.

HYBRID,
AVL_TREE,
MERGING,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import org.elasticsearch.xpack.analytics.cumulativecardinality.CumulativeCardinalityPipelineAggregationBuilder;
import org.elasticsearch.xpack.analytics.cumulativecardinality.InternalSimpleLongValue;
import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper;
import org.elasticsearch.xpack.analytics.mapper.TDigestFieldMapper;
import org.elasticsearch.xpack.analytics.movingPercentiles.MovingPercentilesPipelineAggregationBuilder;
import org.elasticsearch.xpack.analytics.multiterms.InternalMultiTerms;
import org.elasticsearch.xpack.analytics.multiterms.MultiTermsAggregationBuilder;
Expand Down Expand Up @@ -140,6 +141,14 @@ public List<Setting<?>> getSettings() {

@Override
public Map<String, Mapper.TypeParser> getMappers() {
    // Register the tdigest field mapper only when it is enabled (presumably a
    // feature flag — TODO confirm); the histogram mapper is always registered.
    if (TDigestFieldMapper.TDIGEST_FIELD_MAPPER.isEnabled()) {
        return Map.of(
            HistogramFieldMapper.CONTENT_TYPE,
            HistogramFieldMapper.PARSER,
            TDigestFieldMapper.CONTENT_TYPE,
            TDigestFieldMapper.PARSER
        );
    }
    return Map.of(HistogramFieldMapper.CONTENT_TYPE, HistogramFieldMapper.PARSER);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ private record ParsedZeroBucket(long count, double threshold) {

/**
* Parses an XContent object into an exponential histogram.
* The parser is expected to point at the next token after {@link XContentParser.Token#START_OBJECT}.
*
* @param mappedFieldName the name of the field being parsed, used for error messages
* @param parser the parser to use
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ public record ParsedHistogram(List<Double> values, List<Long> counts) {}

/**
* Parses an XContent object into a histogram.
* The parser is expected to point at the next token after {@link XContentParser.Token#START_OBJECT}.
*
* @param mappedFieldName the name of the field being parsed, used for error messages
* @param parser the parser to use
Expand Down
Loading