Move QueryGranularity static fields to QueryGranularities (apache#2980)
* Move QueryGranularity static fields to QueryGranularityUtil
* Fixes apache#2979

* Add test showing apache#2979

* change name to QueryGranularities
drcrallen authored and fjy committed May 17, 2016
1 parent eaaad01 commit 15ccf45
Showing 93 changed files with 445 additions and 403 deletions.
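
For orientation before the per-file hunks: the change keeps QueryGranularity as the behaviour-defining type (its truncate and next methods are still called in one of the test hunks below) and relocates only the widely used constants such as NONE, ALL, SECOND and MINUTE into the new QueryGranularities holder, so call sites switch from QueryGranularity.NONE to QueryGranularities.NONE. The following sketch illustrates the pattern with self-contained stand-in classes; it is not Druid's actual implementation, and the initializers are placeholders chosen for the example.

// Toy illustration of the refactor pattern only; these classes are stand-ins,
// not Druid's QueryGranularity / QueryGranularities implementation.
abstract class Granularity
{
  abstract long truncate(long timestampMillis); // floor a timestamp to its bucket start
  abstract long next(long bucketStartMillis);   // start of the bucket that follows
}

final class DurationGranularity extends Granularity
{
  private final long durationMillis;

  DurationGranularity(long durationMillis)
  {
    this.durationMillis = durationMillis;
  }

  @Override
  long truncate(long timestampMillis)
  {
    return timestampMillis - (timestampMillis % durationMillis);
  }

  @Override
  long next(long bucketStartMillis)
  {
    return bucketStartMillis + durationMillis;
  }
}

// The holder that call sites reference after the refactor, instead of static
// fields declared on the behaviour-defining base class.
final class Granularities
{
  static final Granularity NONE = new DurationGranularity(1L);       // effectively no bucketing
  static final Granularity SECOND = new DurationGranularity(1_000L);
  static final Granularity MINUTE = new DurationGranularity(60_000L);

  private Granularities() {}
}

The hunks that follow are the resulting mechanical renames of those constant references, plus the matching import changes.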
@@ -22,7 +22,7 @@
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
@@ -122,7 +122,7 @@ private IncrementalIndex makeIncIndex()
{
return new OnheapIncrementalIndex(
0,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
aggs,
false,
false,
@@ -27,7 +27,7 @@
import io.druid.collections.StupidPool;
import io.druid.data.input.MapBasedInputRow;
import io.druid.data.input.Row;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.aggregation.AggregatorFactory;
@@ -89,7 +89,7 @@ public ByteBuffer get()
);

IncrementalIndex index = new OnheapIncrementalIndex(
-0, QueryGranularity.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
+0, QueryGranularities.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
);
String visitor_id = "visitor_id";
String client_type = "client_type";
@@ -23,7 +23,7 @@
import com.google.common.collect.Lists;
import com.metamx.common.guava.Sequences;
import io.druid.data.input.MapBasedInputRow;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.Druids;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.Result;
@@ -51,7 +51,7 @@ public void testTopNWithDistinctCountAgg() throws Exception
TimeseriesQueryEngine engine = new TimeseriesQueryEngine();

IncrementalIndex index = new OnheapIncrementalIndex(
-0, QueryGranularity.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
+0, QueryGranularities.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
);
String visitor_id = "visitor_id";
String client_type = "client_type";
@@ -25,7 +25,7 @@
import com.metamx.common.guava.Sequences;
import io.druid.collections.StupidPool;
import io.druid.data.input.MapBasedInputRow;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.Result;
import io.druid.query.aggregation.AggregatorFactory;
@@ -66,7 +66,7 @@ public ByteBuffer get()
);

IncrementalIndex index = new OnheapIncrementalIndex(
-0, QueryGranularity.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
+0, QueryGranularities.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
);
String visitor_id = "visitor_id";
String client_type = "client_type";
@@ -30,7 +30,7 @@
import com.yahoo.sketches.theta.Sketches;
import io.druid.data.input.MapBasedRow;
import io.druid.data.input.Row;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.Result;
import io.druid.query.aggregation.AggregationTestHelper;
import io.druid.query.aggregation.AggregatorFactory;
@@ -73,7 +73,7 @@ public void testSimpleDataIngestAndGpByQuery() throws Exception
readFileFromClasspathAsString("simple_test_data_record_parser.json"),
readFileFromClasspathAsString("simple_test_data_aggregators.json"),
0,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
5,
readFileFromClasspathAsString("simple_test_data_group_by_query.json")
);
@@ -167,7 +167,7 @@ public void testSimpleDataIngestAndSelectQuery() throws Exception
readFileFromClasspathAsString("simple_test_data_record_parser.json"),
readFileFromClasspathAsString("simple_test_data_aggregators.json"),
0,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
5000,
readFileFromClasspathAsString("select_query.json")
);
@@ -186,7 +186,7 @@ public void testSketchDataIngestAndGpByQuery() throws Exception
readFileFromClasspathAsString("sketch_test_data_record_parser.json"),
readFileFromClasspathAsString("sketch_test_data_aggregators.json"),
0,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
5,
readFileFromClasspathAsString("sketch_test_data_group_by_query.json")
);
@@ -227,7 +227,7 @@ public void testThetaCardinalityOnSimpleColumn() throws Exception
+ " }"
+ "]",
0,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
5,
readFileFromClasspathAsString("simple_test_data_group_by_query.json")
);
@@ -25,7 +25,7 @@
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import io.druid.data.input.MapBasedRow;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.aggregation.AggregationTestHelper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
@@ -69,7 +69,7 @@ public void testSimpleDataIngestAndQuery() throws Exception
readFileFromClasspathAsString("simple_test_data_record_parser.json"),
readFileFromClasspathAsString("oldapi/old_simple_test_data_aggregators.json"),
0,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
5,
readFileFromClasspathAsString("oldapi/old_simple_test_data_group_by_query.json")
);
@@ -101,7 +101,7 @@ public void testSketchDataIngestAndQuery() throws Exception
readFileFromClasspathAsString("sketch_test_data_record_parser.json"),
readFileFromClasspathAsString("oldapi/old_sketch_test_data_aggregators.json"),
0,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
5,
readFileFromClasspathAsString("oldapi/old_sketch_test_data_group_by_query.json")
);
@@ -23,7 +23,7 @@
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import io.druid.data.input.MapBasedRow;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.aggregation.AggregationTestHelper;
import org.junit.Assert;
import org.junit.Rule;
@@ -115,7 +115,7 @@ private MapBasedRow ingestAndQuery(boolean ignoreNulls) throws Exception
parseSpec,
metricSpec,
0,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
50000,
query
);
@@ -19,7 +19,6 @@

package io.druid.indexing.kafka;

-import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -56,7 +55,7 @@
import io.druid.data.input.impl.JSONPathSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.indexing.common.SegmentLoaderFactory;
import io.druid.indexing.common.TaskLock;
import io.druid.indexing.common.TaskStatus;
@@ -198,7 +197,7 @@ public class KafkaIndexTaskTest
Map.class
),
new AggregatorFactory[]{new CountAggregatorFactory("rows")},
-new UniformGranularitySpec(Granularity.DAY, QueryGranularity.NONE, null),
+new UniformGranularitySpec(Granularity.DAY, QueryGranularities.NONE, null),
objectMapper
);
}
@@ -1442,7 +1441,7 @@ public long countEvents(final Task task) throws Exception
ImmutableList.<AggregatorFactory>of(
new LongSumAggregatorFactory("rows", "rows")
)
-).granularity(QueryGranularity.ALL)
+).granularity(QueryGranularities.ALL)
.intervals("0000/3000")
.build();

@@ -35,7 +35,7 @@
import io.druid.data.input.impl.StringDimensionSchema;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.indexing.common.TaskLocation;
import io.druid.indexing.common.TaskStatus;
import io.druid.indexing.common.task.RealtimeIndexTask;
@@ -1014,7 +1014,7 @@ private DataSchema getDataSchema(String dataSource)
new AggregatorFactory[]{new CountAggregatorFactory("rows")},
new UniformGranularitySpec(
Granularity.HOUR,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
ImmutableList.<Interval>of()
),
objectMapper
@@ -25,6 +25,7 @@
import com.google.common.collect.ImmutableList;
import io.druid.common.utils.JodaUtils;
import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.filter.DimFilter;
import io.druid.timeline.DataSegment;
import org.joda.time.Interval;
@@ -76,7 +77,7 @@ public DatasourceIngestionSpec(
this.segments = segments;

this.filter = filter;
-this.granularity = granularity == null ? QueryGranularity.NONE : granularity;
+this.granularity = granularity == null ? QueryGranularities.NONE : granularity;

this.dimensions = dimensions;
this.metrics = metrics;
@@ -34,7 +34,7 @@
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.indexer.hadoop.WindowedDataSegment;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.aggregation.AggregatorFactory;
@@ -324,7 +324,7 @@ private void testIngestion(
ImmutableList.of("host"),
ImmutableList.of("visited_sum", "unique_hosts"),
null,
-QueryGranularity.NONE
+QueryGranularities.NONE
);

List<InputRow> rows = Lists.newArrayList();
@@ -358,7 +358,7 @@ private HadoopDruidIndexerConfig makeHadoopDruidIndexerConfig(Map<String, Object
new HyperUniquesAggregatorFactory("unique_hosts", "host2")
},
new UniformGranularitySpec(
-Granularity.DAY, QueryGranularity.NONE, ImmutableList.of(INTERVAL_FULL)
+Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(INTERVAL_FULL)
),
MAPPER
),
@@ -27,7 +27,7 @@
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.indexer.partitions.HashedPartitionsSpec;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
@@ -133,7 +133,7 @@ public DetermineHashedPartitionsJobTest(String dataFilePath, long targetPartitio
new AggregatorFactory[]{new DoubleSumAggregatorFactory("index", "index")},
new UniformGranularitySpec(
Granularity.DAY,
-QueryGranularity.NONE,
+QueryGranularities.NONE,
ImmutableList.of(new Interval(interval))
),
HadoopDruidIndexerConfig.JSON_MAPPER
@@ -27,7 +27,7 @@
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.indexer.partitions.SingleDimensionPartitionsSpec;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
@@ -236,7 +236,7 @@ public DeterminePartitionsJobTest(
),
new AggregatorFactory[]{new LongSumAggregatorFactory("visited_num", "visited_num")},
new UniformGranularitySpec(
-Granularity.DAY, QueryGranularity.NONE, ImmutableList.of(new Interval(interval))
+Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(new Interval(interval))
),
HadoopDruidIndexerConfig.JSON_MAPPER
),
@@ -27,7 +27,7 @@
import com.google.common.collect.Lists;
import com.metamx.common.Granularity;
import io.druid.data.input.MapBasedInputRow;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.indexing.DataSchema;
@@ -203,7 +203,7 @@ public void testHashedBucketSelection()
new AggregatorFactory[0],
new UniformGranularitySpec(
Granularity.MINUTE,
-QueryGranularity.MINUTE,
+QueryGranularities.MINUTE,
ImmutableList.of(new Interval("2010-01-01/P1D"))
),
jsonMapper
@@ -242,7 +242,7 @@ public void testHashedBucketSelection()
);
final long timestamp = new DateTime("2010-01-01T01:00:01").getMillis();
final Bucket expectedBucket = config.getBucket(new MapBasedInputRow(timestamp, dims, values)).get();
-final long nextBucketTimestamp = QueryGranularity.MINUTE.next(QueryGranularity.MINUTE.truncate(timestamp));
+final long nextBucketTimestamp = QueryGranularities.MINUTE.next(QueryGranularities.MINUTE.truncate(timestamp));
// check that all rows having same set of dims and truncated timestamp hash to same bucket
for (int i = 0; timestamp + i < nextBucketTimestamp; i++) {
Assert.assertEquals(
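
The truncate/next composition in the hunk above derives the start of the bucket that follows a given timestamp. Written out as plain arithmetic for a fixed one-minute granularity, assuming an epoch-aligned origin (an illustration only, not Druid's implementation; joda-time is needed on the classpath):

import org.joda.time.DateTime;

class MinuteBucketExample
{
  public static void main(String[] args)
  {
    long minuteMillis = 60_000L;
    long timestamp = new DateTime("2010-01-01T01:00:01").getMillis();
    long bucketStart = timestamp - (timestamp % minuteMillis); // truncate(timestamp)
    long nextBucketStart = bucketStart + minuteMillis;         // next(truncate(timestamp))
    // Timestamps in [bucketStart, nextBucketStart) truncate to the same minute,
    // which is the property the loop in the test above relies on.
    System.out.println(bucketStart + " .. " + nextBucketStart);
  }
}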
@@ -29,7 +29,7 @@
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
@@ -79,7 +79,7 @@ public void setUp() throws Exception
new HyperUniquesAggregatorFactory("unique_hosts", "host")
},
new UniformGranularitySpec(
-Granularity.DAY, QueryGranularity.NONE, ImmutableList.of(Interval.parse("2010/2011"))
+Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(Interval.parse("2010/2011"))
),
HadoopDruidIndexerConfig.JSON_MAPPER
),
@@ -33,7 +33,7 @@
import io.druid.data.input.impl.JSONParseSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
@@ -478,7 +478,7 @@ public void setUp() throws Exception
),
aggs,
new UniformGranularitySpec(
-Granularity.DAY, QueryGranularity.NONE, ImmutableList.of(this.interval)
+Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(this.interval)
),
mapper
),
@@ -26,7 +26,7 @@
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.granularity.QueryGranularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.segment.indexing.DataSchema;
@@ -81,7 +81,7 @@ public void setup() throws Exception
),
new AggregatorFactory[]{new LongSumAggregatorFactory("visited_num", "visited_num")},
new UniformGranularitySpec(
-Granularity.DAY, QueryGranularity.NONE, ImmutableList.of(this.interval)
+Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(this.interval)
),
HadoopDruidIndexerConfig.JSON_MAPPER
),
