Introduce SQL interface for distinct count extension #13927

30 changes: 30 additions & 0 deletions extensions-contrib/distinctcount/pom.xml
@@ -80,6 +80,17 @@
<artifactId>fastutil-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-sql</artifactId>
<version>${project.parent.version}</version>
<scope>provided</scope>
</dependency>

<!-- Tests -->
<dependency>
@@ -99,6 +110,25 @@
<artifactId>hamcrest-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-server</artifactId>
<version>${project.parent.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-sql</artifactId>
<version>${project.parent.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>

</project>
@@ -45,6 +45,7 @@ public void aggregate()
IndexedInts row = selector.getRow();
for (int i = 0, rowSize = row.size(); i < rowSize; i++) {
int index = row.get(i);

Contributor: nit: We can revert this change

mutableBitmap.add(index);
}
}
@@ -116,15 +116,6 @@ public int compare(Object o, Object o1)
@Override
public Object combine(Object lhs, Object rhs)
{
if (lhs == null && rhs == null) {
return 0L;
}
if (rhs == null) {
return ((Number) lhs).longValue();
}
if (lhs == null) {
return ((Number) rhs).longValue();
}
return ((Number) lhs).longValue() + ((Number) rhs).longValue();
Contributor: This change makes combine no longer work on nulls; was that not needed for some reason?

Author: Reverted

}
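
For reference, the reverted state keeps the null handling from the pre-existing combine(). This is a restatement of the removed lines shown above, not new logic:

@Override
public Object combine(Object lhs, Object rhs)
{
  // Treat a missing partial result as zero or pass-through instead of dereferencing null.
  if (lhs == null && rhs == null) {
    return 0L;
  }
  if (rhs == null) {
    return ((Number) lhs).longValue();
  }
  if (lhs == null) {
    return ((Number) rhs).longValue();
  }
  return ((Number) lhs).longValue() + ((Number) rhs).longValue();
}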

@@ -57,6 +57,7 @@ public void aggregate(ByteBuffer buf, int position)
int index = row.get(i);
mutableBitmap.add(index);
}

buf.putLong(position, mutableBitmap.size());
}

@@ -25,6 +25,8 @@
import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.query.aggregation.distinctcount.sql.SegmentDistinctSqlAggregator;
import org.apache.druid.sql.guice.SqlBindings;

import java.util.List;

@@ -45,5 +45,6 @@ public List<? extends Module> getJacksonModules()
@Override
public void configure(Binder binder)
{
SqlBindings.addAggregator(binder, SegmentDistinctSqlAggregator.class);
}
}
@@ -0,0 +1,157 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.query.aggregation.distinctcount.sql;

import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlAggFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.type.InferTypes;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.Optionality;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.distinctcount.DistinctCountAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
import org.apache.druid.sql.calcite.aggregation.SqlAggregator;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry;

import javax.annotation.Nullable;
import java.util.List;



public class SegmentDistinctSqlAggregator implements SqlAggregator
{
private static final SqlAggFunction FUNCTION_INSTANCE = new SegmentDistinctAggFunction();
private static final String NAME = "SEGMENT_DISTINCT";

@Override
public SqlAggFunction calciteFunction()
{
return FUNCTION_INSTANCE;
}

@Nullable
@Override
public Aggregation toDruidAggregation(
PlannerContext plannerContext,
RowSignature rowSignature,
VirtualColumnRegistry virtualColumnRegistry,
RexBuilder rexBuilder,
String name,
AggregateCall aggregateCall,
Project project,
List<Aggregation> list,
boolean finalizeAggregations)
{

// Don't use Aggregations.getArgumentsForSimpleAggregator, since it won't let us use direct column access
// for string columns.
final RexNode columnRexNode = Expressions.fromFieldAccess(
rowSignature,
project,
aggregateCall.getArgList().get(0)
);

final DruidExpression columnArg = Expressions.toDruidExpression(plannerContext, rowSignature, columnRexNode);
if (columnArg == null) {
return null;
}

final AggregatorFactory aggregatorFactory;
final String aggregatorName = finalizeAggregations ? Calcites.makePrefixedName(name, "a") : name;

if (columnArg.isDirectColumnAccess()
&& rowSignature.getColumnType(columnArg.getDirectColumn()).map(type -> type.is(ValueType.COMPLEX)).orElse(false)) {
aggregatorFactory = new DistinctCountAggregatorFactory(name, columnArg.getDirectColumn(), null);
} else {
final RelDataType dataType = columnRexNode.getType();
final ColumnType inputType = Calcites.getColumnTypeForRelDataType(dataType);

if (inputType == null) {
throw new ISE(
Contributor: You should use org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException instead of ISE. Please refer to the class documentation for why the former is preferred (a sketch of the swap follows this check).

Contributor: Also, can you please explain why this inputType check is required? If we don't create the dimensionSpec below (as mentioned in another comment of mine), we probably won't run into an error with inputType being null in this code. Would nullity of inputType cause any issue in the aggregation, and if so, can you please update with a comment?

"Cannot translate sqlTypeName[%s] to Druid type for field[%s]",
dataType.getSqlTypeName(),
aggregatorName
);
}
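
A minimal sketch of the exception swap the reviewer asks for, assuming UnsupportedSQLQueryException (from org.apache.druid.sql.calcite.planner) accepts the same format-string-plus-arguments constructor that ISE does:

// Hypothetical replacement for the throw above; it would also need
// import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException;
if (inputType == null) {
  throw new UnsupportedSQLQueryException(
      "Cannot translate sqlTypeName[%s] to Druid type for field[%s]",
      dataType.getSqlTypeName(),
      aggregatorName
  );
}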

final DimensionSpec dimensionSpec;

if (columnArg.isDirectColumnAccess()) {
dimensionSpec = columnArg.getSimpleExtraction().toDimensionSpec(null, inputType);
} else {
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
columnArg,
dataType
);
dimensionSpec = new DefaultDimensionSpec(virtualColumnName, null, inputType);
}

aggregatorFactory = new DistinctCountAggregatorFactory(name, dimensionSpec.getDimension(), null);
Contributor: It seems slightly counter-intuitive that we are creating a dimension spec in the above cases just to get dimensionSpec.getDimension() while creating the final aggregator.

Instead of Line#116, can we do dimensionName = columnArg.getSimpleExtraction().getColumn() (since it's a direct column access), and in Line#122 do dimensionName = virtualColumnName, then pass that to the aggregator factory (sketched after the else block below)?

Author: Thanks for the feedback, will try that out!

}
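
A sketch of the simplification proposed in the review thread above; the name dimensionName is illustrative, while columnArg, virtualColumnRegistry, dataType, and name are the variables already in scope in this method:

// Resolve the dimension name directly instead of building a DimensionSpec only for getDimension().
final String dimensionName;
if (columnArg.isDirectColumnAccess()) {
  dimensionName = columnArg.getSimpleExtraction().getColumn();
} else {
  dimensionName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(columnArg, dataType);
}
aggregatorFactory = new DistinctCountAggregatorFactory(name, dimensionName, null);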

return Aggregation.create(aggregatorFactory);
}

private static class SegmentDistinctAggFunction extends SqlAggFunction
{
private static final String SIGNATURE = "'" + NAME + "(column, bitMapFactory)'\n";

SegmentDistinctAggFunction()
{
super(
NAME,
null,
SqlKind.OTHER_FUNCTION,
ReturnTypes.explicit(SqlTypeName.BIGINT),
InferTypes.VARCHAR_1024,
OperandTypes.or(
OperandTypes.ANY,
OperandTypes.and(
OperandTypes.sequence(SIGNATURE, OperandTypes.ANY, OperandTypes.LITERAL),
OperandTypes.family(SqlTypeFamily.ANY, SqlTypeFamily.STRING)
Contributor: I don't see the LITERAL STRING argument being used in the function body. Is that intentional?

Author: We had a look back at some other classes that extend SqlAggFunction, particularly ApproxCountDistinctSqlAggFunction, and noticed that it doesn't take the bitmap factory argument. So we decided to simplify SEGMENT_DISTINCT in the same way (a sketch of the simplified signature follows this class). Is that OK?

)
),
SqlFunctionCategory.NUMERIC,
false,
false,
Optionality.IGNORED
);
}
}
}
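
A sketch of the simplified signature the author describes in the thread above: SEGMENT_DISTINCT takes a single column argument and drops the bitmap-factory literal, mirroring ApproxCountDistinctSqlAggFunction. This illustrates the stated direction rather than the code committed in this diff:

SegmentDistinctAggFunction()
{
  super(
      NAME,
      null,
      SqlKind.OTHER_FUNCTION,
      ReturnTypes.explicit(SqlTypeName.BIGINT),
      InferTypes.VARCHAR_1024,
      OperandTypes.ANY,          // single argument of any type; no bitmap-factory literal
      SqlFunctionCategory.NUMERIC,
      false,
      false,
      Optionality.IGNORED
  );
}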
@@ -0,0 +1,98 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.query.aggregation.distinctcount.sql;

import com.google.common.collect.ImmutableList;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.aggregation.distinctcount.DistinctCountAggregatorFactory;
import org.apache.druid.query.aggregation.distinctcount.DistinctCountDruidModule;
import org.apache.druid.query.aggregation.distinctcount.RoaringBitMapFactory;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Test;

public class SegmentDistinctSqlAggregatorTest extends BaseCalciteQueryTest
{


@Override
public void configureGuice(DruidInjectorBuilder builder)
{
super.configureGuice(builder);
builder.add(new DistinctCountDruidModule());
}

@Test
public void testDistinctCount() throws Exception
{
cannotVectorize();

testQuery(
"SELECT\n"
+ "SEGMENT_DISTINCT(dim1),"
+ " SEGMENT_DISTINCT(dim2) FILTER(WHERE dim2 <> ''),\n" // filtered
+ " SEGMENT_DISTINCT(SUBSTRING(dim2, 1, 1))" // on extractionFn
+ "FROM foo",
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
.virtualColumns(
new ExpressionVirtualColumn(
"v0",
"substring(\"dim2\", 0, 1)",
ColumnType.STRING,
TestExprMacroTable.INSTANCE
)
)
.intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
.granularity(Granularities.ALL)
.aggregators(ImmutableList.of(
new DistinctCountAggregatorFactory(
"a0",
"dim1",
new RoaringBitMapFactory()
),
new FilteredAggregatorFactory(
new DistinctCountAggregatorFactory(
"a1",
"dim2",
new RoaringBitMapFactory()
), BaseCalciteQueryTest.not(BaseCalciteQueryTest.selector("dim2", "", null))),
new DistinctCountAggregatorFactory(
"a2",
"v0",
new RoaringBitMapFactory()
)
)
)
.context(QUERY_CONTEXT_DEFAULT)
.build()),
ImmutableList.of(new Long[]{6L, 2L, 3L})
);
}
}