Addressed review comments
KazydubB committed Jan 23, 2019
1 parent 3b10351 commit 72919fb
Showing 6 changed files with 120 additions and 12 deletions.
7 changes: 2 additions & 5 deletions exec/java-exec/src/main/codegen/data/Casts.tdd
@@ -169,6 +169,7 @@

{from: "NullableVarBinary", to: "NullableVarDecimal", major: "NullableVarCharDecimalComplex"},

{from: "UntypedNull", to: "Bit", major: "UntypedNull"},
{from: "UntypedNull", to: "TinyInt", major: "UntypedNull"},
{from: "UntypedNull", to: "Int", major: "UntypedNull"},
{from: "UntypedNull", to: "BigInt", major: "UntypedNull"},
@@ -183,10 +184,6 @@
{from: "UntypedNull", to: "VarBinary", major: "UntypedNull"},
{from: "UntypedNull", to: "VarChar", major: "UntypedNull"},
{from: "UntypedNull", to: "Var16Char", major: "UntypedNull"},
{from: "UntypedNull", to: "VarDecimal", major: "UntypedNull"},
{from: "UntypedNull", to: "Decimal9", major: "UntypedNull"},
{from: "UntypedNull", to: "Decimal18", major: "UntypedNull"},
{from: "UntypedNull", to: "Decimal28Sparse", major: "UntypedNull"},
{from: "UntypedNull", to: "Decimal38Sparse", major: "UntypedNull"},
{from: "UntypedNull", to: "VarDecimal", major: "UntypedNull"}
]
}
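Each entry above is fed to the FreeMarker template in the next file, which generates one cast function per (from, to) pair. As a rough sketch, the {from: "UntypedNull", to: "VarDecimal"} entry would expand to something like the following; the class name, annotation attributes, and holder packages here are illustrative assumptions, not the actual generated source:

import org.apache.drill.exec.expr.DrillSimpleFunc;
import org.apache.drill.exec.expr.annotations.FunctionTemplate;
import org.apache.drill.exec.expr.annotations.Output;
import org.apache.drill.exec.expr.annotations.Param;
import org.apache.drill.exec.expr.holders.IntHolder;
import org.apache.drill.exec.expr.holders.NullableVarDecimalHolder;
import org.apache.drill.exec.vector.UntypedNullHolder;

@FunctionTemplate(name = "castVARDECIMAL", scope = FunctionTemplate.FunctionScope.SIMPLE,
    nulls = FunctionTemplate.NullHandling.INTERNAL)
public class CastUntypedNullVarDecimal implements DrillSimpleFunc {
  @Param UntypedNullHolder in;
  @Param IntHolder precision; // only the VarDecimal target takes precision and scale
  @Param IntHolder scale;
  @Output NullableVarDecimalHolder out;

  public void setup() { }

  public void eval() {
    // Casting an untyped null always produces SQL NULL in the target type.
    out.isSet = 0;
  }
}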
@@ -41,7 +41,7 @@
public class Cast${type.from}${type.to} implements DrillSimpleFunc {

@Param ${type.from}Holder in;
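<#-- Only the VarDecimal target still takes precision and scale parameters, now that the
     fixed-width Decimal9/18/28/38 entries have been removed from Casts.tdd. -->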
<#if type.to.contains("Decimal")>
<#if type.to == "VarDecimal">
@Param IntHolder precision;
@Param IntHolder scale;
<#elseif type.to == "VarChar" || type.to == "VarBinary" || type.to == "Var16Char">
@@ -556,6 +556,25 @@ public void testMixedEqualAndEqualOrMergeJoin() throws Exception {
}
}

// Full join with USING clause uses COALESCE internally
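// nation.parquet has 25 rows and region.parquet has 5; the join key is null on both
// sides, so no rows match and the full join produces 25 + 5 = 30 records.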
@Test // DRILL-6962
public void testFullJoinUsingUntypedNullColumn() throws Exception {
try {
enableJoin(true, true);
String query = "select * from " +
"(select n_nationkey, n_name, coalesce(unk1, unk2) as not_exists from cp.`tpch/nation.parquet`) t1 full join " +
"(select r_name, r_comment, coalesce(unk1, unk2) as not_exists from cp.`tpch/region.parquet`) t2 " +
"using (not_exists)";
testBuilder()
.sqlQuery(query)
.unOrdered()
.expectsNumRecords(30)
.go();
} finally {
resetJoinOptions();
}
}

public void nullMixedComparatorEqualJoinHelper(final String query) throws Exception {
testBuilder()
.sqlQuery(query)
33 changes: 33 additions & 0 deletions exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
@@ -18,7 +18,11 @@
package org.apache.drill;

import org.apache.drill.categories.SqlFunctionTest;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.ClusterFixtureBuilder;
import org.apache.drill.test.ClusterTest;
@@ -36,6 +40,8 @@
@Category(SqlFunctionTest.class)
public class TestUntypedNull extends ClusterTest {

private static final TypeProtos.MajorType UNTYPED_NULL_TYPE = Types.optional(TypeProtos.MinorType.NULL);

@BeforeClass
public static void setup() throws Exception {
ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
@@ -109,6 +115,15 @@ public void testTypeAndMode() throws Exception {
@Test
public void testCoalesceOnNotExistentColumns() throws Exception {
String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` limit 5";
BatchSchema expectedSchema = new SchemaBuilder()
.add("coal", UNTYPED_NULL_TYPE)
.build();

testBuilder()
.sqlQuery(query)
.schemaBaseLine(expectedSchema)
.go();

testBuilder()
.sqlQuery(query)
.unOrdered()
@@ -120,6 +135,15 @@ public void testCoalesceOnNotExistentColumns() throws Exception {
@Test
public void testCoalesceOnNotExistentColumnsWithGroupBy() throws Exception {
String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` group by 1";
BatchSchema expectedSchema = new SchemaBuilder()
.add("coal", UNTYPED_NULL_TYPE)
.build();

testBuilder()
.sqlQuery(query)
.schemaBaseLine(expectedSchema)
.go();

testBuilder()
.sqlQuery(query)
.unOrdered()
@@ -131,6 +155,15 @@ public void testCoalesceOnNotExistentColumnsWithGroupBy() throws Exception {
@Test
public void testCoalesceOnNotExistentColumnsWithOrderBy() throws Exception {
String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` order by 1 limit 5";
BatchSchema expectedSchema = new SchemaBuilder()
.add("coal", UNTYPED_NULL_TYPE)
.build();

testBuilder()
.sqlQuery(query)
.schemaBaseLine(expectedSchema)
.go();

testBuilder()
.sqlQuery(query)
.unOrdered()
@@ -21,6 +21,7 @@
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -30,8 +31,13 @@
import org.apache.drill.categories.UnlikelyTest;
import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.vector.IntervalYearVector;
import org.apache.drill.test.ClusterFixture;
@@ -46,6 +52,17 @@
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.drill.shaded.guava.com.google.common.collect.Maps;

import static org.apache.drill.common.types.TypeProtos.MinorType.BIGINT;
import static org.apache.drill.common.types.TypeProtos.MinorType.BIT;
import static org.apache.drill.common.types.TypeProtos.MinorType.DATE;
import static org.apache.drill.common.types.TypeProtos.MinorType.FLOAT4;
import static org.apache.drill.common.types.TypeProtos.MinorType.FLOAT8;
import static org.apache.drill.common.types.TypeProtos.MinorType.INT;
import static org.apache.drill.common.types.TypeProtos.MinorType.INTERVALYEAR;
import static org.apache.drill.common.types.TypeProtos.MinorType.TIME;
import static org.apache.drill.common.types.TypeProtos.MinorType.TIMESTAMP;
import static org.apache.drill.common.types.TypeProtos.MinorType.VARCHAR;
import static org.apache.drill.common.types.TypeProtos.MinorType.VARDECIMAL;
import static org.apache.drill.exec.ExecTest.mockUtcDateTimeZone;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.hasItem;
@@ -752,18 +769,54 @@ public void testCastTimeLiteralInFilter() throws Exception {

@Test
public void testCastUntypedNull() throws Exception {
String[] types = new String[] {
"BOOLEAN", "INT", "BIGINT", "FLOAT", "DOUBLE", "DATE", "TIME", "TIMESTAMP", "INTERVAL MONTH",
"INTERVAL YEAR", "VARBINARY", "VARCHAR", "DECIMAL(9)", "DECIMAL(18)", "DECIMAL(28)", "DECIMAL(38)"
};
String query = "select cast(coalesce(unk1, unk2) as %s) as coal from cp.`tpch/nation.parquet` limit 1";
for (String type : types) {

Map<String, TypeProtos.MajorType> typesMap = createCastTypeMap();
for (Map.Entry<String, TypeProtos.MajorType> entry : typesMap.entrySet()) {
String q = String.format(query, entry.getKey());

MaterializedField field = MaterializedField.create("coal", entry.getValue());
BatchSchema expectedSchema = new SchemaBuilder()
.add(field)
.build();

// Validate schema
testBuilder()
.sqlQuery(q)
.schemaBaseLine(expectedSchema)
.go();

// Validate result
testBuilder()
.sqlQuery(String.format(query, type))
.sqlQuery(q)
.unOrdered()
.baselineColumns("coal")
.baselineValues(new Object[] {null})
.go();
}
}

private static Map<String, TypeProtos.MajorType> createCastTypeMap() {
TypeProtos.DataMode mode = TypeProtos.DataMode.OPTIONAL;
Map<String, TypeProtos.MajorType> typesMap = new HashMap<>();
typesMap.put("BOOLEAN", Types.withMode(BIT, mode));
typesMap.put("INT", Types.withMode(INT, mode));
typesMap.put("BIGINT", Types.withMode(BIGINT, mode));
typesMap.put("FLOAT", Types.withMode(FLOAT4, mode));
typesMap.put("DOUBLE", Types.withMode(FLOAT8, mode));
typesMap.put("DATE", Types.withMode(DATE, mode));
typesMap.put("TIME", Types.withMode(TIME, mode));
typesMap.put("TIMESTAMP", Types.withMode(TIMESTAMP, mode));
typesMap.put("INTERVAL MONTH", Types.withMode(INTERVALYEAR, mode));
typesMap.put("INTERVAL YEAR", Types.withMode(INTERVALYEAR, mode));
// todo: uncomment after DRILL-6993 is resolved
// typesMap.put("VARBINARY(31)", Types.withPrecision(VARBINARY, mode, 31));
typesMap.put("VARCHAR(26)", Types.withPrecision(VARCHAR, mode, 26));
typesMap.put("DECIMAL(9, 2)", Types.withScaleAndPrecision(VARDECIMAL, mode, 2, 9));
typesMap.put("DECIMAL(18, 5)", Types.withScaleAndPrecision(VARDECIMAL, mode, 5, 18));
typesMap.put("DECIMAL(28, 3)", Types.withScaleAndPrecision(VARDECIMAL, mode, 3, 28));
typesMap.put("DECIMAL(38, 2)", Types.withScaleAndPrecision(VARDECIMAL, mode, 2, 38));

return typesMap;
}
}
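For reference, the loop in testCastUntypedNull issues one query per map entry, e.g.

select cast(coalesce(unk1, unk2) as DECIMAL(18, 5)) as coal from cp.`tpch/nation.parquet` limit 1

and asserts both that the baseline schema matches the expected type (an optional VARDECIMAL with precision 18 and scale 5 for this entry) and that the single returned value is NULL.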
@@ -126,6 +126,7 @@ public TransferPair makeTransferPair(ValueVector to) {

public void transferTo(UntypedNullVector target) {
target.valueCount = valueCount;
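// A transfer hands this vector's state to the target, so the source clears itself,
// matching transferTo in other vector implementations.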
clear();
}

public void splitAndTransferTo(int startIndex, int length, UntypedNullVector target) { }
@@ -173,6 +174,11 @@ public void copyFromSafe(int fromIndex, int thisIndex, UntypedNullVector from) {
public void copyEntry(int toIndex, ValueVector from, int fromIndex) {
}

@Override
public void clear() {
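// This vector allocates no per-value data buffers, so clearing only resets the count.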
valueCount = 0;
}

public final class Accessor extends BaseAccessor {
@Override
public int getValueCount() {
