From 72919fbc6203c1e44cd5663303d1f78e91e2eac2 Mon Sep 17 00:00:00 2001
From: Bohdan Kazydub
Date: Tue, 22 Jan 2019 20:40:56 +0200
Subject: [PATCH] Addressed review comments

---
 .../java-exec/src/main/codegen/data/Casts.tdd  |  7 +-
 .../codegen/templates/CastUntypedNull.java     |  2 +-
 .../org/apache/drill/TestJoinNullable.java     | 19 ++++
 .../org/apache/drill/TestUntypedNull.java      | 33 ++++++++++
 .../drill/exec/fn/impl/TestCastFunctions.java  | 65 +++++++++++++++++--
 .../drill/exec/vector/UntypedNullVector.java   |  6 ++
 6 files changed, 120 insertions(+), 12 deletions(-)

diff --git a/exec/java-exec/src/main/codegen/data/Casts.tdd b/exec/java-exec/src/main/codegen/data/Casts.tdd
index f4df301df24..7652ab79958 100644
--- a/exec/java-exec/src/main/codegen/data/Casts.tdd
+++ b/exec/java-exec/src/main/codegen/data/Casts.tdd
@@ -169,6 +169,7 @@
 
    {from: "NullableVarBinary", to: "NullableVarDecimal", major: "NullableVarCharDecimalComplex"},
 
+   {from: "UntypedNull", to: "Bit", major: "UntypedNull"},
    {from: "UntypedNull", to: "TinyInt", major: "UntypedNull"},
    {from: "UntypedNull", to: "Int", major: "UntypedNull"},
    {from: "UntypedNull", to: "BigInt", major: "UntypedNull"},
@@ -183,10 +184,6 @@
    {from: "UntypedNull", to: "VarBinary", major: "UntypedNull"},
    {from: "UntypedNull", to: "VarChar", major: "UntypedNull"},
    {from: "UntypedNull", to: "Var16Char", major: "UntypedNull"},
-   {from: "UntypedNull", to: "VarDecimal", major: "UntypedNull"},
-   {from: "UntypedNull", to: "Decimal9", major: "UntypedNull"},
-   {from: "UntypedNull", to: "Decimal18", major: "UntypedNull"},
-   {from: "UntypedNull", to: "Decimal28Sparse", major: "UntypedNull"},
-   {from: "UntypedNull", to: "Decimal38Sparse", major: "UntypedNull"},
+   {from: "UntypedNull", to: "VarDecimal", major: "UntypedNull"}
  ]
 }
diff --git a/exec/java-exec/src/main/codegen/templates/CastUntypedNull.java b/exec/java-exec/src/main/codegen/templates/CastUntypedNull.java
index 66b74e259e4..4a22337b826 100644
--- a/exec/java-exec/src/main/codegen/templates/CastUntypedNull.java
+++ b/exec/java-exec/src/main/codegen/templates/CastUntypedNull.java
@@ -41,7 +41,7 @@ public class Cast${type.from}${type.to} implements DrillSimpleFunc {
 
   @Param ${type.from}Holder in;
-  <#if type.to.contains("Decimal")>
+  <#if type.to == "VarDecimal">
   @Param IntHolder precision;
   @Param IntHolder scale;
   <#elseif type.to == "VarChar" || type.to == "VarBinary" || type.to == "Var16Char">
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java b/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
index 949acf3f713..13f5dd8e07d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
@@ -556,6 +556,25 @@ public void testMixedEqualAndEqualOrMergeJoin() throws Exception {
     }
   }
 
+  // Full join with USING clause uses COALESCE internally
+  @Test // DRILL-6962
+  public void testFullJoinUsingUntypedNullColumn() throws Exception {
+    try {
+      enableJoin(true, true);
+      String query = "select * from " +
+          "(select n_nationkey, n_name, coalesce(unk1, unk2) as not_exists from cp.`tpch/nation.parquet`) t1 full join " +
+          "(select r_name, r_comment, coalesce(unk1, unk2) as not_exists from cp.`tpch/region.parquet`) t2 " +
+          "using (not_exists)";
+      testBuilder()
+          .sqlQuery(query)
+          .unOrdered()
+          .expectsNumRecords(30)
+          .go();
+    } finally {
+      resetJoinOptions();
+    }
+  }
+
   public void nullMixedComparatorEqualJoinHelper(final String query) throws Exception {
     testBuilder()
         .sqlQuery(query)
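For orientation only, and not part of the patch itself: each {from: "UntypedNull", to: ...} entry in Casts.tdd drives one FreeMarker expansion of the CastUntypedNull.java template, so routing every decimal target through the single VarDecimal entry is what lets the template ask for precision and scale only when type.to == "VarDecimal". The sketch below shows roughly the shape of such a generated class for the UntypedNull to VarDecimal case; the @Output holder, the eval body, and the omitted @FunctionTemplate annotation are assumptions based on Drill's usual cast-function conventions, not text taken from this diff.

// Rough sketch of the generated cast for UntypedNull -> VarDecimal (illustration only,
// not produced verbatim by this patch; the real class also carries a @FunctionTemplate
// annotation, omitted here).
import org.apache.drill.exec.expr.DrillSimpleFunc;
import org.apache.drill.exec.expr.annotations.Output;
import org.apache.drill.exec.expr.annotations.Param;
import org.apache.drill.exec.expr.holders.IntHolder;
import org.apache.drill.exec.expr.holders.NullableVarDecimalHolder;
import org.apache.drill.exec.expr.holders.UntypedNullHolder;

public class CastUntypedNullVarDecimal implements DrillSimpleFunc {

  @Param UntypedNullHolder in;
  // Emitted only because type.to == "VarDecimal": the cast carries the target precision and scale.
  @Param IntHolder precision;
  @Param IntHolder scale;
  @Output NullableVarDecimalHolder out;   // assumed output holder; the template decides the real one

  public void setup() { }

  public void eval() {
    // Casting an untyped null can only ever yield a NULL of the target type.
    out.isSet = 0;
  }
}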
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java b/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
index 5f6c297bc18..521531c5bd7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
@@ -18,7 +18,11 @@
 package org.apache.drill;
 
 import org.apache.drill.categories.SqlFunctionTest;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
 import org.apache.drill.test.ClusterTest;
@@ -36,6 +40,8 @@
 @Category(SqlFunctionTest.class)
 public class TestUntypedNull extends ClusterTest {
 
+  private static final TypeProtos.MajorType UNTYPED_NULL_TYPE = Types.optional(TypeProtos.MinorType.NULL);
+
   @BeforeClass
   public static void setup() throws Exception {
     ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
@@ -109,6 +115,15 @@ public void testTypeAndMode() throws Exception {
   @Test
   public void testCoalesceOnNotExistentColumns() throws Exception {
     String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` limit 5";
+    BatchSchema expectedSchema = new SchemaBuilder()
+        .add("coal", UNTYPED_NULL_TYPE)
+        .build();
+
+    testBuilder()
+        .sqlQuery(query)
+        .schemaBaseLine(expectedSchema)
+        .go();
+
     testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -120,6 +135,15 @@ public void testCoalesceOnNotExistentColumnsWithGroupBy() throws Exception {
     String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` group by 1";
+    BatchSchema expectedSchema = new SchemaBuilder()
+        .add("coal", UNTYPED_NULL_TYPE)
+        .build();
+
+    testBuilder()
+        .sqlQuery(query)
+        .schemaBaseLine(expectedSchema)
+        .go();
+
     testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -131,6 +155,15 @@ public void testCoalesceOnNotExistentColumnsWithOrderBy() throws Exception {
     String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` order by 1 limit 5";
+    BatchSchema expectedSchema = new SchemaBuilder()
+        .add("coal", UNTYPED_NULL_TYPE)
+        .build();
+
+    testBuilder()
+        .sqlQuery(query)
+        .schemaBaseLine(expectedSchema)
+        .go();
+
     testBuilder()
         .sqlQuery(query)
         .unOrdered()
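A side note on the UNTYPED_NULL_TYPE constant introduced above (this snippet is not part of the patch): Types.optional(TypeProtos.MinorType.NULL) is simply shorthand for building the protobuf MajorType by hand, which is the type the three schema baselines now expect for the coalesced column.

// Reference sketch only, not part of the patch: the MajorType the tests expect for the
// coalesced column, spelled out with the protobuf builder instead of the Types helper.
import org.apache.drill.common.types.TypeProtos;

public class UntypedNullTypeSketch {
  public static void main(String[] args) {
    TypeProtos.MajorType untypedNull = TypeProtos.MajorType.newBuilder()
        .setMinorType(TypeProtos.MinorType.NULL)  // untyped null uses the NULL minor type
        .setMode(TypeProtos.DataMode.OPTIONAL)    // and is always nullable (OPTIONAL mode)
        .build();
    // Prints the protobuf text form, e.g. "minor_type: NULL" and "mode: OPTIONAL".
    System.out.println(untypedNull);
  }
}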
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
index 1e1c6d90bda..73b4b941ad9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
@@ -21,6 +21,7 @@
 import java.time.LocalDate;
 import java.time.LocalDateTime;
 import java.time.LocalTime;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -30,8 +31,13 @@
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatchLoader;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.vector.IntervalYearVector;
 import org.apache.drill.test.ClusterFixture;
@@ -46,6 +52,17 @@
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
 
+import static org.apache.drill.common.types.TypeProtos.MinorType.BIGINT;
+import static org.apache.drill.common.types.TypeProtos.MinorType.BIT;
+import static org.apache.drill.common.types.TypeProtos.MinorType.DATE;
+import static org.apache.drill.common.types.TypeProtos.MinorType.FLOAT4;
+import static org.apache.drill.common.types.TypeProtos.MinorType.FLOAT8;
+import static org.apache.drill.common.types.TypeProtos.MinorType.INT;
+import static org.apache.drill.common.types.TypeProtos.MinorType.INTERVALYEAR;
+import static org.apache.drill.common.types.TypeProtos.MinorType.TIME;
+import static org.apache.drill.common.types.TypeProtos.MinorType.TIMESTAMP;
+import static org.apache.drill.common.types.TypeProtos.MinorType.VARCHAR;
+import static org.apache.drill.common.types.TypeProtos.MinorType.VARDECIMAL;
 import static org.apache.drill.exec.ExecTest.mockUtcDateTimeZone;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.hasItem;
@@ -752,18 +769,54 @@ public void testCastTimeLiteralInFilter() throws Exception {
 
   @Test
   public void testCastUntypedNull() throws Exception {
-    String[] types = new String[] {
-        "BOOLEAN", "INT", "BIGINT", "FLOAT", "DOUBLE", "DATE", "TIME", "TIMESTAMP", "INTERVAL MONTH",
-        "INTERVAL YEAR", "VARBINARY", "VARCHAR", "DECIMAL(9)", "DECIMAL(18)", "DECIMAL(28)", "DECIMAL(38)"
-    };
     String query = "select cast(coalesce(unk1, unk2) as %s) as coal from cp.`tpch/nation.parquet` limit 1";
-    for (String type : types) {
+
+    Map<String, TypeProtos.MajorType> typesMap = createCastTypeMap();
+    for (Map.Entry<String, TypeProtos.MajorType> entry : typesMap.entrySet()) {
+      String q = String.format(query, entry.getKey());
+
+      MaterializedField field = MaterializedField.create("coal", entry.getValue());
+      BatchSchema expectedSchema = new SchemaBuilder()
+          .add(field)
+          .build();
+
+      // Validate schema
+      testBuilder()
+          .sqlQuery(q)
+          .schemaBaseLine(expectedSchema)
+          .go();
+
+      // Validate result
       testBuilder()
-          .sqlQuery(String.format(query, type))
+          .sqlQuery(q)
          .unOrdered()
          .baselineColumns("coal")
          .baselineValues(new Object[] {null})
          .go();
     }
   }
+
+  private static Map<String, TypeProtos.MajorType> createCastTypeMap() {
+    TypeProtos.DataMode mode = TypeProtos.DataMode.OPTIONAL;
+    Map<String, TypeProtos.MajorType> typesMap = new HashMap<>();
+    typesMap.put("BOOLEAN", Types.withMode(BIT, mode));
+    typesMap.put("INT", Types.withMode(INT, mode));
+    typesMap.put("BIGINT", Types.withMode(BIGINT, mode));
+    typesMap.put("FLOAT", Types.withMode(FLOAT4, mode));
+    typesMap.put("DOUBLE", Types.withMode(FLOAT8, mode));
+    typesMap.put("DATE", Types.withMode(DATE, mode));
+    typesMap.put("TIME", Types.withMode(TIME, mode));
+    typesMap.put("TIMESTAMP", Types.withMode(TIMESTAMP, mode));
+    typesMap.put("INTERVAL MONTH", Types.withMode(INTERVALYEAR, mode));
+    typesMap.put("INTERVAL YEAR", Types.withMode(INTERVALYEAR, mode));
+    // todo: uncomment after DRILL-6993 is resolved
+    // typesMap.put("VARBINARY(31)", Types.withPrecision(VARBINARY, mode, 31));
+    typesMap.put("VARCHAR(26)", Types.withPrecision(VARCHAR, mode, 26));
+    typesMap.put("DECIMAL(9, 2)", Types.withScaleAndPrecision(VARDECIMAL, mode, 2, 9));
+    typesMap.put("DECIMAL(18, 5)", Types.withScaleAndPrecision(VARDECIMAL, mode, 5, 18));
+    typesMap.put("DECIMAL(28, 3)", Types.withScaleAndPrecision(VARDECIMAL, mode, 3, 28));
+    typesMap.put("DECIMAL(38, 2)", Types.withScaleAndPrecision(VARDECIMAL, mode, 2, 38));
+
+    return typesMap;
+  }
 }
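One detail of createCastTypeMap() above that is easy to misread: Types.withScaleAndPrecision takes the scale before the precision, while the SQL spelling is DECIMAL(precision, scale). Below is a small standalone sketch (not part of the patch) of the mapping behind the "DECIMAL(9, 2)" entry.

// Standalone sketch, not part of the patch: SQL DECIMAL(9, 2) maps to an OPTIONAL VARDECIMAL
// with precision 9 and scale 2; note that Types.withScaleAndPrecision takes the scale first.
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types;

public class DecimalTypeMappingExample {
  public static void main(String[] args) {
    TypeProtos.MajorType decimal9of2 = Types.withScaleAndPrecision(
        TypeProtos.MinorType.VARDECIMAL,
        TypeProtos.DataMode.OPTIONAL,
        2,   // scale
        9);  // precision
    // Prints "9, 2": the precision/scale carried by the MajorType the test expects.
    System.out.println(decimal9of2.getPrecision() + ", " + decimal9of2.getScale());
  }
}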
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/UntypedNullVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/UntypedNullVector.java
index 394aa75779d..b83d87201f0 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/UntypedNullVector.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/UntypedNullVector.java
@@ -126,6 +126,7 @@ public TransferPair makeTransferPair(ValueVector to) {
 
   public void transferTo(UntypedNullVector target) {
     target.valueCount = valueCount;
+    clear();
   }
 
   public void splitAndTransferTo(int startIndex, int length, UntypedNullVector target) { }
@@ -173,6 +174,11 @@ public void copyFromSafe(int fromIndex, int thisIndex, UntypedNullVector from) {
 
   public void copyEntry(int toIndex, ValueVector from, int fromIndex) {
   }
+  @Override
+  public void clear() {
+    valueCount = 0;
+  }
+
   public final class Accessor extends BaseAccessor {
     @Override
     public int getValueCount() {