From e3642de28a5f03c702433fe581819acced7847a7 Mon Sep 17 00:00:00 2001 From: spanchamia Date: Wed, 29 Jul 2015 16:59:31 -0700 Subject: [PATCH 1/4] DRILL-3364: Prune scan range if the filter is on the leading field with byte comparable encoding The change adds support to perform row-key range pruning when the row-key prefix is interpreted as UINT4_BE, TIMESTAMP_EPOCH_BE, TIME_EPOCH_BE, DATE_EPOCH_BE, UINT8_BE encoded. Testing Done: Added unit-tests for the new feature, also ran all existing unit-tests to make sure there is no regression. --- .../hbase/CompareFunctionsProcessor.java | 335 +++++++++++++++--- .../exec/store/hbase/HBaseFilterBuilder.java | 21 ++ .../apache/drill/hbase/HBaseTestsSuite.java | 17 +- .../drill/hbase/TestHBaseFilterPushDown.java | 167 +++++++++ .../drill/hbase/TestTableGenerator.java | 125 +++++++ .../conv/TimeStampEpochBEConvertFrom.java | 45 +++ .../impl/conv/TimeStampEpochBEConvertTo.java | 54 +++ .../impl/conv/TimeStampEpochConvertFrom.java | 47 +++ .../fn/impl/conv/TimeStampEpochConvertTo.java | 55 +++ .../expr/fn/impl/conv/UInt4BEConvertFrom.java | 45 +++ .../expr/fn/impl/conv/UInt4BEConvertTo.java | 54 +++ .../expr/fn/impl/conv/UInt4ConvertFrom.java | 46 +++ .../expr/fn/impl/conv/UInt4ConvertTo.java | 55 +++ 13 files changed, 1007 insertions(+), 59 deletions(-) create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochBEConvertFrom.java create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochBEConvertTo.java create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochConvertFrom.java create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochConvertTo.java create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4BEConvertFrom.java create mode 100644 
exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4BEConvertTo.java create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4ConvertFrom.java create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4ConvertTo.java diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/CompareFunctionsProcessor.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/CompareFunctionsProcessor.java index 803f5209458..87eb42ea996 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/CompareFunctionsProcessor.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/CompareFunctionsProcessor.java @@ -20,6 +20,7 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; +import java.nio.ByteBuffer; import java.nio.ByteOrder; import org.apache.drill.common.expression.CastExpression; @@ -35,7 +36,11 @@ import org.apache.drill.common.expression.ValueExpressions.LongExpression; import org.apache.drill.common.expression.ValueExpressions.QuotedString; import org.apache.drill.common.expression.ValueExpressions.TimeExpression; +import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression; import org.apache.drill.common.expression.visitors.AbstractExprVisitor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.PrefixFilter; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableMap; @@ -48,6 +53,14 @@ class CompareFunctionsProcessor extends AbstractExprVisitor= 2 ? call.args.get(1) : null; + LogicalExpression valueArg2 = call.args.size() >= 3 ? 
call.args.get(2) : null; + + if (((nameArg instanceof SchemaPath) == false) || + (valueArg1 == null) || ((valueArg1 instanceof IntExpression) == false) || + (valueArg2 == null) || ((valueArg2 instanceof IntExpression) == false)) { + return false; + } + + boolean isRowKey = ((SchemaPath)nameArg).getAsUnescapedPath().equals(DrillHBaseConstants.ROW_KEY); + int offset = ((IntExpression)valueArg1).getInt(); + + if (!isRowKey || (offset != 1)) { + return false; + } + + this.path = (SchemaPath)nameArg; + prefixLength = ((IntExpression)valueArg2).getInt(); + this.isRowKeyPrefixComparison = true; + return visitRowKeyPrefixConvertExpression(e, prefixLength, valueArg); + } + + if (e.getInput() instanceof SchemaPath) { + ByteBuf bb = null; + switch (encodingType) { + case "INT_BE": + case "INT": + case "UINT_BE": + case "UINT": + case "UINT4_BE": + case "UINT4": + if (valueArg instanceof IntExpression + && (isEqualityFn || encodingType.startsWith("U"))) { + bb = newByteBuf(4, encodingType.endsWith("_BE")); + bb.writeInt(((IntExpression)valueArg).getInt()); + } + break; + case "BIGINT_BE": + case "BIGINT": + case "UINT8_BE": + case "UINT8": + if (valueArg instanceof LongExpression + && (isEqualityFn || encodingType.startsWith("U"))) { + bb = newByteBuf(8, encodingType.endsWith("_BE")); + bb.writeLong(((LongExpression)valueArg).getLong()); + } + break; + case "FLOAT": + if (valueArg instanceof FloatExpression && isEqualityFn) { bb = newByteBuf(4, true); - bb.writeFloat(((FloatExpression)valueArg).getFloat()); + bb.writeFloat(((FloatExpression)valueArg).getFloat()); + } + break; + case "DOUBLE": + if (valueArg instanceof DoubleExpression && isEqualityFn) { + bb = newByteBuf(8, true); + bb.writeDouble(((DoubleExpression)valueArg).getDouble()); + } + break; + case "TIME_EPOCH": + case "TIME_EPOCH_BE": + if (valueArg instanceof TimeExpression) { + bb = newByteBuf(8, encodingType.endsWith("_BE")); + bb.writeLong(((TimeExpression)valueArg).getTime()); + } + break; + case "DATE_EPOCH": 
+ case "DATE_EPOCH_BE": + if (valueArg instanceof DateExpression) { + bb = newByteBuf(8, encodingType.endsWith("_BE")); + bb.writeLong(((DateExpression)valueArg).getDate()); + } + break; + case "BOOLEAN_BYTE": + if (valueArg instanceof BooleanExpression) { + bb = newByteBuf(1, false /* does not matter */); + bb.writeByte(((BooleanExpression)valueArg).getBoolean() ? 1 : 0); + } + break; + case "UTF8": + // let visitSchemaPath() handle this. + return e.getInput().accept(this, valueArg); } - break; - case "DOUBLE": - if (valueArg instanceof DoubleExpression && isEqualityFn) { - bb = newByteBuf(8, true); - bb.writeDouble(((DoubleExpression)valueArg).getDouble()); + + if (bb != null) { + this.value = bb.array(); + this.path = (SchemaPath)e.getInput(); + return true; } - break; - case "TIME_EPOCH": - case "TIME_EPOCH_BE": - if (valueArg instanceof TimeExpression) { - bb = newByteBuf(8, encodingType.endsWith("_BE")); - bb.writeLong(((TimeExpression)valueArg).getTime()); + } + } + return false; + } + + private Boolean visitRowKeyPrefixConvertExpression(ConvertExpression e, + int prefixLength, LogicalExpression valueArg) { + String encodingType = e.getEncodingType(); + rowKeyPrefixStartRow = HConstants.EMPTY_START_ROW; + rowKeyPrefixStopRow = HConstants.EMPTY_START_ROW; + rowKeyPrefixFilter = null; + + if ((encodingType.compareTo("UINT4_BE") == 0) || + (encodingType.compareTo("UINT_BE") == 0)) { + if (prefixLength != 4) { + throw new RuntimeException("Invalid length(" + prefixLength + ") of row-key prefix"); + } + + int val; + if ((valueArg instanceof IntExpression) == false) { + return false; + } + + val = ((IntExpression)valueArg).getInt(); + + // For TIME_EPOCH_BE/BIGINT_BE encoding, the operators that we push-down are =, <>, <, <=, >, >= + switch (functionName) { + case "equal": + rowKeyPrefixFilter = new PrefixFilter(ByteBuffer.allocate(4).putInt(val).array()); + rowKeyPrefixStartRow = ByteBuffer.allocate(4).putInt(val).array(); + rowKeyPrefixStopRow = 
ByteBuffer.allocate(4).putInt(val + 1).array(); + return true; + case "greater_than_or_equal_to": + rowKeyPrefixStartRow = ByteBuffer.allocate(4).putInt(val).array(); + return true; + case "greater_than": + rowKeyPrefixStartRow = ByteBuffer.allocate(4).putInt(val + 1).array(); + return true; + case "less_than_or_equal_to": + rowKeyPrefixStopRow = ByteBuffer.allocate(4).putInt(val + 1).array(); + return true; + case "less_than": + rowKeyPrefixStopRow = ByteBuffer.allocate(4).putInt(val).array(); + return true; + } + + return false; + } + + if ((encodingType.compareTo("TIMESTAMP_EPOCH_BE") == 0) || + (encodingType.compareTo("TIME_EPOCH_BE") == 0) || + (encodingType.compareTo("UINT8_BE") == 0)) { + + if (prefixLength != 8) { + throw new RuntimeException("Invalid length(" + prefixLength + ") of row-key prefix"); + } + + long val; + if (encodingType.compareTo("TIME_EPOCH_BE") == 0) { + if ((valueArg instanceof TimeExpression) == false) { + return false; } - break; - case "DATE_EPOCH": - case "DATE_EPOCH_BE": - if (valueArg instanceof DateExpression) { - bb = newByteBuf(8, encodingType.endsWith("_BE")); - bb.writeLong(((DateExpression)valueArg).getDate()); + + val = ((TimeExpression)valueArg).getTime(); + } else if (encodingType.compareTo("UINT8_BE") == 0){ + if ((valueArg instanceof LongExpression) == false) { + return false; } - break; - case "BOOLEAN_BYTE": - if (valueArg instanceof BooleanExpression) { - bb = newByteBuf(1, false /* does not matter */); - bb.writeByte(((BooleanExpression)valueArg).getBoolean() ? 1 : 0); + + val = ((LongExpression)valueArg).getLong(); + } else if (encodingType.compareTo("TIMESTAMP_EPOCH_BE") == 0) { + if ((valueArg instanceof TimeStampExpression) == false) { + return false; } - break; - case "UTF8": - // let visitSchemaPath() handle this. - return e.getInput().accept(this, valueArg); + + val = ((TimeStampExpression)valueArg).getTimeStamp(); + } else { + // Should not reach here. 
+ return false; } - if (bb != null) { - this.value = bb.array(); - this.path = (SchemaPath)e.getInput(); + // For TIME_EPOCH_BE/BIGINT_BE encoding, the operators that we push-down are =, <>, <, <=, >, >= + switch (functionName) { + case "equal": + rowKeyPrefixFilter = new PrefixFilter(ByteBuffer.allocate(8).putLong(val).array()); + rowKeyPrefixStartRow = ByteBuffer.allocate(8).putLong(val).array(); + rowKeyPrefixStopRow = ByteBuffer.allocate(8).putLong(val + 1).array(); + return true; + case "greater_than_or_equal_to": + rowKeyPrefixStartRow = ByteBuffer.allocate(8).putLong(val).array(); + return true; + case "greater_than": + rowKeyPrefixStartRow = ByteBuffer.allocate(8).putLong(val + 1).array(); + return true; + case "less_than_or_equal_to": + rowKeyPrefixStopRow = ByteBuffer.allocate(8).putLong(val + 1).array(); + return true; + case "less_than": + rowKeyPrefixStopRow = ByteBuffer.allocate(8).putLong(val).array(); return true; } + + return false; } - return false; + + if (encodingType.compareTo("DATE_EPOCH_BE") == 0) { + if ((valueArg instanceof DateExpression) == false) { + return false; + } + + if (prefixLength != 8) { + throw new RuntimeException("Invalid length(" + prefixLength + ") of row-key prefix"); + } + + final long MILLISECONDS_IN_A_DAY = (long)1000 * 60 * 60 * 24; + long dateToSet; + // For DATE encoding, the operators that we push-down are =, <>, <, <=, >, >= + switch (functionName) { + case "equal": + long startDate = ((DateExpression)valueArg).getDate(); + rowKeyPrefixStartRow = ByteBuffer.allocate(8).putLong(startDate).array(); + long stopDate = ((DateExpression)valueArg).getDate() + MILLISECONDS_IN_A_DAY; + rowKeyPrefixStopRow = ByteBuffer.allocate(8).putLong(stopDate).array(); + return true; + case "greater_than_or_equal_to": + dateToSet = ((DateExpression)valueArg).getDate(); + rowKeyPrefixStartRow = ByteBuffer.allocate(8).putLong(dateToSet).array(); + return true; + case "greater_than": + dateToSet = ((DateExpression)valueArg).getDate() + 
MILLISECONDS_IN_A_DAY; + rowKeyPrefixStartRow = ByteBuffer.allocate(8).putLong(dateToSet).array(); + return true; + case "less_than_or_equal_to": + dateToSet = ((DateExpression)valueArg).getDate() + MILLISECONDS_IN_A_DAY; + rowKeyPrefixStopRow = ByteBuffer.allocate(8).putLong(dateToSet).array(); + return true; + case "less_than": + dateToSet = ((DateExpression)valueArg).getDate(); + rowKeyPrefixStopRow = ByteBuffer.allocate(8).putLong(dateToSet).array(); + return true; + } + + return false; } - @Override + return false; +} + +@Override public Boolean visitUnknown(LogicalExpression e, LogicalExpression valueArg) throws RuntimeException { return false; } @@ -237,4 +456,4 @@ private static ByteBuf newByteBuf(int size, boolean bigEndian) { .build(); } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java index b10415d5e76..f77ab1996d2 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.NullComparator; +import org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.filter.RegexStringComparator; import org.apache.hadoop.hbase.filter.RowFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; @@ -167,6 +168,10 @@ private HBaseScanSpec createHBaseScanSpec(FunctionCall call, CompareFunctionsPro return null; } + if (processor.isRowKeyPrefixComparison()) { + return createRowKeyPrefixScanSpec(call, processor); + } + CompareOp compareOp = null; boolean isNullTest = false; ByteArrayComparable comparator = new BinaryComparator(fieldValue); @@ -294,4 
+299,20 @@ private HBaseScanSpec createHBaseScanSpec(FunctionCall call, CompareFunctionsPro return null; } +private HBaseScanSpec createRowKeyPrefixScanSpec(FunctionCall call, + CompareFunctionsProcessor processor) { + byte[] startRow = processor.getRowKeyPrefixStartRow(); + byte[] stopRow = processor.getRowKeyPrefixStopRow(); + Filter filter = processor.getRowKeyPrefixFilter(); + + if (startRow != HConstants.EMPTY_START_ROW || + stopRow != HConstants.EMPTY_END_ROW || + filter != null) { + return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, filter); + } + + // else + return null; +} + } diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/HBaseTestsSuite.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/HBaseTestsSuite.java index a77baba489a..a5dbc6fc83f 100644 --- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/HBaseTestsSuite.java +++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/HBaseTestsSuite.java @@ -50,6 +50,9 @@ public class HBaseTestsSuite { protected static final String TEST_TABLE_1 = "TestTable1"; protected static final String TEST_TABLE_3 = "TestTable3"; + protected static final String TEST_TABLE_COMPOSITE_DATE = "TestTableCompositeDate"; + protected static final String TEST_TABLE_COMPOSITE_TIME = "TestTableCompositeTime"; + protected static final String TEST_TABLE_COMPOSITE_INT = "TestTableCompositeInt"; private static Configuration conf; @@ -135,7 +138,10 @@ public static HBaseTestingUtility getHBaseTestingUtility() { } private static boolean tablesExist() throws IOException { - return admin.tableExists(TEST_TABLE_1) && admin.tableExists(TEST_TABLE_3); + return admin.tableExists(TEST_TABLE_1) && admin.tableExists(TEST_TABLE_3) && + admin.tableExists(TEST_TABLE_COMPOSITE_DATE) && + admin.tableExists(TEST_TABLE_COMPOSITE_TIME) && + admin.tableExists(TEST_TABLE_COMPOSITE_INT); } private static void createTestTables() throws Exception { @@ -146,6 +152,9 @@ private static void 
createTestTables() throws Exception { */ TestTableGenerator.generateHBaseDataset1(admin, TEST_TABLE_1, 1); TestTableGenerator.generateHBaseDataset3(admin, TEST_TABLE_3, 1); + TestTableGenerator.generateHBaseDatasetCompositeKeyDate(admin, TEST_TABLE_COMPOSITE_DATE, 1); + TestTableGenerator.generateHBaseDatasetCompositeKeyTime(admin, TEST_TABLE_COMPOSITE_TIME, 1); + TestTableGenerator.generateHBaseDatasetCompositeKeyInt(admin, TEST_TABLE_COMPOSITE_INT, 1); } private static void cleanupTestTables() throws IOException { @@ -153,6 +162,12 @@ private static void cleanupTestTables() throws IOException { admin.deleteTable(TEST_TABLE_1); admin.disableTable(TEST_TABLE_3); admin.deleteTable(TEST_TABLE_3); + admin.disableTable(TEST_TABLE_COMPOSITE_DATE); + admin.deleteTable(TEST_TABLE_COMPOSITE_DATE); + admin.disableTable(TEST_TABLE_COMPOSITE_TIME); + admin.deleteTable(TEST_TABLE_COMPOSITE_TIME); + admin.disableTable(TEST_TABLE_COMPOSITE_INT); + admin.deleteTable(TEST_TABLE_COMPOSITE_INT); } public static int getZookeeperPort() { diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java index 7abcb294436..47f278646d1 100644 --- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java +++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java @@ -58,6 +58,173 @@ public void testFilterPushDownRowKeyEqualWithItem() throws Exception { PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan); } + + @Test + public void testFilterPushDownCompositeDateRowKey1() throws Exception { + setColumnWidths(new int[] {11, 22, 32}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM 
hbase.`TestTableCompositeDate` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') < DATE '2015-06-18' AND\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') > DATE '2015-06-13'" + , 12); + } + + @Test + public void testFilterPushDownCompositeDateRowKey2() throws Exception { + setColumnWidths(new int[] {11, 22, 32}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeDate` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') = DATE '2015-08-22'" + , 3); + } + + @Test + public void testFilterPushDownCompositeDateRowKey3() throws Exception { + setColumnWidths(new int[] {11, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeDate` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') < DATE '2015-06-18' AND\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') > DATE '2015-06-13'" + , 1); + } + + @Test + public void testFilterPushDownCompositeDateRowKey4() throws Exception { + setColumnWidths(new int[] {30, 22, 30, 10}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'timestamp_epoch_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') t\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeDate` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'timestamp_epoch_be') >= TIMESTAMP '2015-06-18 08:00:00.000' AND\n" + + " 
CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'timestamp_epoch_be') < TIMESTAMP '2015-06-20 16:00:00.000'" + , 7); + } + + @Test + public void testFilterPushDownCompositeTimeRowKey1() throws Exception { + setColumnWidths(new int[] {50, 40, 32}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeTime` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') = TIME '23:57:15.275'"//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n" + , 1); + } + + @Test + public void testFilterPushDownCompositeTimeRowKey2() throws Exception { + setColumnWidths(new int[] {30, 2002, 32}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeTime` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') = TIME '23:55:51.250'"//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n" + , 1); + } + + @Test + public void testFilterPushDownCompositeTimeRowKey3() throws Exception { + setColumnWidths(new int[] {30, 22, 32}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeTime` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') > TIME '23:57:06' AND"//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 
'time_epoch_be') < TIME '23:59:59'" + , 8); + } + + @Test + public void testFilterPushDownCompositeBigIntRowKey1() throws Exception { + setColumnWidths(new int[] {15, 40, 32}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'bigint_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeDate` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'bigint_be') = cast(1409040000000 as bigint)"//convert_from(binary_string('\\x00\\x00\\x00\\x00\\x55\\x4D\\xBE\\x80'), 'BIGINT_BE') \n" + , 1); + } + + @Test + public void testFilterPushDownCompositeBigIntRowKey2() throws Exception { + setColumnWidths(new int[] {16, 22, 32}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'bigint_be') i\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'date_epoch_be') d\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'time_epoch_be') t\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 9, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeDate` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'uint8_be') > cast(1438300800000 as bigint) AND\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'uint8_be') < cast(1438617600000 as bigint)" + , 10); + } + + @Test + public void testFilterPushDownCompositeIntRowKey1() throws Exception { + setColumnWidths(new int[] {16, 22, 32}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') i\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 5, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeInt` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') >= cast(423 as int) AND" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') < cast(940 as int)" + , 11); + 
} + + @Test + public void testFilterPushDownCompositeIntRowKey2() throws Exception { + setColumnWidths(new int[] {16, 2002, 32}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') i\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 5, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeInt` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') >= cast(300 as int) AND" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') < cast(900 as int)" + , 1); + } + + @Test + public void testFilterPushDownCompositeIntRowKey3() throws Exception { + setColumnWidths(new int[] {16, 22, 32}); + runHBaseSQLVerifyCount("SELECT \n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') i\n" + + ", CONVERT_FROM(BYTE_SUBSTR(row_key, 5, 8), 'bigint_be') id\n" + + ", CONVERT_FROM(tableName.f.c, 'UTF8') \n" + + " FROM hbase.`TestTableCompositeInt` tableName\n" + + " WHERE\n" + + " CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 4), 'uint4_be') = cast(658 as int)" + , 1); + } + @Test public void testFilterPushDownRowKeyLike() throws Exception { setColumnWidths(new int[] {8, 22}); diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java index 097947cabbb..122a323e92c 100644 --- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java +++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java @@ -17,7 +17,9 @@ */ package org.apache.drill.hbase; +import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Date; import java.util.Random; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -197,4 +199,127 @@ public static void generateHBaseDataset3(HBaseAdmin admin, String tableName, int admin.flush(tableName); } + public static void 
generateHBaseDatasetCompositeKeyDate(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + Date startDate = new Date(1408924800000L); + long startTime = startDate.getTime(); + long MILLISECONDS_IN_A_DAY = (long)1000 * 60 * 60 * 24; + long MILLISECONDS_IN_A_YEAR = MILLISECONDS_IN_A_DAY * 365; + long endTime = startTime + MILLISECONDS_IN_A_YEAR; + long interval = MILLISECONDS_IN_A_DAY / 3; + + for (long ts = startTime, counter = 0; ts < endTime; ts += interval, counter ++) { + byte[] rowKey = ByteBuffer.allocate(16) .putLong(ts).array(); + + for(int i = 0; i < 8; ++i) { + rowKey[8 + i] = (byte)(counter >> (56 - (i * 8))); + } + + Put p = new Put(rowKey); + p.add(FAMILY_F, COLUMN_C, "dummy".getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + } + + public static void generateHBaseDatasetCompositeKeyTime(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + long startTime = 0; + long MILLISECONDS_IN_A_SEC = (long)1000; + long MILLISECONDS_IN_A_DAY = MILLISECONDS_IN_A_SEC * 60 * 60 * 24; + long endTime = startTime + MILLISECONDS_IN_A_DAY; + 
long smallInterval = 25; + long largeInterval = MILLISECONDS_IN_A_SEC * 42; + long interval = smallInterval; + + for (long ts = startTime, counter = 0; ts < endTime; ts += interval, counter ++) { + byte[] rowKey = ByteBuffer.allocate(16) .putLong(ts).array(); + + for(int i = 0; i < 8; ++i) { + rowKey[8 + i] = (byte)(counter >> (56 - (i * 8))); + } + + Put p = new Put(rowKey); + p.add(FAMILY_F, COLUMN_C, "dummy".getBytes()); + table.put(p); + + if (interval == smallInterval) { + interval = largeInterval; + } else { + interval = smallInterval; + } + } + + table.flushCommits(); + table.close(); + } + + public static void generateHBaseDatasetCompositeKeyInt(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + int startVal = 0; + int stopVal = 1000; + int interval = 47; + long counter = 0; + for (int i = startVal; i < stopVal; i += interval, counter ++) { + byte[] rowKey = ByteBuffer.allocate(12).putInt(i).array(); + + for(int j = 0; j < 8; ++j) { + rowKey[4 + j] = (byte)(counter >> (56 - (j * 8))); + } + + Put p = new Put(rowKey); + p.add(FAMILY_F, COLUMN_C, "dummy".getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochBEConvertFrom.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochBEConvertFrom.java new file mode 100644 index 00000000000..eec715992cd --- /dev/null +++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochBEConvertFrom.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.drill.exec.expr.fn.impl.conv; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.TimeStampHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_fromTIMESTAMP_EPOCH_BE", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class TimeStampEpochBEConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output TimeStampHolder out; + + @Override + public void setup() { } + + @Override + public void eval() { + org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 8); + + in.buffer.readerIndex(in.start); + out.value = 
Long.reverseBytes(in.buffer.readLong()); + } +} diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochBEConvertTo.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochBEConvertTo.java new file mode 100644 index 00000000000..504cb455ef7 --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochBEConvertTo.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.TimeStampHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toTIMESTAMP_EPOCH_BE", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class TimeStampEpochBEConvertTo implements DrillSimpleFunc { + + @Param TimeStampHolder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(8); + } + + @Override + public void eval() { + buffer.clear(); + buffer.writeLong(Long.reverseBytes(in.value)); + out.buffer = buffer; + out.start = 0; + out.end = 8; + } +} diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochConvertFrom.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochConvertFrom.java new file mode 100644 index 00000000000..e68d3016dda --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochConvertFrom.java @@ -0,0 +1,47 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.TimeHolder; +import org.apache.drill.exec.expr.holders.TimeStampHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_fromTIMESTAMP_EPOCH", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class TimeStampEpochConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output TimeStampHolder out; + + @Override + public void setup() { } + + @Override + public void eval() { + org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 8); + + in.buffer.readerIndex(in.start); + out.value = in.buffer.readLong(); + } +} diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochConvertTo.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochConvertTo.java new 
file mode 100644 index 00000000000..40224874b9f --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/TimeStampEpochConvertTo.java @@ -0,0 +1,55 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.TimeStampHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toTIMESTAMP_EPOCH", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class TimeStampEpochConvertTo implements DrillSimpleFunc { + + @Param TimeStampHolder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(8); + } + + @Override + public void eval() { + buffer.clear(); + buffer.writeLong(in.value); + out.buffer = buffer; + out.start = 0; + out.end = 8; + } +} \ No newline at end of file diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4BEConvertFrom.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4BEConvertFrom.java new file mode 100644 index 00000000000..dd2c29ccc88 --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4BEConvertFrom.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.drill.exec.expr.fn.impl.conv; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.UInt4Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_fromUINT4_BE", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class UInt4BEConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output UInt4Holder out; + + @Override + public void setup() { } + + @Override + public void eval() { + org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 4); + + in.buffer.readerIndex(in.start); + out.value = Integer.reverseBytes(in.buffer.readInt()); + } +} \ No newline at end of file diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4BEConvertTo.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4BEConvertTo.java new file mode 100644 index 00000000000..302f18cf276 --- /dev/null +++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4BEConvertTo.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.UInt4Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toUINT4_BE", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class UInt4BEConvertTo implements DrillSimpleFunc { + + @Param UInt4Holder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(4); + } + + @Override + public void eval() { + buffer.clear(); + buffer.writeInt(Integer.reverseBytes(in.value)); + 
out.buffer = buffer; + out.start = 0; + out.end = 4; + } +} diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4ConvertFrom.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4ConvertFrom.java new file mode 100644 index 00000000000..fba2b97525c --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4ConvertFrom.java @@ -0,0 +1,46 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.UInt4Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_fromUINT4", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class UInt4ConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output UInt4Holder out; + + @Override + public void setup() { } + + @Override + public void eval() { + org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 4); + + in.buffer.readerIndex(in.start); + out.value = in.buffer.readInt(); + } +} \ No newline at end of file diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4ConvertTo.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4ConvertTo.java new file mode 100644 index 00000000000..a362bd8994e --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/UInt4ConvertTo.java @@ -0,0 +1,55 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.UInt4Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toUINT4", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class UInt4ConvertTo implements DrillSimpleFunc { + + @Param UInt4Holder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(4); + } + + @Override + public void eval() { + buffer.clear(); + buffer.writeInt(in.value); + out.buffer = buffer; + out.start = 0; + out.end = 4; + } +} \ No newline at end of file From 70e1f3b2ca0410748b9872535bb205651e86d6c9 Mon Sep 17 00:00:00 2001 From: spanchamia Date: Wed, 29 Jul 2015 22:53:04 -0700 Subject: [PATCH 2/4] DRILL-3492: Add support for encoding/decoding of to/from OrderedBytes format Description: This change allows encoding/decoding of data from/to 'double', 'float', 'bigint', 'int' and 'utf8' data types to/from OrderedBytes 
format. It also allows for OrderedByte encoded row-keys to be stored in ascending as well as descending order. The following JIRA added the OrderedBytes encoding to HBase: https://issues.apache.org/jira/browse/HBASE-8201 This encoding scheme will preserve the sort-order of the native data-type when it is stored as sorted byte arrays on disk. Thus, it will help the HBase storage plugin if the row-keys have been encoded in OrderedBytes format. This functionality allows us to prune the scan ranges, thus reading much less data from the server. Testing Done: Added a new unit-test class TestOrderedBytesConvertFunctions.java which derives from TestConvertFunctions.java class. Also added new test cases to TestHBaseFilterPushDown class that will test if we were able to push-down filters correctly and if the results are correct. --- contrib/storage-hbase/pom.xml | 31 ++ .../conv/OrderedBytesBigIntConvertFrom.java | 49 +++ .../conv/OrderedBytesBigIntConvertTo.java | 61 +++ .../conv/OrderedBytesBigIntDescConvertTo.java | 61 +++ .../conv/OrderedBytesDoubleConvertFrom.java | 49 +++ .../conv/OrderedBytesDoubleConvertTo.java | 61 +++ .../conv/OrderedBytesDoubleDescConvertTo.java | 61 +++ .../conv/OrderedBytesFloatConvertFrom.java | 49 +++ .../impl/conv/OrderedBytesFloatConvertTo.java | 61 +++ .../conv/OrderedBytesFloatDescConvertTo.java | 61 +++ .../impl/conv/OrderedBytesIntConvertFrom.java | 49 +++ .../impl/conv/OrderedBytesIntConvertTo.java | 61 +++ .../conv/OrderedBytesIntDescConvertTo.java | 61 +++ .../conv/OrderedBytesUTF8ConvertFrom.java | 58 +++ .../impl/conv/OrderedBytesUTF8ConvertTo.java | 64 ++++ .../conv/OrderedBytesUTF8DescConvertTo.java | 64 ++++ .../hbase/CompareFunctionsProcessor.java | 70 ++++ .../exec/store/hbase/HBaseFilterBuilder.java | 59 ++- .../apache/drill/hbase/HBaseTestsSuite.java | 58 ++- .../drill/hbase/TestHBaseFilterPushDown.java | 284 ++++++++++++++ .../TestOrderedBytesConvertFunctions.java | 150 ++++++++ .../drill/hbase/TestTableGenerator.java | 361
++++++++++++++++++ 22 files changed, 1865 insertions(+), 18 deletions(-) create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertFrom.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertTo.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntDescConvertTo.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertFrom.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertTo.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleDescConvertTo.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertFrom.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertTo.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatDescConvertTo.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertFrom.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertTo.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntDescConvertTo.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertFrom.java create mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertTo.java create mode 100644 
contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8DescConvertTo.java create mode 100644 contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java diff --git a/contrib/storage-hbase/pom.xml b/contrib/storage-hbase/pom.xml index d02777185be..254035c97fc 100644 --- a/contrib/storage-hbase/pom.xml +++ b/contrib/storage-hbase/pom.xml @@ -93,6 +93,37 @@ + + maven-resources-plugin + + + copy-java-sources + process-sources + + copy-resources + + + ${basedir}/target/classes/org/apache/drill/exec/expr/fn/impl + + + src/main/java/org/apache/drill/exec/expr/fn/impl + true + + + src/test/java + true + + + target/generated-sources + + true + + + + + + + diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertFrom.java new file mode 100644 index 00000000000..c6248213bcf --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertFrom.java @@ -0,0 +1,49 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.BigIntHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(names = {"convert_fromBIGINT_OB", "convert_fromBIGINT_OBD"}, + scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesBigIntConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output BigIntHolder out; + + @Override + public void setup() { } + + @Override + public void eval() { + org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 9); + byte[] bytes = new byte[9]; + in.buffer.getBytes(in.start, bytes, 0, 9); + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + out.value = org.apache.hadoop.hbase.util.OrderedBytes.decodeInt64(br); + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertTo.java new file mode 100644 index 00000000000..8677ce5ae75 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertTo.java @@ -0,0 +1,61 @@ 
+/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.BigIntHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toBIGINT_OB", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesBigIntConvertTo implements DrillSimpleFunc { + + @Param BigIntHolder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(9); + } + + @Override + public void eval() { + 
buffer.clear(); + byte[] bytes = new byte[9]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br, in.value, + org.apache.hadoop.hbase.util.Order.ASCENDING); + + buffer.setBytes(0, bytes, 0, 9); + out.buffer = buffer; + out.start = 0; + out.end = 9; + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntDescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntDescConvertTo.java new file mode 100644 index 00000000000..91b1e659a2c --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntDescConvertTo.java @@ -0,0 +1,61 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.BigIntHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toBIGINT_OBD", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesBigIntDescConvertTo implements DrillSimpleFunc { + + @Param BigIntHolder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(9); + } + + @Override + public void eval() { + buffer.clear(); + byte[] bytes = new byte[9]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br, in.value, + org.apache.hadoop.hbase.util.Order.DESCENDING); + + buffer.setBytes(0, bytes, 0, 9); + out.buffer = buffer; + out.start = 0; + out.end = 9; + } +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertFrom.java new file mode 100644 index 00000000000..5b848ba8371 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertFrom.java @@ -0,0 +1,49 @@ 
+/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.Float8Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(names = {"convert_fromDOUBLE_OB", "convert_fromDOUBLE_OBD"}, + scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesDoubleConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output Float8Holder out; + + @Override + public void setup() { } + + @Override + public void eval() { + org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 9); + byte[] bytes = new 
byte[9]; + in.buffer.getBytes(in.start, bytes, 0, 9); + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + out.value = org.apache.hadoop.hbase.util.OrderedBytes.decodeFloat64(br); + } +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertTo.java new file mode 100644 index 00000000000..4f4ce1ecba1 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertTo.java @@ -0,0 +1,61 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.Float8Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toDOUBLE_OB", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesDoubleConvertTo implements DrillSimpleFunc { + + @Param Float8Holder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(9); + } + + @Override + public void eval() { + buffer.clear(); + byte[] bytes = new byte[9]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br, in.value, + org.apache.hadoop.hbase.util.Order.ASCENDING); + + buffer.setBytes(0, bytes, 0, 9); + out.buffer = buffer; + out.start = 0; + out.end = 9; + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleDescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleDescConvertTo.java new file mode 100644 index 00000000000..a4f2dfce415 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleDescConvertTo.java @@ -0,0 +1,61 @@ 
+/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.Float8Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toDOUBLE_OBD", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesDoubleDescConvertTo implements DrillSimpleFunc { + + @Param Float8Holder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(9); + } + + @Override + public void eval() { + 
buffer.clear(); + byte[] bytes = new byte[9]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br, in.value, + org.apache.hadoop.hbase.util.Order.DESCENDING); + + buffer.setBytes(0, bytes, 0, 9); + out.buffer = buffer; + out.start = 0; + out.end = 9; + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertFrom.java new file mode 100644 index 00000000000..74b83f8e6fb --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertFrom.java @@ -0,0 +1,49 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.Float4Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(names = {"convert_fromFLOAT_OB", "convert_fromFLOAT_OBD"}, + scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesFloatConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output Float4Holder out; + + @Override + public void setup() { } + + @Override + public void eval() { + org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 5); + byte[] bytes = new byte[5]; + in.buffer.getBytes(in.start, bytes, 0, 5); + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + out.value = org.apache.hadoop.hbase.util.OrderedBytes.decodeFloat32(br); + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertTo.java new file mode 100644 index 00000000000..9d58f6ab9cc --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertTo.java @@ -0,0 +1,61 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more 
contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.Float4Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toFLOAT_OB", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesFloatConvertTo implements DrillSimpleFunc { + + @Param Float4Holder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(5); + } + + @Override + public void eval() { + buffer.clear(); + byte[] bytes = new byte[5]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 
5); + org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br, in.value, + org.apache.hadoop.hbase.util.Order.ASCENDING); + + buffer.setBytes(0, bytes, 0, 5); + out.buffer = buffer; + out.start = 0; + out.end = 5; + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatDescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatDescConvertTo.java new file mode 100644 index 00000000000..d6c83c54809 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatDescConvertTo.java @@ -0,0 +1,61 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.Float4Holder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toFLOAT_OBD", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesFloatDescConvertTo implements DrillSimpleFunc { + + @Param Float4Holder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(5); + } + + @Override + public void eval() { + buffer.clear(); + byte[] bytes = new byte[5]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br, in.value, + org.apache.hadoop.hbase.util.Order.DESCENDING); + + buffer.setBytes(0, bytes, 0, 5); + out.buffer = buffer; + out.start = 0; + out.end = 5; + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertFrom.java new file mode 100644 index 00000000000..964112fc056 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertFrom.java @@ -0,0 +1,49 @@ 
+/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.IntHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(names = {"convert_fromINT_OB", "convert_fromINT_OBD"}, + scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesIntConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output IntHolder out; + + @Override + public void setup() { } + + @Override + public void eval() { + org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 5); + byte[] bytes = new byte[5]; + 
in.buffer.getBytes(in.start, bytes, 0, 5); + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + out.value = org.apache.hadoop.hbase.util.OrderedBytes.decodeInt32(br); + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertTo.java new file mode 100644 index 00000000000..f37487cee73 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertTo.java @@ -0,0 +1,61 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.IntHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toINT_OB", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesIntConvertTo implements DrillSimpleFunc { + + @Param IntHolder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(5); + } + + @Override + public void eval() { + buffer.clear(); + byte[] bytes = new byte[5]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br, in.value, + org.apache.hadoop.hbase.util.Order.ASCENDING); + + buffer.setBytes(0, bytes, 0, 5); + out.buffer = buffer; + out.start = 0; + out.end = 5; + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntDescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntDescConvertTo.java new file mode 100644 index 00000000000..34558365277 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntDescConvertTo.java @@ -0,0 +1,61 @@ 
+/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import io.netty.buffer.DrillBuf; + +import javax.inject.Inject; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.IntHolder; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; + +@FunctionTemplate(name = "convert_toINT_OBD", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesIntDescConvertTo implements DrillSimpleFunc { + + @Param IntHolder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(5); + } + + @Override + public void eval() { + buffer.clear(); + 
byte[] bytes = new byte[5]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br, in.value, + org.apache.hadoop.hbase.util.Order.DESCENDING); + + buffer.setBytes(0, bytes, 0, 5); + out.buffer = buffer; + out.start = 0; + out.end = 5; + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertFrom.java new file mode 100644 index 00000000000..006af40dd90 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertFrom.java @@ -0,0 +1,58 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import javax.inject.Inject; + +import io.netty.buffer.DrillBuf; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; +import org.apache.drill.exec.expr.holders.VarCharHolder; + +@FunctionTemplate(names = {"convert_fromUTF8_OB", "convert_fromUTF8_OBD"}, + scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesUTF8ConvertFrom implements DrillSimpleFunc { + + @Param VarBinaryHolder in; + @Output VarCharHolder out; + @Inject DrillBuf buffer; + + @Override + public void setup() { } + + @Override + public void eval() { + buffer = buffer.reallocIfNeeded(in.end - in.start - 2); + byte[] bytes = new byte[in.end - in.start]; + in.buffer.getBytes(in.start, bytes, 0, in.end - in.start); + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + String str = org.apache.hadoop.hbase.util.OrderedBytes.decodeString(br); + buffer.setBytes(0, str.getBytes(), 0, str.length()); + out.buffer = buffer; + out.start = 0; + out.end = str.length(); + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertTo.java new file mode 100644 index 00000000000..8f6d9433183 --- /dev/null +++ 
b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertTo.java @@ -0,0 +1,64 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import java.nio.charset.StandardCharsets; + +import javax.inject.Inject; + +import io.netty.buffer.DrillBuf; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; +import org.apache.drill.exec.expr.holders.VarCharHolder; + +@FunctionTemplate(name = "convert_toUTF8_OB", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesUTF8ConvertTo implements DrillSimpleFunc { + + @Param VarCharHolder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(in.end - in.start + 2); + } + + @Override + public void eval() { + buffer.clear(); + byte [] bytes = new byte[in.end - in.start + 2]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start + 2); + + java.lang.String ip = new String(in.buffer.array(), java.nio.charset.StandardCharsets.UTF_8); + org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br, ip, + org.apache.hadoop.hbase.util.Order.ASCENDING); + + buffer.setBytes(0, bytes, 0, in.end - in.start + 2); + out.buffer = buffer; + out.start = 0; + out.end = in.end - in.start + 2; + } +} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8DescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8DescConvertTo.java new file 
mode 100644 index 00000000000..ed00385ce61 --- /dev/null +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8DescConvertTo.java @@ -0,0 +1,64 @@ +/******************************************************************************* + + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ ******************************************************************************/ +package org.apache.drill.exec.expr.fn.impl.conv; + +import java.nio.charset.StandardCharsets; + +import javax.inject.Inject; + +import io.netty.buffer.DrillBuf; + +import org.apache.drill.exec.expr.DrillSimpleFunc; +import org.apache.drill.exec.expr.annotations.FunctionTemplate; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; +import org.apache.drill.exec.expr.annotations.Output; +import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.holders.VarBinaryHolder; +import org.apache.drill.exec.expr.holders.VarCharHolder; + +@FunctionTemplate(name = "convert_toUTF8_OBD", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) +public class OrderedBytesUTF8DescConvertTo implements DrillSimpleFunc { + + @Param VarCharHolder in; + @Output VarBinaryHolder out; + @Inject DrillBuf buffer; + + @Override + public void setup() { + buffer = buffer.reallocIfNeeded(in.end - in.start + 2); + } + + @Override + public void eval() { + buffer.clear(); + byte [] bytes = new byte[in.end - in.start + 2]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start + 2); + + java.lang.String ip = new String(in.buffer.array(), java.nio.charset.StandardCharsets.UTF_8); + org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br, ip, + org.apache.hadoop.hbase.util.Order.DESCENDING); + + buffer.setBytes(0, bytes, 0, in.end - in.start + 2); + out.buffer = buffer; + out.start = 0; + out.end = in.end - in.start + 2; + } +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/CompareFunctionsProcessor.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/CompareFunctionsProcessor.java index 87eb42ea996..2527e8df46b 100644 --- 
a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/CompareFunctionsProcessor.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/CompareFunctionsProcessor.java @@ -38,6 +38,10 @@ import org.apache.drill.common.expression.ValueExpressions.TimeExpression; import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression; import org.apache.drill.common.expression.visitors.AbstractExprVisitor; +import org.apache.hadoop.hbase.util.Order; +import org.apache.hadoop.hbase.util.PositionedByteRange; +import org.apache.hadoop.hbase.util.SimplePositionedByteRange; + import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.PrefixFilter; @@ -52,6 +56,7 @@ class CompareFunctionsProcessor extends AbstractExprVisitor cast(95.54 as DOUBLE)" + , 6); + } + + @Test + public void testFilterPushDownDoubleOBPlan() throws Exception { + setColumnWidths(new int[] {8, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR\n" + + "SELECT\n" + + " convert_from(t.row_key, 'DOUBLE_OB') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableDoubleOB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'DOUBLE_OB') > cast(95.54 as DOUBLE)" + , 1); + } + + @Test + public void testFilterPushDownDoubleOBDesc() throws Exception { + setColumnWidths(new int[] {8, 25}); + runHBaseSQLVerifyCount("SELECT\n" + + " convert_from(t.row_key, 'DOUBLE_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableDoubleOBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'DOUBLE_OBD') > cast(95.54 as DOUBLE)" + , 6); + } + + @Test + public void testFilterPushDownDoubleOBDescPlan() throws Exception { + setColumnWidths(new int[] {8, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR\n" + + "SELECT\n" + + " convert_from(t.row_key, 'DOUBLE_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " 
hbase.`TestTableDoubleOBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'DOUBLE_OBD') > cast(95.54 as DOUBLE)" + , 1); + } + + @Test + public void testFilterPushDownIntOB() throws Exception { + setColumnWidths(new int[] {15, 25}); + runHBaseSQLVerifyCount("SELECT\n" + + " convert_from(t.row_key, 'INT_OB') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableIntOB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'INT_OB') >= cast(-32 as INT) AND" + + " CONVERT_FROM(row_key, 'INT_OB') < cast(59 as INT)" + , 91); + } + + @Test + public void testFilterPushDownIntOBDesc() throws Exception { + setColumnWidths(new int[] {15, 25}); + runHBaseSQLVerifyCount("SELECT\n" + + " convert_from(t.row_key, 'INT_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableIntOBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'INT_OBD') >= cast(-32 as INT) AND" + + " CONVERT_FROM(row_key, 'INT_OBD') < cast(59 as INT)" + , 91); + } + + @Test + public void testFilterPushDownIntOBPlan() throws Exception { + setColumnWidths(new int[] {15, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR\n" + + "SELECT\n" + + " convert_from(t.row_key, 'INT_OB') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableIntOB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'INT_OB') > cast(-23 as INT) AND" + + " CONVERT_FROM(row_key, 'INT_OB') < cast(14 as INT)" + , 1); + } + + @Test + public void testFilterPushDownIntOBDescPlan() throws Exception { + setColumnWidths(new int[] {15, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR\n" + + "SELECT\n" + + " convert_from(t.row_key, 'INT_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableIntOBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'INT_OBD') > cast(-23 as INT) AND" + + " CONVERT_FROM(row_key, 'INT_OBD') < cast(14 as INT)" + , 1); + } + + @Test + public void testFilterPushDownBigIntOB() throws Exception { 
+ setColumnWidths(new int[] {15, 25}); + runHBaseSQLVerifyCount("SELECT\n" + + " convert_from(t.row_key, 'BIGINT_OB') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableBigIntOB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'BIGINT_OB') > cast(1438034423063 as BIGINT) AND" + + " CONVERT_FROM(row_key, 'BIGINT_OB') <= cast(1438034423097 as BIGINT)" + , 34); + } + + @Test + public void testFilterPushDownBigIntOBPlan() throws Exception { + setColumnWidths(new int[] {15, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR\n" + + "SELECT\n" + + " convert_from(t.row_key, 'BIGINT_OB') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableBigIntOB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'BIGINT_OB') > cast(1438034423063 as BIGINT) AND" + + " CONVERT_FROM(row_key, 'BIGINT_OB') < cast(1438034423097 as BIGINT)" + , 1); + } + + @Test + public void testFilterPushDownFloatOB() throws Exception { + setColumnWidths(new int[] {8, 25}); + runHBaseSQLVerifyCount("SELECT\n" + + " convert_from(t.row_key, 'FLOAT_OB') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableFloatOB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'FLOAT_OB') > cast(95.74 as FLOAT) AND" + + " CONVERT_FROM(row_key, 'FLOAT_OB') < cast(99.5 as FLOAT)" + , 5); + } + + @Test + public void testFilterPushDownFloatOBPlan() throws Exception { + setColumnWidths(new int[] {8, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR\n" + + "SELECT\n" + + " convert_from(t.row_key, 'FLOAT_OB') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableFloatOB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'FLOAT_OB') > cast(95.54 as FLOAT) AND" + + " CONVERT_FROM(row_key, 'FLOAT_OB') < cast(99.77 as FLOAT)" + , 1); + } + + @Test + public void testFilterPushDownBigIntOBDesc() throws Exception { + setColumnWidths(new int[] {15, 25}); + runHBaseSQLVerifyCount("SELECT\n" + + " 
convert_from(t.row_key, 'BIGINT_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableBigIntOBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'BIGINT_OBD') > cast(1438034423063 as BIGINT) AND" + + " CONVERT_FROM(row_key, 'BIGINT_OBD') <= cast(1438034423097 as BIGINT)" + , 34); + } + + @Test + public void testFilterPushDownBigIntOBDescPlan() throws Exception { + setColumnWidths(new int[] {15, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR\n" + + "SELECT\n" + + " convert_from(t.row_key, 'BIGINT_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableBigIntOBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'BIGINT_OBD') > cast(1438034423063 as BIGINT) AND" + + " CONVERT_FROM(row_key, 'BIGINT_OBD') < cast(1438034423097 as BIGINT)" + , 1); + } + + @Test + public void testFilterPushDownFloatOBDesc() throws Exception { + setColumnWidths(new int[] {8, 25}); + runHBaseSQLVerifyCount("SELECT\n" + + " convert_from(t.row_key, 'FLOAT_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableFloatOBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'FLOAT_OBD') > cast(95.74 as FLOAT) AND" + + " CONVERT_FROM(row_key, 'FLOAT_OBD') < cast(99.5 as FLOAT)" + , 5); + } + + @Test + public void testFilterPushDownFloatOBDescPlan() throws Exception { + setColumnWidths(new int[] {8, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR\n" + + "SELECT\n" + + " convert_from(t.row_key, 'FLOAT_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableFloatOBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'FLOAT_OBD') > cast(95.54 as FLOAT) AND" + + " CONVERT_FROM(row_key, 'FLOAT_OBD') < cast(99.77 as FLOAT)" + , 1); + } + + @Test + public void testFilterPushDownUTF8OB() throws Exception { + setColumnWidths(new int[] {40, 2000}); + runHBaseSQLVerifyCount("SELECT\n" + + " convert_from(t.row_key, 'UTF8_OB') rk,\n" + + " 
convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableUTF8OB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'UTF8_OB') > 'W' AND" + + " CONVERT_FROM(row_key, 'UTF8_OB') < 'Z'" + , 6); + } + + @Test + public void testFilterPushDownUTF8OBPlan() throws Exception { + setColumnWidths(new int[] {40, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR SELECT\n" + + " convert_from(t.row_key, 'UTF8_OB') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableUTF8OB` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'UTF8_OB') < 'A' AND" + + " CONVERT_FROM(row_key, 'UTF8_OB') >= 'H'" + , 1); + } + + @Test + public void testFilterPushDownUTF8OBDesc() throws Exception { + setColumnWidths(new int[] {40, 2000}); + runHBaseSQLVerifyCount("SELECT\n" + + " convert_from(t.row_key, 'UTF8_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableUTF8OBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'UTF8_OBD') > 'D' AND" + + " CONVERT_FROM(row_key, 'UTF8_OBD') <= 'KH'" + , 23); + } + + @Test + public void testFilterPushDownUTF8OBDescPlan() throws Exception { + setColumnWidths(new int[] {40, 2000}); + runHBaseSQLVerifyCount("EXPLAIN PLAN FOR SELECT\n" + + " convert_from(t.row_key, 'UTF8_OBD') rk,\n" + + " convert_from(t.`f`.`c`, 'UTF8') val\n" + + "FROM\n" + + " hbase.`TestTableUTF8OBDesc` t\n" + + "WHERE\n" + + " CONVERT_FROM(row_key, 'UTF8_OBD') < 'HY' AND" + + " CONVERT_FROM(row_key, 'UTF8_OBD') >= 'UY'" + , 1); + } + @Test public void testFilterPushDownRowKeyLike() throws Exception { setColumnWidths(new int[] {8, 22}); diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java new file mode 100644 index 00000000000..96c3668dbd8 --- /dev/null +++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestOrderedBytesConvertFunctions.java @@ 
-0,0 +1,150 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.drill.hbase; + +import static org.apache.drill.TestBuilder.listOf; +import static org.apache.drill.TestBuilder.mapOf; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import io.netty.buffer.DrillBuf; + +import java.util.ArrayList; +import java.util.List; + +import mockit.Injectable; + +import org.apache.drill.BaseTestQuery; +import org.apache.drill.TestBuilder; +import org.apache.drill.exec.compile.ClassTransformer; +import org.apache.drill.exec.compile.ClassTransformer.ScalarReplacementOption; +import org.apache.drill.exec.expr.fn.impl.DateUtility; +import org.apache.drill.exec.proto.UserBitShared.QueryType; +import org.apache.drill.exec.record.RecordBatchLoader; +import org.apache.drill.exec.rpc.RpcException; +import org.apache.drill.exec.rpc.user.QueryDataBatch; +import org.apache.drill.exec.rpc.user.UserServer; +import org.apache.drill.exec.server.Drillbit; +import org.apache.drill.exec.server.DrillbitContext; +import 
org.apache.drill.exec.server.options.OptionManager; +import org.apache.drill.exec.server.options.OptionValue; +import org.apache.drill.exec.server.options.OptionValue.OptionType; +import org.apache.drill.exec.util.ByteBufUtil.HadoopWritables; +import org.apache.drill.exec.util.VectorUtil; +import org.apache.drill.exec.vector.ValueVector; +import org.apache.drill.exec.vector.VarCharVector; +import org.joda.time.DateTime; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +import com.google.common.base.Charsets; +import com.google.common.io.Resources; + +public class TestOrderedBytesConvertFunctions extends BaseTestQuery { + + private static final String CONVERSION_TEST_PHYSICAL_PLAN = "functions/conv/conversionTestWithPhysicalPlan.json"; + private static final float DELTA = (float) 0.0001; + + String textFileContent; + + @Test + public void testOrderedBytesDouble() throws Throwable { + verifyPhysicalPlan("convert_to(4.9e-324, 'DOUBLE_OB')", new byte[] {0x31, (byte)0x80, 0, 0, 0, 0, 0, 0, 0x01}); + } + + @Test + public void testOrderedBytesDoubleConvertFrom() throws Throwable { + verifyPhysicalPlan("convert_from(binary_string('\\x31\\x80\\x00\\x00\\x00\\x00\\x00\\x00\\x01'), 'DOUBLE_OB')", new Double(4.9e-324)); + } + + protected void verifyPhysicalPlan(String expression, T expectedResults) throws Throwable { + expression = expression.replace("\\", "\\\\\\\\"); // "\\\\\\\\" => Java => "\\\\" => JsonParser => "\\" => AntlrParser "\" + + if (textFileContent == null) { + textFileContent = Resources.toString(Resources.getResource(CONVERSION_TEST_PHYSICAL_PLAN), Charsets.UTF_8); + } + String planString = textFileContent.replace("__CONVERT_EXPRESSION__", expression); + + verifyResults(expression, expectedResults, getRunResult(QueryType.PHYSICAL, planString)); + } + + protected Object[] getRunResult(QueryType queryType, String planString) throws Exception { + List resultList = testRunAndReturn(queryType, planString); + + List res = new 
ArrayList(); + RecordBatchLoader loader = new RecordBatchLoader(getAllocator()); + for(QueryDataBatch result : resultList) { + if (result.getData() != null) { + loader.load(result.getHeader().getDef(), result.getData()); + ValueVector v = loader.iterator().next().getValueVector(); + for (int j = 0; j < v.getAccessor().getValueCount(); j++) { + if (v instanceof VarCharVector) { + res.add(new String(((VarCharVector) v).getAccessor().get(j))); + } else { + res.add(v.getAccessor().getObject(j)); + } + } + loader.clear(); + result.release(); + } + } + + return res.toArray(); + } + + protected void verifyResults(String expression, T expectedResults, Object[] actualResults) throws Throwable { + String testName = String.format("Expression: %s.", expression); + assertEquals(testName, 1, actualResults.length); + assertNotNull(testName, actualResults[0]); + if (expectedResults.getClass().isArray()) { + assertArraysEquals(testName, expectedResults, actualResults[0]); + } else { + assertEquals(testName, expectedResults, actualResults[0]); + } + } + + protected void assertArraysEquals(Object expected, Object actual) { + assertArraysEquals(null, expected, actual); + } + + protected void assertArraysEquals(String message, Object expected, Object actual) { + if (expected instanceof byte[] && actual instanceof byte[]) { + assertArrayEquals(message, (byte[]) expected, (byte[]) actual); + } else if (expected instanceof Object[] && actual instanceof Object[]) { + assertArrayEquals(message, (Object[]) expected, (Object[]) actual); + } else if (expected instanceof char[] && actual instanceof char[]) { + assertArrayEquals(message, (char[]) expected, (char[]) actual); + } else if (expected instanceof short[] && actual instanceof short[]) { + assertArrayEquals(message, (short[]) expected, (short[]) actual); + } else if (expected instanceof int[] && actual instanceof int[]) { + assertArrayEquals(message, (int[]) expected, (int[]) actual); + } else if (expected instanceof long[] && actual 
instanceof long[]) { + assertArrayEquals(message, (long[]) expected, (long[]) actual); + } else if (expected instanceof float[] && actual instanceof float[]) { + assertArrayEquals(message, (float[]) expected, (float[]) actual, DELTA); + } else if (expected instanceof double[] && actual instanceof double[]) { + assertArrayEquals(message, (double[]) expected, (double[]) actual, DELTA); + } else { + fail(String.format("%s: Error comparing arrays of type '%s' and '%s'", + expected.getClass().getName(), (actual == null ? "null" : actual.getClass().getName()))); + } + } +} diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java index 122a323e92c..ae5d002d57b 100644 --- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java +++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java @@ -322,4 +322,365 @@ public static void generateHBaseDatasetCompositeKeyInt(HBaseAdmin admin, String table.flushCommits(); table.close(); } + + public static void generateHBaseDatasetDoubleOB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + for (double i = 0.5; i <= 100.00; i += 0.75) { + byte[] bytes = new byte[9]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br, i, + 
org.apache.hadoop.hbase.util.Order.ASCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + public static void generateHBaseDatasetFloatOB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + for (float i = (float)0.5; i <= 100.00; i += 0.75) { + byte[] bytes = new byte[5]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br, i, + org.apache.hadoop.hbase.util.Order.ASCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + public static void generateHBaseDatasetBigIntOB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + long startTime = (long)1438034423 * 1000; + for (long i = startTime; i <= startTime + 100; i ++) { + byte[] 
bytes = new byte[9]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br, i, + org.apache.hadoop.hbase.util.Order.ASCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + public static void generateHBaseDatasetIntOB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + for (int i = -49; i <= 100; i ++) { + byte[] bytes = new byte[5]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br, i, + org.apache.hadoop.hbase.util.Order.ASCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + public static void generateHBaseDatasetDoubleOBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + 
admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + for (double i = 0.5; i <= 100.00; i += 0.75) { + byte[] bytes = new byte[9]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br, i, + org.apache.hadoop.hbase.util.Order.DESCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + public static void generateHBaseDatasetFloatOBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + for (float i = (float)0.5; i <= 100.00; i += 0.75) { + byte[] bytes = new byte[5]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br, i, + org.apache.hadoop.hbase.util.Order.DESCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %03f", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + public static void generateHBaseDatasetBigIntOBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new 
HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + long startTime = (long)1438034423 * 1000; + for (long i = startTime; i <= startTime + 100; i ++) { + byte[] bytes = new byte[9]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br, i, + org.apache.hadoop.hbase.util.Order.DESCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + + public static void generateHBaseDatasetIntOBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + + for (int i = -49; i <= 100; i ++) { + byte[] bytes = new byte[5]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br, i, + org.apache.hadoop.hbase.util.Order.DESCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %d", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + public static void generateHBaseDatasetUTF8OB(HBaseAdmin 
admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + final String AB = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"; + final int maxKeyLen = 25; + Random rnd = new Random(); + rnd.setSeed(47); + + for (int i = 0; i < 100; ++i) { + int keyLen = rnd.nextInt(maxKeyLen) + 1; + StringBuilder sb = new StringBuilder(keyLen); + for (int j = 0; j < keyLen; ++j) { + sb.append(AB.charAt(rnd.nextInt(AB.length()))); + } + + byte[] bytes = new byte[keyLen + 2]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, keyLen + 2); + org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br, sb.toString(), + org.apache.hadoop.hbase.util.Order.ASCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %03d", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } + + public static void generateHBaseDatasetUTF8OBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { + if (admin.tableExists(tableName)) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + + HTableDescriptor desc = new HTableDescriptor(tableName); + desc.addFamily(new HColumnDescriptor(FAMILY_F)); + + if (numberRegions > 1) { + admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); + } else { + admin.createTable(desc); + } + + HTable table = new HTable(admin.getConfiguration(), tableName); + final String AB = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"; + final int maxKeyLen 
= 25; + Random rnd = new Random(); + rnd.setSeed(47); + + for (int i = 0; i < 100; ++i) { + int keyLen = rnd.nextInt(maxKeyLen) + 1; + StringBuilder sb = new StringBuilder(keyLen); + for (int j = 0; j < keyLen; ++j) { + sb.append(AB.charAt(rnd.nextInt(AB.length()))); + } + + byte[] bytes = new byte[keyLen + 2]; + org.apache.hadoop.hbase.util.PositionedByteRange br = + new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, keyLen + 2); + org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br, sb.toString(), + org.apache.hadoop.hbase.util.Order.DESCENDING); + Put p = new Put(bytes); + p.add(FAMILY_F, COLUMN_C, String.format("value %03d", i).getBytes()); + table.put(p); + } + + table.flushCommits(); + table.close(); + + admin.flush(tableName); + } } From 2daacad4ca62e753bbcad7f3637512ca810ea491 Mon Sep 17 00:00:00 2001 From: Smidth Panchamia Date: Wed, 19 Aug 2015 15:18:33 -0700 Subject: [PATCH 3/4] DRILL-3492 - * Remove repeated allocations of byte arrays and PositionedByteRange objects on heap(as suggested by Jason). * Remove OrderedBytes encode/decode operations on UTF8 types. Reasons - 1. These operations are slow and incur a lot of heap allocations 2. UTF8 types maintain their natural sort order when stored as binary arrays. 
--- .../conv/OrderedBytesBigIntConvertFrom.java | 14 ++-- .../conv/OrderedBytesBigIntConvertTo.java | 12 ++-- .../conv/OrderedBytesBigIntDescConvertTo.java | 10 +-- .../conv/OrderedBytesDoubleConvertFrom.java | 12 ++-- .../conv/OrderedBytesDoubleConvertTo.java | 12 ++-- .../conv/OrderedBytesDoubleDescConvertTo.java | 12 ++-- .../conv/OrderedBytesFloatConvertFrom.java | 14 ++-- .../impl/conv/OrderedBytesFloatConvertTo.java | 12 ++-- .../conv/OrderedBytesFloatDescConvertTo.java | 12 ++-- .../impl/conv/OrderedBytesIntConvertFrom.java | 14 ++-- .../impl/conv/OrderedBytesIntConvertTo.java | 12 ++-- .../conv/OrderedBytesIntDescConvertTo.java | 12 ++-- .../conv/OrderedBytesUTF8ConvertFrom.java | 58 ----------------- .../impl/conv/OrderedBytesUTF8ConvertTo.java | 64 ------------------- .../conv/OrderedBytesUTF8DescConvertTo.java | 64 ------------------- .../drill/hbase/TestHBaseFilterPushDown.java | 56 ---------------- 16 files changed, 90 insertions(+), 300 deletions(-) delete mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertFrom.java delete mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertTo.java delete mode 100644 contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8DescConvertTo.java diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertFrom.java index c6248213bcf..3b8391d85af 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertFrom.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertFrom.java @@ -24,6 +24,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import 
org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.BigIntHolder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -33,17 +34,20 @@ public class OrderedBytesBigIntConvertFrom implements DrillSimpleFunc { @Param VarBinaryHolder in; @Output BigIntHolder out; + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override - public void setup() { } + public void setup() { + bytes = new byte[9]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); + } @Override public void eval() { org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 9); - byte[] bytes = new byte[9]; in.buffer.getBytes(in.start, bytes, 0, 9); - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + br.set(bytes); out.value = org.apache.hadoop.hbase.util.OrderedBytes.decodeInt64(br); } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertTo.java index 8677ce5ae75..d012531f614 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertTo.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntConvertTo.java @@ -28,6 +28,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.BigIntHolder; import 
org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -37,19 +38,20 @@ public class OrderedBytesBigIntConvertTo implements DrillSimpleFunc { @Param BigIntHolder in; @Output VarBinaryHolder out; @Inject DrillBuf buffer; - + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override public void setup() { buffer = buffer.reallocIfNeeded(9); + bytes = new byte[9]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); } @Override public void eval() { buffer.clear(); - byte[] bytes = new byte[9]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + br.set(bytes); org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br, in.value, org.apache.hadoop.hbase.util.Order.ASCENDING); @@ -58,4 +60,4 @@ public void eval() { out.start = 0; out.end = 9; } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntDescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntDescConvertTo.java index 91b1e659a2c..463483c7d14 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntDescConvertTo.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesBigIntDescConvertTo.java @@ -28,6 +28,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.BigIntHolder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -37,19 +38,20 @@ public class OrderedBytesBigIntDescConvertTo implements DrillSimpleFunc { @Param BigIntHolder in; @Output VarBinaryHolder out; @Inject DrillBuf buffer; - + @Workspace 
byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override public void setup() { buffer = buffer.reallocIfNeeded(9); + bytes = new byte[9]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); } @Override public void eval() { buffer.clear(); - byte[] bytes = new byte[9]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + br.set(bytes); org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br, in.value, org.apache.hadoop.hbase.util.Order.DESCENDING); diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertFrom.java index 5b848ba8371..b2ae2687122 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertFrom.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertFrom.java @@ -24,6 +24,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.Float8Holder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -33,17 +34,20 @@ public class OrderedBytesDoubleConvertFrom implements DrillSimpleFunc { @Param VarBinaryHolder in; @Output Float8Holder out; + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override - public void setup() { } + public void setup() { + bytes = new byte[9]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); + } @Override public void eval() { org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 9); - byte[] bytes = 
new byte[9]; in.buffer.getBytes(in.start, bytes, 0, 9); - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + br.set(bytes); out.value = org.apache.hadoop.hbase.util.OrderedBytes.decodeFloat64(br); } } diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertTo.java index 4f4ce1ecba1..d90b620b7f3 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertTo.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleConvertTo.java @@ -28,6 +28,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.Float8Holder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -37,19 +38,20 @@ public class OrderedBytesDoubleConvertTo implements DrillSimpleFunc { @Param Float8Holder in; @Output VarBinaryHolder out; @Inject DrillBuf buffer; - + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override public void setup() { buffer = buffer.reallocIfNeeded(9); + bytes = new byte[9]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); } @Override public void eval() { buffer.clear(); - byte[] bytes = new byte[9]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + br.set(bytes); org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br, in.value, org.apache.hadoop.hbase.util.Order.ASCENDING); @@ -58,4 +60,4 @@ public void eval() { out.start = 0; 
out.end = 9; } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleDescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleDescConvertTo.java index a4f2dfce415..944b1d10af5 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleDescConvertTo.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesDoubleDescConvertTo.java @@ -28,6 +28,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.Float8Holder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -37,19 +38,20 @@ public class OrderedBytesDoubleDescConvertTo implements DrillSimpleFunc { @Param Float8Holder in; @Output VarBinaryHolder out; @Inject DrillBuf buffer; - + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override public void setup() { buffer = buffer.reallocIfNeeded(9); + bytes = new byte[9]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); } @Override public void eval() { buffer.clear(); - byte[] bytes = new byte[9]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 9); + br.set(bytes); org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br, in.value, org.apache.hadoop.hbase.util.Order.DESCENDING); @@ -58,4 +60,4 @@ public void eval() { out.start = 0; out.end = 9; } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertFrom.java 
b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertFrom.java index 74b83f8e6fb..a66e58005bc 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertFrom.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertFrom.java @@ -24,6 +24,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.Float4Holder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -33,17 +34,20 @@ public class OrderedBytesFloatConvertFrom implements DrillSimpleFunc { @Param VarBinaryHolder in; @Output Float4Holder out; + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override - public void setup() { } + public void setup() { + bytes = new byte[5]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); + } @Override public void eval() { org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 5); - byte[] bytes = new byte[5]; in.buffer.getBytes(in.start, bytes, 0, 5); - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + br.set(bytes); out.value = org.apache.hadoop.hbase.util.OrderedBytes.decodeFloat32(br); } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertTo.java index 9d58f6ab9cc..e41469c0e4a 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertTo.java 
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatConvertTo.java @@ -28,6 +28,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.Float4Holder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -37,19 +38,20 @@ public class OrderedBytesFloatConvertTo implements DrillSimpleFunc { @Param Float4Holder in; @Output VarBinaryHolder out; @Inject DrillBuf buffer; - + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override public void setup() { buffer = buffer.reallocIfNeeded(5); + bytes = new byte[5]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); } @Override public void eval() { buffer.clear(); - byte[] bytes = new byte[5]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + br.set(bytes); org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br, in.value, org.apache.hadoop.hbase.util.Order.ASCENDING); @@ -58,4 +60,4 @@ public void eval() { out.start = 0; out.end = 5; } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatDescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatDescConvertTo.java index d6c83c54809..5c40e795e32 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatDescConvertTo.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesFloatDescConvertTo.java @@ -28,6 +28,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import 
org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.Float4Holder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -37,19 +38,20 @@ public class OrderedBytesFloatDescConvertTo implements DrillSimpleFunc { @Param Float4Holder in; @Output VarBinaryHolder out; @Inject DrillBuf buffer; - + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override public void setup() { buffer = buffer.reallocIfNeeded(5); + bytes = new byte[5]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); } @Override public void eval() { buffer.clear(); - byte[] bytes = new byte[5]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + br.set(bytes); org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br, in.value, org.apache.hadoop.hbase.util.Order.DESCENDING); @@ -58,4 +60,4 @@ public void eval() { out.start = 0; out.end = 5; } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertFrom.java index 964112fc056..6c159471cee 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertFrom.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertFrom.java @@ -24,6 +24,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.IntHolder; import 
org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -33,17 +34,20 @@ public class OrderedBytesIntConvertFrom implements DrillSimpleFunc { @Param VarBinaryHolder in; @Output IntHolder out; + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override - public void setup() { } + public void setup() { + bytes = new byte[5]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); + } @Override public void eval() { org.apache.drill.exec.util.ByteBufUtil.checkBufferLength(in.buffer, in.start, in.end, 5); - byte[] bytes = new byte[5]; in.buffer.getBytes(in.start, bytes, 0, 5); - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); + br.set(bytes); out.value = org.apache.hadoop.hbase.util.OrderedBytes.decodeInt32(br); } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertTo.java index f37487cee73..d703318b389 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertTo.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntConvertTo.java @@ -28,6 +28,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.IntHolder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -37,19 +38,20 @@ public class OrderedBytesIntConvertTo implements DrillSimpleFunc { @Param IntHolder in; @Output VarBinaryHolder out; @Inject DrillBuf buffer; - + @Workspace byte[] bytes; + @Workspace 
org.apache.hadoop.hbase.util.PositionedByteRange br; @Override public void setup() { buffer = buffer.reallocIfNeeded(5); + bytes = new byte[5]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); } @Override public void eval() { buffer.clear(); - byte[] bytes = new byte[5]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + br.set(bytes); org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br, in.value, org.apache.hadoop.hbase.util.Order.ASCENDING); @@ -58,4 +60,4 @@ public void eval() { out.start = 0; out.end = 5; } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntDescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntDescConvertTo.java index 34558365277..6ed4fbffe28 100644 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntDescConvertTo.java +++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesIntDescConvertTo.java @@ -28,6 +28,7 @@ import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; +import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.IntHolder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; @@ -37,19 +38,20 @@ public class OrderedBytesIntDescConvertTo implements DrillSimpleFunc { @Param IntHolder in; @Output VarBinaryHolder out; @Inject DrillBuf buffer; - + @Workspace byte[] bytes; + @Workspace org.apache.hadoop.hbase.util.PositionedByteRange br; @Override public void setup() { buffer = buffer.reallocIfNeeded(5); + bytes = new byte[5]; + br = new org.apache.hadoop.hbase.util.SimplePositionedByteRange(); } @Override public void eval() { 
buffer.clear(); - byte[] bytes = new byte[5]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, 5); + br.set(bytes); org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br, in.value, org.apache.hadoop.hbase.util.Order.DESCENDING); @@ -58,4 +60,4 @@ public void eval() { out.start = 0; out.end = 5; } -} \ No newline at end of file +} diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertFrom.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertFrom.java deleted file mode 100644 index 006af40dd90..00000000000 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertFrom.java +++ /dev/null @@ -1,58 +0,0 @@ -/******************************************************************************* - - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- ******************************************************************************/ -package org.apache.drill.exec.expr.fn.impl.conv; - -import javax.inject.Inject; - -import io.netty.buffer.DrillBuf; - -import org.apache.drill.exec.expr.DrillSimpleFunc; -import org.apache.drill.exec.expr.annotations.FunctionTemplate; -import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; -import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; -import org.apache.drill.exec.expr.annotations.Output; -import org.apache.drill.exec.expr.annotations.Param; -import org.apache.drill.exec.expr.holders.VarBinaryHolder; -import org.apache.drill.exec.expr.holders.VarCharHolder; - -@FunctionTemplate(names = {"convert_fromUTF8_OB", "convert_fromUTF8_OBD"}, - scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) -public class OrderedBytesUTF8ConvertFrom implements DrillSimpleFunc { - - @Param VarBinaryHolder in; - @Output VarCharHolder out; - @Inject DrillBuf buffer; - - @Override - public void setup() { } - - @Override - public void eval() { - buffer = buffer.reallocIfNeeded(in.end - in.start - 2); - byte[] bytes = new byte[in.end - in.start]; - in.buffer.getBytes(in.start, bytes, 0, in.end - in.start); - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start); - String str = org.apache.hadoop.hbase.util.OrderedBytes.decodeString(br); - buffer.setBytes(0, str.getBytes(), 0, str.length()); - out.buffer = buffer; - out.start = 0; - out.end = str.length(); - } -} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertTo.java deleted file mode 100644 index 8f6d9433183..00000000000 --- 
a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8ConvertTo.java +++ /dev/null @@ -1,64 +0,0 @@ -/******************************************************************************* - - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- ******************************************************************************/ -package org.apache.drill.exec.expr.fn.impl.conv; - -import java.nio.charset.StandardCharsets; - -import javax.inject.Inject; - -import io.netty.buffer.DrillBuf; - -import org.apache.drill.exec.expr.DrillSimpleFunc; -import org.apache.drill.exec.expr.annotations.FunctionTemplate; -import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; -import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; -import org.apache.drill.exec.expr.annotations.Output; -import org.apache.drill.exec.expr.annotations.Param; -import org.apache.drill.exec.expr.holders.VarBinaryHolder; -import org.apache.drill.exec.expr.holders.VarCharHolder; - -@FunctionTemplate(name = "convert_toUTF8_OB", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) -public class OrderedBytesUTF8ConvertTo implements DrillSimpleFunc { - - @Param VarCharHolder in; - @Output VarBinaryHolder out; - @Inject DrillBuf buffer; - - @Override - public void setup() { - buffer = buffer.reallocIfNeeded(in.end - in.start + 2); - } - - @Override - public void eval() { - buffer.clear(); - byte [] bytes = new byte[in.end - in.start + 2]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start + 2); - - java.lang.String ip = new String(in.buffer.array(), java.nio.charset.StandardCharsets.UTF_8); - org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br, ip, - org.apache.hadoop.hbase.util.Order.ASCENDING); - - buffer.setBytes(0, bytes, 0, in.end - in.start + 2); - out.buffer = buffer; - out.start = 0; - out.end = in.end - in.start + 2; - } -} \ No newline at end of file diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8DescConvertTo.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8DescConvertTo.java deleted 
file mode 100644 index ed00385ce61..00000000000 --- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/OrderedBytesUTF8DescConvertTo.java +++ /dev/null @@ -1,64 +0,0 @@ -/******************************************************************************* - - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- ******************************************************************************/ -package org.apache.drill.exec.expr.fn.impl.conv; - -import java.nio.charset.StandardCharsets; - -import javax.inject.Inject; - -import io.netty.buffer.DrillBuf; - -import org.apache.drill.exec.expr.DrillSimpleFunc; -import org.apache.drill.exec.expr.annotations.FunctionTemplate; -import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; -import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; -import org.apache.drill.exec.expr.annotations.Output; -import org.apache.drill.exec.expr.annotations.Param; -import org.apache.drill.exec.expr.holders.VarBinaryHolder; -import org.apache.drill.exec.expr.holders.VarCharHolder; - -@FunctionTemplate(name = "convert_toUTF8_OBD", scope = FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) -public class OrderedBytesUTF8DescConvertTo implements DrillSimpleFunc { - - @Param VarCharHolder in; - @Output VarBinaryHolder out; - @Inject DrillBuf buffer; - - @Override - public void setup() { - buffer = buffer.reallocIfNeeded(in.end - in.start + 2); - } - - @Override - public void eval() { - buffer.clear(); - byte [] bytes = new byte[in.end - in.start + 2]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, in.end - in.start + 2); - - java.lang.String ip = new String(in.buffer.array(), java.nio.charset.StandardCharsets.UTF_8); - org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br, ip, - org.apache.hadoop.hbase.util.Order.DESCENDING); - - buffer.setBytes(0, bytes, 0, in.end - in.start + 2); - out.buffer = buffer; - out.start = 0; - out.end = in.end - in.start + 2; - } -} diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java index bbe130a440a..05fb0b7c143 100644 --- 
a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java +++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java @@ -453,62 +453,6 @@ public void testFilterPushDownFloatOBDescPlan() throws Exception { , 1); } - @Test - public void testFilterPushDownUTF8OB() throws Exception { - setColumnWidths(new int[] {40, 2000}); - runHBaseSQLVerifyCount("SELECT\n" - + " convert_from(t.row_key, 'UTF8_OB') rk,\n" - + " convert_from(t.`f`.`c`, 'UTF8') val\n" - + "FROM\n" - + " hbase.`TestTableUTF8OB` t\n" - + "WHERE\n" - + " CONVERT_FROM(row_key, 'UTF8_OB') > 'W' AND" - + " CONVERT_FROM(row_key, 'UTF8_OB') < 'Z'" - , 6); - } - - @Test - public void testFilterPushDownUTF8OBPlan() throws Exception { - setColumnWidths(new int[] {40, 2000}); - runHBaseSQLVerifyCount("EXPLAIN PLAN FOR SELECT\n" - + " convert_from(t.row_key, 'UTF8_OB') rk,\n" - + " convert_from(t.`f`.`c`, 'UTF8') val\n" - + "FROM\n" - + " hbase.`TestTableUTF8OB` t\n" - + "WHERE\n" - + " CONVERT_FROM(row_key, 'UTF8_OB') < 'A' AND" - + " CONVERT_FROM(row_key, 'UTF8_OB') >= 'H'" - , 1); - } - - @Test - public void testFilterPushDownUTF8OBDesc() throws Exception { - setColumnWidths(new int[] {40, 2000}); - runHBaseSQLVerifyCount("SELECT\n" - + " convert_from(t.row_key, 'UTF8_OBD') rk,\n" - + " convert_from(t.`f`.`c`, 'UTF8') val\n" - + "FROM\n" - + " hbase.`TestTableUTF8OBDesc` t\n" - + "WHERE\n" - + " CONVERT_FROM(row_key, 'UTF8_OBD') > 'D' AND" - + " CONVERT_FROM(row_key, 'UTF8_OBD') <= 'KH'" - , 23); - } - - @Test - public void testFilterPushDownUTF8OBDescPlan() throws Exception { - setColumnWidths(new int[] {40, 2000}); - runHBaseSQLVerifyCount("EXPLAIN PLAN FOR SELECT\n" - + " convert_from(t.row_key, 'UTF8_OBD') rk,\n" - + " convert_from(t.`f`.`c`, 'UTF8') val\n" - + "FROM\n" - + " hbase.`TestTableUTF8OBDesc` t\n" - + "WHERE\n" - + " CONVERT_FROM(row_key, 'UTF8_OBD') < 'HY' AND" - + " CONVERT_FROM(row_key, 'UTF8_OBD') >= 'UY'" - , 1); - } - @Test 
public void testFilterPushDownRowKeyLike() throws Exception { setColumnWidths(new int[] {8, 22}); From 71b053006b587f39a47025302e7d3de8dcac482d Mon Sep 17 00:00:00 2001 From: Smidth Panchamia Date: Wed, 19 Aug 2015 15:27:02 -0700 Subject: [PATCH 4/4] DRILL-3492 - Remove test code that creates test tables with UTF8 OrderedByte encoding. --- .../apache/drill/hbase/HBaseTestsSuite.java | 12 +-- .../drill/hbase/TestTableGenerator.java | 88 ------------------- 2 files changed, 1 insertion(+), 99 deletions(-) diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/HBaseTestsSuite.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/HBaseTestsSuite.java index cdb8be39ffc..2063503ac1c 100644 --- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/HBaseTestsSuite.java +++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/HBaseTestsSuite.java @@ -61,8 +61,6 @@ public class HBaseTestsSuite { protected static final String TEST_TABLE_FLOAT_OB_DESC = "TestTableFloatOBDesc"; protected static final String TEST_TABLE_BIGINT_OB_DESC = "TestTableBigIntOBDesc"; protected static final String TEST_TABLE_INT_OB_DESC = "TestTableIntOBDesc"; - protected static final String TEST_TABLE_UTF8_OB = "TestTableUTF8OB"; - protected static final String TEST_TABLE_UTF8_OB_DESC = "TestTableUTF8OBDesc"; private static Configuration conf; @@ -159,9 +157,7 @@ private static boolean tablesExist() throws IOException { && admin.tableExists(TEST_TABLE_DOUBLE_OB_DESC) && admin.tableExists(TEST_TABLE_FLOAT_OB_DESC) && admin.tableExists(TEST_TABLE_BIGINT_OB_DESC) - && admin.tableExists(TEST_TABLE_INT_OB_DESC) - && admin.tableExists(TEST_TABLE_UTF8_OB) - && admin.tableExists(TEST_TABLE_UTF8_OB_DESC); + && admin.tableExists(TEST_TABLE_INT_OB_DESC); } private static void createTestTables() throws Exception { @@ -183,8 +179,6 @@ private static void createTestTables() throws Exception { TestTableGenerator.generateHBaseDatasetFloatOBDesc(admin, 
TEST_TABLE_FLOAT_OB_DESC, 1); TestTableGenerator.generateHBaseDatasetBigIntOBDesc(admin, TEST_TABLE_BIGINT_OB_DESC, 1); TestTableGenerator.generateHBaseDatasetIntOBDesc(admin, TEST_TABLE_INT_OB_DESC, 1); - TestTableGenerator.generateHBaseDatasetUTF8OB(admin, TEST_TABLE_UTF8_OB, 1); - TestTableGenerator.generateHBaseDatasetUTF8OBDesc(admin, TEST_TABLE_UTF8_OB_DESC, 1); } private static void cleanupTestTables() throws IOException { @@ -214,10 +208,6 @@ private static void cleanupTestTables() throws IOException { admin.deleteTable(TEST_TABLE_BIGINT_OB_DESC); admin.disableTable(TEST_TABLE_INT_OB_DESC); admin.deleteTable(TEST_TABLE_INT_OB_DESC); - admin.disableTable(TEST_TABLE_UTF8_OB); - admin.deleteTable(TEST_TABLE_UTF8_OB); - admin.disableTable(TEST_TABLE_UTF8_OB_DESC); - admin.deleteTable(TEST_TABLE_UTF8_OB_DESC); } public static int getZookeeperPort() { diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java index ac0c154a870..e738bbafcbb 100644 --- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java +++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestTableGenerator.java @@ -604,92 +604,4 @@ public static void generateHBaseDatasetIntOBDesc(HBaseAdmin admin, String tableN admin.flush(tableName); } - - public static void generateHBaseDatasetUTF8OB(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { - if (admin.tableExists(tableName)) { - admin.disableTable(tableName); - admin.deleteTable(tableName); - } - - HTableDescriptor desc = new HTableDescriptor(tableName); - desc.addFamily(new HColumnDescriptor(FAMILY_F)); - - if (numberRegions > 1) { - admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); - } else { - admin.createTable(desc); - } - - HTable table = new HTable(admin.getConfiguration(), tableName); - final String AB = 
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"; - final int maxKeyLen = 25; - Random rnd = new Random(); - rnd.setSeed(47); - - for (int i = 0; i < 100; ++i) { - int keyLen = rnd.nextInt(maxKeyLen) + 1; - StringBuilder sb = new StringBuilder(keyLen); - for (int j = 0; j < keyLen; ++j) { - sb.append(AB.charAt(rnd.nextInt(AB.length()))); - } - - byte[] bytes = new byte[keyLen + 2]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, keyLen + 2); - org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br, sb.toString(), - org.apache.hadoop.hbase.util.Order.ASCENDING); - Put p = new Put(bytes); - p.add(FAMILY_F, COLUMN_C, String.format("value %03d", i).getBytes()); - table.put(p); - } - - table.flushCommits(); - table.close(); - - admin.flush(tableName); - } - - public static void generateHBaseDatasetUTF8OBDesc(HBaseAdmin admin, String tableName, int numberRegions) throws Exception { - if (admin.tableExists(tableName)) { - admin.disableTable(tableName); - admin.deleteTable(tableName); - } - - HTableDescriptor desc = new HTableDescriptor(tableName); - desc.addFamily(new HColumnDescriptor(FAMILY_F)); - - if (numberRegions > 1) { - admin.createTable(desc, Arrays.copyOfRange(SPLIT_KEYS, 0, numberRegions-1)); - } else { - admin.createTable(desc); - } - - HTable table = new HTable(admin.getConfiguration(), tableName); - final String AB = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"; - final int maxKeyLen = 25; - Random rnd = new Random(); - rnd.setSeed(47); - - for (int i = 0; i < 100; ++i) { - int keyLen = rnd.nextInt(maxKeyLen) + 1; - StringBuilder sb = new StringBuilder(keyLen); - for (int j = 0; j < keyLen; ++j) { - sb.append(AB.charAt(rnd.nextInt(AB.length()))); - } - - byte[] bytes = new byte[keyLen + 2]; - org.apache.hadoop.hbase.util.PositionedByteRange br = - new org.apache.hadoop.hbase.util.SimplePositionedByteRange(bytes, 0, keyLen + 2); - org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br, 
sb.toString(), - org.apache.hadoop.hbase.util.Order.DESCENDING); - Put p = new Put(bytes); - p.add(FAMILY_F, COLUMN_C, String.format("value %03d", i).getBytes()); - table.put(p); - } - - table.flushCommits(); - table.close(); - - admin.flush(tableName); - } }