Skip to content

Commit

Permalink
DRILL-2641: Move unrelated tests in exec/jdbc module into appropriate…
Browse files Browse the repository at this point in the history
… modules

Also:
- Update the tests to use the new test framework
- Remove duplicate tests (TestHiveScalarUDFs.java)
  • Loading branch information
vkorukanti authored and mehant committed Apr 11, 2015
1 parent 931ed64 commit 375f841
Show file tree
Hide file tree
Showing 11 changed files with 613 additions and 686 deletions.

This file was deleted.

Expand Up @@ -19,11 +19,149 @@

import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.fs.FileSystem;
import org.joda.time.DateTime;
import org.junit.Test;

import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;

public class TestHiveStorage extends HiveTestBase {
@Test
public void testQueryingTablesInNonDefaultFS() throws Exception {
public void hiveReadWithDb() throws Exception {
test("select * from hive.kv");
}

@Test
public void queryEmptyHiveTable() throws Exception {
  // A Hive table whose backing directory contains no data files must scan as zero rows,
  // not fail or fabricate a row.
  final String sql = "SELECT * FROM hive.empty_table";
  testBuilder()
      .sqlQuery(sql)
      .expectsEmptyResultSet()
      .go();
}

/**
 * Test to ensure Drill reads all supported Hive types correctly, both normal fields (converted
 * to Nullable types) and partition fields (converted to Required types).
 *
 * Two baseline rows are expected from hive.readtest: one fully populated row, and one row where
 * every regular field is null/empty while the partition fields still carry their values.
 * @throws Exception if the query fails or the results do not match the baselines
 */
@Test
public void readAllSupportedHiveDataTypes() throws Exception {

testBuilder().sqlQuery("SELECT * FROM hive.readtest")
.unOrdered()
// Column list: "<type>_field" are regular (nullable) columns, "<type>_part" are the
// corresponding partition (required) columns in the same order.
.baselineColumns(
"binary_field",
"boolean_field",
"tinyint_field",
"decimal0_field",
"decimal9_field",
"decimal18_field",
"decimal28_field",
"decimal38_field",
"double_field",
"float_field",
"int_field",
"bigint_field",
"smallint_field",
"string_field",
"varchar_field",
"timestamp_field",
"date_field",
"binary_part",
"boolean_part",
"tinyint_part",
"decimal0_part",
"decimal9_part",
"decimal18_part",
"decimal28_part",
"decimal38_part",
"double_part",
"float_part",
"int_part",
"bigint_part",
"smallint_part",
"string_part",
"varchar_part",
"timestamp_part",
"date_part")
// Row 1: every regular field and every partition field populated.
.baselineValues(
"binaryfield",
false,
(byte) 34,
new BigDecimal("66"),
new BigDecimal("2347.92"),
new BigDecimal("2758725827.99990"),
new BigDecimal("29375892739852.8"),
new BigDecimal("89853749534593985.783"),
8.345d,
4.67f,
123456,
234235L,
(short) 3455,
"stringfield",
"varcharfield",
new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
new DateTime(Date.valueOf("2013-07-05").getTime()),
"binary",
true,
(byte) 64,
new BigDecimal("370000000000"), // TODO(DRILL-2729) Should be 37
new BigDecimal("369000.00"), // TODO(DRILL-2729) Should be 36.90
new BigDecimal("-66367900898250.61888"), // TODO(DRILL-2729) Should be 3289379872.94565
new BigDecimal("39579334534534.4"),
new BigDecimal("363945093845093890.900"),
8.345d,
4.67f,
123456,
234235L,
(short) 3455,
"string",
"varchar",
new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
new DateTime(Date.valueOf("2013-07-05").getTime()))
// Row 2: regular fields are null/empty; partition fields keep the same values as row 1
// since both rows live in the same partition.
.baselineValues( // All fields are null, but partition fields have non-null values
"", // For binary (varchar, string too) empty value is considered as empty string instead of "null"
null, null, null, null, null, null, null, null, null, null, null, null,
"", // string_field
"", // varchar_field
null, null,
"binary",
true,
(byte) 64,
new BigDecimal("370000000000"), // TODO(DRILL-2729) Should be 37
new BigDecimal("369000.00"), // TODO(DRILL-2729) Should be 36.90
new BigDecimal("-66367900898250.61888"), // TODO(DRILL-2729) Should be 3289379872.94565
new BigDecimal("39579334534534.4"),
new BigDecimal("363945093845093890.900"),
8.345d,
4.67f,
123456,
234235L,
(short) 3455,
"string",
"varchar",
new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
new DateTime(Date.valueOf("2013-07-05").getTime()))
.go();
}

@Test
public void orderByOnHiveTable() throws Exception {
  // Sort the kv table on the `value` column descending; because the comparison is
  // ordered, the baseline rows below are listed in exactly the expected output order.
  final String sql = "SELECT * FROM hive.kv ORDER BY `value` DESC";
  testBuilder()
      .sqlQuery(sql)
      .ordered()
      .baselineColumns("key", "value")
      .baselineValues(5, " key_5")
      .baselineValues(4, " key_4")
      .baselineValues(3, " key_3")
      .baselineValues(2, " key_2")
      .baselineValues(1, " key_1")
      .go();
}

@Test
public void queryingTablesInNonDefaultFS() throws Exception {
// Update the default FS settings in Hive test storage plugin to non-local FS
hiveTest.updatePluginConfig(ImmutableMap.of(FileSystem.FS_DEFAULT_NAME_KEY, "hdfs://localhost:9001"));

Expand All @@ -32,7 +170,6 @@ public void testQueryingTablesInNonDefaultFS() throws Exception {
.unOrdered()
.baselineColumns("key", "value")
.baselineValues(1, " key_1")
.build()
.run();
.go();
}
}
@@ -0,0 +1,120 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.hive;

import org.junit.Test;

/**
 * Tests that Drill's INFORMATION_SCHEMA / SHOW / DESCRIBE commands correctly expose the
 * metadata of tables registered through the Hive storage plugin (set up by HiveTestBase).
 */
public class TestInfoSchemaOnHiveStorage extends HiveTestBase {

/** SHOW TABLES must list every table of the requested Hive database, for both FROM and IN syntax. */
@Test
public void showTablesFromDb() throws Exception{
testBuilder()
.sqlQuery("SHOW TABLES FROM hive.`default`")
.unOrdered()
.baselineColumns("TABLE_SCHEMA", "TABLE_NAME")
.baselineValues("hive.default", "partition_pruning_test")
.baselineValues("hive.default", "readtest")
.baselineValues("hive.default", "empty_table")
.baselineValues("hive.default", "infoschematest")
.baselineValues("hive.default", "hiveview")
.baselineValues("hive.default", "kv")
.go();

// Same check against a non-default Hive database using the IN keyword.
testBuilder()
.sqlQuery("SHOW TABLES IN hive.db1")
.unOrdered()
.baselineColumns("TABLE_SCHEMA", "TABLE_NAME")
.baselineValues("hive.db1", "kv_db1")
.go();
}

/** SHOW DATABASES must include the Hive schemas alongside all other registered storage plugins. */
@Test
public void showDatabases() throws Exception{
testBuilder()
.sqlQuery("SHOW DATABASES")
.unOrdered()
.baselineColumns("SCHEMA_NAME")
.baselineValues("hive.default")
.baselineValues("hive.db1")
.baselineValues("dfs.default")
.baselineValues("dfs.root")
.baselineValues("dfs.tmp")
.baselineValues("sys")
.baselineValues("dfs_test.home")
.baselineValues("dfs_test.default")
.baselineValues("dfs_test.tmp")
.baselineValues("cp.default")
.baselineValues("INFORMATION_SCHEMA")
.go();
}

/** Hive columns are surfaced as nullable in Drill, so DESCRIBE must report IS_NULLABLE = YES. */
@Test
public void describeTableNullableColumns() throws Exception{
testBuilder()
.sqlQuery("DESCRIBE hive.`default`.kv")
.unOrdered()
.baselineColumns("COLUMN_NAME", "DATA_TYPE", "IS_NULLABLE")
.baselineValues("key", "INTEGER", "YES")
.baselineValues("value", "VARCHAR", "YES")
.go();
}

/**
 * INFORMATION_SCHEMA.COLUMNS must report varchar max length and decimal precision/scale for
 * Hive columns; -1 marks attributes that do not apply to the column's type.
 */
@Test
public void varCharMaxLengthAndDecimalPrecisionInInfoSchema() throws Exception{
final String query = "SELECT COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH, NUMERIC_PRECISION, NUMERIC_SCALE " +
"FROM INFORMATION_SCHEMA.`COLUMNS` " +
"WHERE TABLE_SCHEMA = 'hive.default' AND TABLE_NAME = 'infoschematest' AND " +
"(COLUMN_NAME = 'stringtype' OR COLUMN_NAME = 'varchartype' OR " +
"COLUMN_NAME = 'inttype' OR COLUMN_NAME = 'decimaltype')";

testBuilder()
.sqlQuery(query)
.unOrdered()
.optionSettingQueriesForTestQuery("USE hive")
.baselineColumns("COLUMN_NAME", "DATA_TYPE", "CHARACTER_MAXIMUM_LENGTH", "NUMERIC_PRECISION", "NUMERIC_SCALE")
.baselineValues("inttype", "INTEGER", -1, -1, -1)
.baselineValues("decimaltype", "DECIMAL", -1, 38, 2)
.baselineValues("stringtype", "VARCHAR", 65535, -1, -1)
.baselineValues("varchartype", "VARCHAR", 20, -1, -1)
.go();
}

/** After USE hive, an unqualified table name must resolve in the plugin's default database. */
@Test
public void defaultSchemaHive() throws Exception{
testBuilder()
.sqlQuery("SELECT * FROM kv LIMIT 2")
.unOrdered()
.optionSettingQueriesForTestQuery("USE hive")
.baselineColumns("key", "value")
.baselineValues(1, " key_1")
.baselineValues(2, " key_2")
.go();
}

/** After USE hive.db1 (two-level schema), an unqualified table name must resolve in that database. */
@Test
public void defaultTwoLevelSchemaHive() throws Exception{
testBuilder()
.sqlQuery("SELECT * FROM kv_db1 LIMIT 2")
.unOrdered()
.optionSettingQueriesForTestQuery("USE hive.db1")
.baselineColumns("key", "value")
.baselineValues(1, " key_1")
.baselineValues(2, " key_2")
.go();
}
}
Expand Up @@ -51,11 +51,6 @@ public HiveTestDataGenerator(StoragePluginRegistry pluginRegistry) {
this.pluginRegistry = pluginRegistry;
}

// TODO: Remove this once hive related tests in exec/jdbc are moved to contrib/storage-hive/core module
public HiveTestDataGenerator() {
this(null);
}

private void cleanDir(String dir) throws IOException{
File f = new File(dir);
if (f.exists()) {
Expand Down Expand Up @@ -137,17 +132,10 @@ public void generateTestData() throws Exception {
executeQuery("CREATE DATABASE IF NOT EXISTS db1");
createTableAndLoadData("db1", "kv_db1", testDataFile);

// Generate data with date and timestamp data type
String testDateDataFile = generateTestDataFileWithDate();

// create table with date and timestamp data type
executeQuery("USE default");
executeQuery("CREATE TABLE IF NOT EXISTS default.foodate(a DATE, b TIMESTAMP) "+
"ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");
executeQuery(String.format("LOAD DATA LOCAL INPATH '%s' OVERWRITE INTO TABLE default.foodate", testDateDataFile));

// create a table with no data
executeQuery("CREATE TABLE IF NOT EXISTS default.empty_table(a INT, b STRING)");
executeQuery("CREATE TABLE IF NOT EXISTS empty_table(a INT, b STRING)");
// delete the table location of empty table
File emptyTableLocation = new File(WH_DIR + "/empty_table");
if (emptyTableLocation.exists()) {
Expand Down Expand Up @@ -263,8 +251,10 @@ public void generateTestData() throws Exception {
// create a Hive view to test how its metadata is populated in Drill's INFORMATION_SCHEMA
executeQuery("CREATE VIEW IF NOT EXISTS hiveview AS SELECT * FROM kv");

// Generate data with date and timestamp data type
String testDateDataFile = generateTestDataFileWithDate();

// create partitioned hive table to test partition pruning
executeQuery("USE default");
executeQuery("CREATE TABLE IF NOT EXISTS default.partition_pruning_test(a DATE, b TIMESTAMP) "+
"partitioned by (c int, d int, e int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");
executeQuery(String.format("LOAD DATA LOCAL INPATH '%s' INTO TABLE default.partition_pruning_test partition(c=1, d=1, e=1)", testDateDataFile));
Expand Down

0 comments on commit 375f841

Please sign in to comment.