add test for csv table
lirui-apache committed Jul 30, 2019
commit 44d0b0c (parent 152412f)
Showing 2 changed files with 27 additions and 6 deletions.
@@ -79,6 +79,7 @@
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.ql.io.StorageFormatDescriptor;
 import org.apache.hadoop.hive.ql.io.StorageFormatFactory;
+import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
@@ -311,7 +312,7 @@ public CatalogBaseTable getTable(ObjectPath tablePath) throws TableNotExistException {
         checkNotNull(tablePath, "tablePath cannot be null");
 
         Table hiveTable = getHiveTable(tablePath);
-        return instantiateCatalogTable(hiveTable);
+        return instantiateCatalogTable(hiveTable, hiveConf);
     }
 
     @Override
@@ -382,7 +383,7 @@ public void alterTable(ObjectPath tablePath, CatalogBaseTable newCatalogTable, boolean ignoreIfNotExists) {
             return;
         }
 
-        CatalogBaseTable existingTable = instantiateCatalogTable(hiveTable);
+        CatalogBaseTable existingTable = instantiateCatalogTable(hiveTable, hiveConf);
 
         if (existingTable.getClass() != newCatalogTable.getClass()) {
             throw new CatalogException(
@@ -481,7 +482,7 @@ public Table getHiveTable(ObjectPath tablePath) throws TableNotExistException {
         }
     }
 
-    private static CatalogBaseTable instantiateCatalogTable(Table hiveTable) {
+    private static CatalogBaseTable instantiateCatalogTable(Table hiveTable, HiveConf hiveConf) {
         boolean isView = TableType.valueOf(hiveTable.getTableType()) == TableType.VIRTUAL_VIEW;
 
         // Table properties
@@ -494,8 +495,22 @@ private static CatalogBaseTable instantiateCatalogTable(Table hiveTable) {
         String comment = properties.remove(HiveCatalogConfig.COMMENT);
 
         // Table schema
+        List<FieldSchema> fields;
+        if (org.apache.hadoop.hive.ql.metadata.Table.hasMetastoreBasedSchema(hiveConf,
+                hiveTable.getSd().getSerdeInfo().getSerializationLib())) {
+            // get schema from metastore
+            fields = hiveTable.getSd().getCols();
+        } else {
+            // get schema from deserializer
+            try {
+                fields = MetaStoreUtils.getFieldsFromDeserializer(hiveTable.getTableName(),
+                    MetaStoreUtils.getDeserializer(hiveConf, hiveTable, true));
+            } catch (SerDeException | MetaException e) {
+                throw new CatalogException("Failed to get Hive table schema from deserializer", e);
+            }
+        }
         TableSchema tableSchema =
-            HiveTableUtil.createTableSchema(hiveTable.getSd().getCols(), hiveTable.getPartitionKeys());
+            HiveTableUtil.createTableSchema(fields, hiveTable.getPartitionKeys());
 
         // Partition keys
         List<String> partitionKeys = new ArrayList<>();
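The hunk above threads the HiveConf into instantiateCatalogTable so the column list can come from the serde when the metastore copy is not authoritative, which is the case for OpenCSVSerde (it reports all columns as strings regardless of the declared types). A minimal standalone sketch of that lookup, reusing the same Hive calls as the patch (the class and method names here are hypothetical):

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.serde2.SerDeException;

// Hypothetical helper mirroring the schema lookup added in this commit.
public class HiveSchemaSketch {

    static List<FieldSchema> getFields(HiveConf hiveConf, Table hiveTable)
            throws SerDeException, MetaException {
        String serdeLib = hiveTable.getSd().getSerdeInfo().getSerializationLib();
        if (org.apache.hadoop.hive.ql.metadata.Table.hasMetastoreBasedSchema(hiveConf, serdeLib)) {
            // Native serdes: the metastore holds the authoritative columns.
            return hiveTable.getSd().getCols();
        }
        // Non-native serdes such as OpenCSVSerde own the schema, so ask the deserializer.
        return MetaStoreUtils.getFieldsFromDeserializer(hiveTable.getTableName(),
                MetaStoreUtils.getDeserializer(hiveConf, hiveTable, true));
    }
}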
@@ -104,8 +104,14 @@ private void readWriteFormat(String format) throws Exception {
         hiveShell.execute("create database db1");
 
         // create source and dest tables
-        hiveShell.execute("create table db1.src (i int,s string) stored as " + format);
-        hiveShell.execute("create table db1.dest (i int,s string) stored as " + format);
+        String suffix;
+        if (format.equals("csv")) {
+            suffix = "row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'";
+        } else {
+            suffix = "stored as " + format;
+        }
+        hiveShell.execute("create table db1.src (i int,s string) " + suffix);
+        hiveShell.execute("create table db1.dest (i int,s string) " + suffix);
 
         // prepare source data with Hive
         hiveShell.execute("insert into db1.src values (1,'a'),(2,'b')");
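Hive's "stored as" clause does not cover csv here, so for that format the test creates the tables with an explicit OpenCSVSerde row format instead. A minimal sketch of the DDL the test builds, extracted as a standalone helper (class and method names are hypothetical):

// Hypothetical extraction of the test's DDL-building logic.
public class CsvDdlSketch {

    static String createTableDdl(String table, String format) {
        // csv tables fall back to the OpenCSV serde; other formats use "stored as".
        String suffix = format.equals("csv")
                ? "row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'"
                : "stored as " + format;
        return "create table " + table + " (i int,s string) " + suffix;
    }

    public static void main(String[] args) {
        // prints: create table db1.src (i int,s string) row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
        System.out.println(createTableDdl("db1.src", "csv"));
        // prints: create table db1.src (i int,s string) stored as orc
        System.out.println(createTableDdl("db1.src", "orc"));
    }
}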
