
HIVE-24954: MetastoreTransformer is disabled during testing #2139

Merged 15 commits on Apr 21, 2021
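
The metastore's metadata transformer used to be skipped whenever HIVE_IN_TEST was set, so tests never exercised the table translation that production clients see. This change removes the isInTest guard around the transformer calls in the metastore handler, lets individual tests opt out explicitly by setting metastore.metadata.transformer.class to a blank value, and adds a qtest that opts in to MetastoreDefaultTransformer.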
4 changes: 4 additions & 0 deletions data/conf/hivemetastore-site.xml
@@ -38,5 +38,9 @@
<description>Using property defined in HiveConf.ConfVars to test System property overriding</description>
</property>

<property>
<name>metastore.metadata.transformer.class</name>
<value> </value>
</property>

</configuration>
6 changes: 6 additions & 0 deletions data/conf/llap/hivemetastore-site.xml
@@ -18,4 +18,10 @@
-->

<configuration>

<property>
<name>metastore.metadata.transformer.class</name>
<value> </value>
</property>

</configuration>
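
Both test configs above set the transformer class to a single blank character rather than leaving the property unset; the server resolves a blank value to "no transformer at all". A minimal sketch of how the handler side can interpret that value, assuming a no-arg constructor and hypothetical naming (this is not the actual HMSHandler code):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;

public class TransformerLoadingSketch {
  // Returns null for a blank metastore.metadata.transformer.class value,
  // which turns every "if (transformer != null)" in the handler into a no-op.
  static Object loadTransformer(Configuration conf) throws Exception {
    String cls = MetastoreConf.getVar(conf, ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS);
    if (cls == null || cls.trim().isEmpty()) {
      return null; // tests opt out with <value> </value>
    }
    // A no-arg constructor is assumed purely for this sketch.
    return Class.forName(cls.trim()).getDeclaredConstructor().newInstance();
  }
}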
4 changes: 4 additions & 0 deletions ql/src/test/queries/clientpositive/translated_external_qopt.q
@@ -0,0 +1,4 @@
set metastore.metadata.transformer.class=org.apache.hadoop.hive.metastore.MetastoreDefaultTransformer;

create table t (a integer);
desc formatted t;
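
The query file opts back in by naming MetastoreDefaultTransformer explicitly, then inspects the result with desc formatted. Assuming the usual qtest workflow (the driver name below is a guess; the file's entry in testconfiguration.properties decides which CliDriver runs it), the test can be launched from itests/qtest with:

mvn test -Dtest=TestCliDriver -Dqfile=translated_external_qopt.q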
Expected output (q.out) for translated_external_qopt.q:
@@ -0,0 +1,45 @@
PREHOOK: query: create table t (a integer)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t
POSTHOOK: query: create table t (a integer)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t
PREHOOK: query: desc formatted t
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@t
POSTHOOK: query: desc formatted t
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@t
# col_name data_type comment
a int

# Detailed Table Information
Database: default
#### A masked pattern was here ####
Retention: 0
#### A masked pattern was here ####
Table Type: EXTERNAL_TABLE
Table Parameters:
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\"}}
EXTERNAL TRUE
TRANSLATED_TO_EXTERNAL TRUE
bucketing_version 2
external.table.purge TRUE
numFiles 0
numRows 0
rawDataSize 0
totalSize 0
#### A masked pattern was here ####

# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1
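
The expected output captures what the transformer does to a plain create table: the managed, non-transactional table comes back as an EXTERNAL_TABLE, flagged TRANSLATED_TO_EXTERNAL, with external.table.purge=TRUE so dropping it still deletes the data. An illustrative sketch of that rule follows; it is not the real MetastoreDefaultTransformer, which also consults processor capabilities:

import org.apache.hadoop.hive.metastore.api.Table;

public class TranslationSketch {
  // Managed, non-transactional tables are translated to external tables
  // that still purge their data on drop (see the parameters in the q.out above).
  static Table translateToExternal(Table tbl) {
    boolean transactional = tbl.getParameters() != null
        && "true".equalsIgnoreCase(tbl.getParameters().get("transactional"));
    if ("MANAGED_TABLE".equals(tbl.getTableType()) && !transactional) {
      tbl.setTableType("EXTERNAL_TABLE");
      tbl.putToParameters("EXTERNAL", "TRUE");
      tbl.putToParameters("TRANSLATED_TO_EXTERNAL", "TRUE");
      tbl.putToParameters("external.table.purge", "TRUE");
    }
    return tbl;
  }
}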
@@ -1583,7 +1583,7 @@ public Database get_database_req(GetDatabaseRequest request) throws NoSuchObject
try {
db = getMS().getDatabase(request.getCatalogName(), request.getName());
firePreEvent(new PreReadDatabaseEvent(db, this));
if (transformer != null && !isInTest) {
if (transformer != null) {
db = transformer.transformDatabase(db, processorCapabilities, processorId);
}
} catch (MetaException | NoSuchObjectException e) {
@@ -2357,7 +2357,7 @@ private void create_table_core(final RawStore ms, final CreateTableRequest req)
return;
}

if (transformer != null && !isInTest) {
if (transformer != null) {
tbl = transformer.transformCreateTable(tbl, processorCapabilities, processorId);
}
if (tbl.getParameters() != null) {
@@ -6020,7 +6020,7 @@ private void alter_table_core(String catName, String dbname, String name, Table
Exception ex = null;
try {
Table oldt = get_table_core(catName, dbname, name, null);
if (transformer != null && !isInTest) {
if (transformer != null) {
newTable = transformer.transformAlterTable(newTable, processorCapabilities, processorId);
}
firePreEvent(new PreAlterTableEvent(oldt, newTable, this));
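
With the !isInTest clause gone from all three call sites, the config property is the only remaining switch. A hedged sketch of the two modes a test can now choose between, mirroring the setUp changes further down:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;

public class TestConfSketch {
  static Configuration withoutTransformer() {
    Configuration conf = MetastoreConf.newMetastoreConf();
    // Blank value: the handler keeps transformer == null, tables stay untransformed.
    MetastoreConf.setVar(conf, ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS, " ");
    return conf;
  }

  static Configuration withDefaultTransformer() {
    Configuration conf = MetastoreConf.newMetastoreConf();
    // Explicit opt-in, as the new qtest does with its set command.
    MetastoreConf.setVar(conf, ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS,
        "org.apache.hadoop.hive.metastore.MetastoreDefaultTransformer");
    return conf;
  }
}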
@@ -99,6 +99,7 @@ public abstract class NonCatCallsWithCatalog {
public void setUp() throws Exception {
conf = MetastoreConf.newMetastoreConf();
MetastoreConf.setBoolVar(this.conf, ConfVars.HIVE_IN_TEST, true);
MetastoreConf.setVar(conf, ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS, " ");
MetaStoreTestUtils.setConfForStandloneMode(conf);

// Get new client
@@ -234,11 +235,15 @@ public void databases() throws TException, URISyntaxException {
Assert.assertEquals(expectedCatalog(), fetched.getCatalogName());

Set<String> fetchedDbs = new HashSet<>(client.getAllDatabases());
for (String dbName : dbNames) Assert.assertTrue(fetchedDbs.contains(dbName));
for (String dbName : dbNames) {
Assert.assertTrue(fetchedDbs.contains(dbName));
}

fetchedDbs = new HashSet<>(client.getDatabases("db*"));
Assert.assertEquals(2, fetchedDbs.size());
for (String dbName : dbNames) Assert.assertTrue(fetchedDbs.contains(dbName));
for (String dbName : dbNames) {
Assert.assertTrue(fetchedDbs.contains(dbName));
}

client.dropDatabase(dbNames[0], true, false, false);
dir = new File(db0Location);
@@ -249,7 +254,9 @@ public void databases() throws TException, URISyntaxException {
Assert.assertFalse(dir.exists());

fetchedDbs = new HashSet<>(client.getAllDatabases());
for (String dbName : dbNames) Assert.assertFalse(fetchedDbs.contains(dbName));
for (String dbName : dbNames) {
Assert.assertFalse(fetchedDbs.contains(dbName));
}
}

@Test
@@ -271,9 +278,13 @@ public void tablesCreateDropAlterTruncate() throws TException, URISyntaxExceptio
.addCol("col1_" + i, ColumnType.STRING_TYPE_NAME)
.addCol("col2_" + i, ColumnType.INT_TYPE_NAME);
// Make one have a non-standard location
if (i == 0) builder.setLocation(MetaStoreTestUtils.getTestWarehouseDir(tableNames[i]));
if (i == 0) {
builder.setLocation(MetaStoreTestUtils.getTestWarehouseDir(tableNames[i]));
}
// Make one partitioned
if (i == 2) builder.addPartCol("pcol1", ColumnType.STRING_TYPE_NAME);
if (i == 2) {
builder.addPartCol("pcol1", ColumnType.STRING_TYPE_NAME);
}
// Make one a materialized view
/*
// TODO HIVE-18991
@@ -328,10 +339,14 @@ public void tablesCreateDropAlterTruncate() throws TException, URISyntaxExceptio
// test getAllTables
Set<String> fetchedNames = new HashSet<>(client.getAllTables(dbName));
Assert.assertEquals(tableNames.length, fetchedNames.size());
for (String tableName : tableNames) Assert.assertTrue(fetchedNames.contains(tableName));
for (String tableName : tableNames) {
Assert.assertTrue(fetchedNames.contains(tableName));
}

fetchedNames = new HashSet<>(client.getAllTables(DEFAULT_DATABASE_NAME));
for (String tableName : tableNames) Assert.assertFalse(fetchedNames.contains(tableName));
for (String tableName : tableNames) {
Assert.assertFalse(fetchedNames.contains(tableName));
}

// test getMaterializedViewsForRewriting
/* TODO HIVE-18991
@@ -378,7 +393,9 @@ public void tablesCreateDropAlterTruncate() throws TException, URISyntaxExceptio
*/

List<String> partNames = new ArrayList<>();
for (String partVal : partVals) partNames.add("pcol1=" + partVal);
for (String partVal : partVals) {
partNames.add("pcol1=" + partVal);
}
// Truncate a table
client.truncateTable(dbName, tableNames[0], partNames);

@@ -424,7 +441,9 @@ public void tablesGetExists() throws TException {

Set<String> tables = new HashSet<>(client.getTables(dbName, "*e_in_other_*"));
Assert.assertEquals(4, tables.size());
for (String tableName : tableNames) Assert.assertTrue(tables.contains(tableName));
for (String tableName : tableNames) {
Assert.assertTrue(tables.contains(tableName));
}

List<String> fetchedNames = client.getTables(dbName, "*_3");
Assert.assertEquals(1, fetchedNames.size());
@@ -452,7 +471,9 @@ public void tablesList() throws TException {
.setTableName(tableNames[i])
.addCol("col1_" + i, ColumnType.STRING_TYPE_NAME)
.addCol("col2_" + i, ColumnType.INT_TYPE_NAME);
if (i == 0) builder.addTableParam("the_key", "the_value");
if (i == 0) {
builder.addTableParam("the_key", "the_value");
}
Table table = builder.build(conf);
table.unsetCatName();
client.createTable(table);
@@ -586,7 +607,9 @@ public void getPartitions() throws TException {
Arrays.asList("partcol=a0", "partcol=a1"));
Assert.assertEquals(2, fetchedParts.size());
Set<String> vals = new HashSet<>(fetchedParts.size());
for (Partition part : fetchedParts) vals.add(part.getValues().get(0));
for (Partition part : fetchedParts) {
vals.add(part.getValues().get(0));
}
Assert.assertTrue(vals.contains("a0"));
Assert.assertTrue(vals.contains("a1"));

@@ -961,47 +984,61 @@ public void createTableWithConstraints() throws TException {
.onTable(parentTable)
.addColumn("test_col1")
.build(conf);
for (SQLPrimaryKey pkcol : parentPk) pkcol.unsetCatName();
for (SQLPrimaryKey pkcol : parentPk) {
pkcol.unsetCatName();
}
client.addPrimaryKey(parentPk);

List<SQLPrimaryKey> pk = new SQLPrimaryKeyBuilder()
.onTable(table)
.addColumn("col2")
.build(conf);
for (SQLPrimaryKey pkcol : pk) pkcol.unsetCatName();
for (SQLPrimaryKey pkcol : pk) {
pkcol.unsetCatName();
}

List<SQLForeignKey> fk = new SQLForeignKeyBuilder()
.fromPrimaryKey(parentPk)
.onTable(table)
.addColumn("col1")
.build(conf);
for (SQLForeignKey fkcol : fk) fkcol.unsetCatName();
for (SQLForeignKey fkcol : fk) {
fkcol.unsetCatName();
}

List<SQLDefaultConstraint> dv = new SQLDefaultConstraintBuilder()
.onTable(table)
.addColumn("col3")
.setDefaultVal(0)
.build(conf);
for (SQLDefaultConstraint dccol : dv) dccol.unsetCatName();
for (SQLDefaultConstraint dccol : dv) {
dccol.unsetCatName();
}

List<SQLNotNullConstraint> nn = new SQLNotNullConstraintBuilder()
.onTable(table)
.addColumn("col4")
.build(conf);
for (SQLNotNullConstraint nncol : nn) nncol.unsetCatName();
for (SQLNotNullConstraint nncol : nn) {
nncol.unsetCatName();
}

List<SQLUniqueConstraint> uc = new SQLUniqueConstraintBuilder()
.onTable(table)
.addColumn("col5")
.build(conf);
for (SQLUniqueConstraint uccol : uc) uccol.unsetCatName();
for (SQLUniqueConstraint uccol : uc) {
uccol.unsetCatName();
}

List<SQLCheckConstraint> cc = new SQLCheckConstraintBuilder()
.onTable(table)
.addColumn("col6")
.setCheckExpression("> 0")
.build(conf);
for (SQLCheckConstraint cccol : cc) cccol.unsetCatName();
for (SQLCheckConstraint cccol : cc) {
cccol.unsetCatName();
}

client.createTableWithConstraints(table, pk, fk, uc, nn, dv, cc);

@@ -140,6 +140,7 @@ public void setUp() throws Exception {
conf.set("hive.key4", "0");
conf.set("datanucleus.autoCreateTables", "false");
conf.set("hive.in.test", "true");
MetastoreConf.setVar(conf, ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS, " ");

MetaStoreTestUtils.setConfForStandloneMode(conf);
MetastoreConf.setLongVar(conf, ConfVars.BATCH_RETRIEVE_MAX, 2);
@@ -69,7 +69,7 @@
@Category(MetastoreUnitTest.class)
public class TestHiveMetaStoreTxns {

private static Configuration conf = MetastoreConf.newMetastoreConf();
private static Configuration conf;
private static IMetaStoreClient client;
private Connection conn;

@@ -314,9 +314,13 @@ public void stringifyValidTxns() throws Exception {
Assert.assertEquals(2, validTxns.getInvalidTransactions().length);
boolean sawThree = false, sawFive = false;
for (long tid : validTxns.getInvalidTransactions()) {
if (tid == 3) sawThree = true;
else if (tid == 5) sawFive = true;
else Assert.fail("Unexpected value " + tid);
if (tid == 3) {
sawThree = true;
} else if (tid == 5) {
sawFive = true;
} else {
Assert.fail("Unexpected value " + tid);
}
}
Assert.assertTrue(sawThree);
Assert.assertTrue(sawFive);
@@ -394,6 +398,8 @@ public void testGetValidWriteIds() throws TException {

@BeforeClass
public static void setUpDB() throws Exception {
conf = MetastoreConf.newMetastoreConf();
MetastoreConf.setVar(conf, ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS, " ");
conf.setBoolean(ConfVars.HIVE_IN_TEST.getVarname(), true);
MetaStoreTestUtils.setConfForStandloneMode(conf);
TestTxnDbUtil.setConfValues(conf);
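
Note that conf is now built inside setUpDB() rather than in the static field initializer, presumably so the transformer opt-out is applied to the configuration before the shared IMetaStoreClient is created.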
@@ -231,7 +231,7 @@ public void testListener() throws Exception {
.addCol("a", "string")
.addPartCol("b", "string")
.create(msc, conf);
PreCreateTableEvent preTblEvent = (PreCreateTableEvent)(preNotifyList.get(preNotifyList.size() - 1));
PreCreateTableEvent preTblEvent = (PreCreateTableEvent) (preNotifyList.get(preNotifyList.size() - 1));
listSize++;
Table tbl = msc.getTable(dbName, tblName);
validateCreateTable(tbl, preTblEvent.getTable());
@@ -69,6 +69,7 @@ public void setUp() throws Exception {
conf = MetastoreConf.newMetastoreConf();
conf.setClass(MetastoreConf.ConfVars.EXPRESSION_PROXY_CLASS.getVarname(),
MsckPartitionExpressionProxy.class, PartitionExpressionProxy.class);
MetastoreConf.setVar(conf, ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS, " ");
MetaStoreTestUtils.setConfForStandloneMode(conf);
conf.setBoolean(ConfVars.MULTITHREADED.getVarname(), false);
conf.setBoolean(ConfVars.HIVE_IN_TEST.getVarname(), true);
@@ -22,8 +22,6 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;