HIVE-17887: Incremental REPL LOAD with Drop partition event on timestamp type partition column fails. #267

Closed · wants to merge 1 commit
@@ -288,8 +288,8 @@ public void testBasic() throws IOException {
verifyRun("SELECT * from " + replicatedDbName + ".unptned", unptn_data, driverMirror);
verifyRun("SELECT a from " + replicatedDbName + ".ptned WHERE b=1", ptn_data_1, driverMirror);
verifyRun("SELECT a from " + replicatedDbName + ".ptned WHERE b=2", ptn_data_2, driverMirror);
verifyRun("SELECT a from " + dbName + ".ptned_empty", empty, driverMirror);
verifyRun("SELECT * from " + dbName + ".unptned_empty", empty, driverMirror);
verifyRun("SELECT a from " + replicatedDbName + ".ptned_empty", empty, driverMirror);
verifyRun("SELECT * from " + replicatedDbName + ".unptned_empty", empty, driverMirror);
}

@Test
@@ -1889,6 +1889,31 @@ public void testInsertOverwriteOnPartitionedTableWithCM() throws IOException {
verifyRun("SELECT a from " + dbName + "_dupe.ptned where (b=2) ORDER BY a", data_after_ovwrite, driverMirror);
}

@Test
public void testDropPartitionEventWithPartitionOnTimestampColumn() throws IOException {
String testName = "dropPartitionEventWithPartitionOnTimestampColumn";
String dbName = createDB(testName, driver);
run("CREATE TABLE " + dbName + ".ptned(a string) PARTITIONED BY (b timestamp)", driver);

// Bootstrap dump/load
String replDbName = dbName + "_dupe";
Tuple bootstrapDump = bootstrapLoadAndVerify(dbName, replDbName);

String[] ptn_data = new String[] { "fifteen" };
String ptnVal = "2017-10-24 00:00:00.0";
run("INSERT INTO TABLE " + dbName + ".ptned PARTITION(b=\"" + ptnVal +"\") values('" + ptn_data[0] + "')", driver);

// Replicate insert event and verify
Tuple incrDump = incrementalLoadAndVerify(dbName, bootstrapDump.lastReplId, replDbName);
verifyRun("SELECT a from " + replDbName + ".ptned where (b=\"" + ptnVal + "\") ORDER BY a", ptn_data, driverMirror);

run("ALTER TABLE " + dbName + ".ptned DROP PARTITION(b=\"" + ptnVal + "\")", driver);

// Replicate drop partition event and verify
incrementalLoadAndVerify(dbName, incrDump.lastReplId, replDbName);
verifyIfPartitionNotExist(replDbName, "ptned", new ArrayList<>(Arrays.asList(ptnVal)), metaStoreClientMirror);
}

@Test
public void testRenameTableWithCM() throws IOException {
String testName = "renameTableWithCM";
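The new test covers the reported failure end to end: bootstrap a table partitioned by a timestamp column, replicate an insert into partition b='2017-10-24 00:00:00.0', drop that partition on the source, run another incremental cycle, and confirm the partition is gone on the replica. For readers without the harness at hand, the final check boils down to expecting the metastore lookup for that partition to fail. A minimal sketch of such an assertion follows; the class and method names are illustrative assumptions, not the project's verifyIfPartitionNotExist helper.

import java.util.List;

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.junit.Assert;

// Illustrative helper (hypothetical name): a partition counts as dropped on the
// replica when the metastore lookup for its values throws NoSuchObjectException.
public class PartitionDropCheck {
  public static void assertPartitionDropped(HiveMetaStoreClient client, String db,
      String table, List<String> partVals) throws Exception {
    try {
      client.getPartition(db, table, partVals);
      Assert.fail("Partition " + partVals + " still exists in " + db + "." + table);
    } catch (NoSuchObjectException expected) {
      // Expected: the DROP PARTITION event was applied by incremental REPL LOAD.
    }
  }
}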
ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (5 changes: 3 additions & 2 deletions)
@@ -4429,9 +4429,10 @@ private void dropPartitions(Hive db, Table tbl, DropTableDesc dropTbl) throws Hi
// to the metastore to allow it to do drop a partition or not, depending on a Predicate on the
// parameter key values.
for (DropTableDesc.PartSpec partSpec : dropTbl.getPartSpecs()){
+ List<Partition> partitions = new ArrayList<>();
try {
- for (Partition p : Iterables.filter(
- db.getPartitionsByFilter(tbl, partSpec.getPartSpec().getExprString()),
+ db.getPartitionsByExpr(tbl, partSpec.getPartSpec(), conf, partitions);
+ for (Partition p : Iterables.filter(partitions,
replicationSpec.allowEventReplacementInto())){
db.dropPartition(tbl.getDbName(),tbl.getTableName(),p.getValues(),true);
}
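The substance of the DDLTask change: instead of converting the drop predicate to a filter string with getPartitionsByFilter, the task now passes the ExprNodeGenericFuncDesc to getPartitionsByExpr and collects the matches into a list. The likely reason, inferred from the JIRA title rather than stated in the diff, is that the metastore's filter-string parsing cannot handle a timestamp-typed partition column, while the expression path evaluates the predicate regardless of column type. A sketch of the patched flow, assuming DDLTask's existing context (Hive db, HiveConf conf, Table tbl, ReplicationSpec replicationSpec, Guava's Iterables) and omitting the method's surrounding try/catch:

// Sketch only; mirrors the patched dropPartitions() body under the assumptions above.
for (DropTableDesc.PartSpec partSpec : dropTbl.getPartSpecs()) {
  // Collect matching partitions via the serialized expression rather than a
  // filter string, so non-string partition columns such as timestamp are matched.
  List<Partition> partitions = new ArrayList<>();
  db.getPartitionsByExpr(tbl, partSpec.getPartSpec(), conf, partitions);
  // Drop only the partitions this replication event is allowed to replace.
  for (Partition p : Iterables.filter(partitions,
      replicationSpec.allowEventReplacementInto())) {
    db.dropPartition(tbl.getDbName(), tbl.getTableName(), p.getValues(), true);
  }
}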
@@ -92,7 +92,7 @@ private Map<Integer, List<ExprNodeGenericFuncDesc>> genPartSpecs(Table table,
PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
ExprNodeGenericFuncDesc op = DDLSemanticAnalyzer.makeBinaryPredicate(
"=", column, new ExprNodeConstantDesc(pti, val));
"=", column, new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, val));
expr = (expr == null) ? op : DDLSemanticAnalyzer.makeBinaryPredicate("and", expr, op);
}
if (expr != null) {
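The last hunk adjusts how genPartSpecs rebuilds the drop predicate from the event message: the column keeps its declared primitive type, but the literal is now typed as a plain string rather than that same type. The partition value arrives from the event as text (for the new test, "2017-10-24 00:00:00.0"), and typing the constant as timestamp apparently does not survive the replicated DROP PARTITION path; keeping it a string leaves coercion to expression evaluation. A small sketch of the resulting per-key predicate, reusing the helpers from the diff above; the column name and value are taken from the new test purely for illustration, and the enclosing method is assumed to declare any checked exceptions these helpers throw:

// Column "b" keeps its real type (timestamp), taken from the table metadata.
PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo("timestamp");
ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, "b", null, true);
// The literal from the event message stays a string; it is coerced when the
// expression is evaluated against the partition's values.
ExprNodeGenericFuncDesc op = DDLSemanticAnalyzer.makeBinaryPredicate(
    "=", column, new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "2017-10-24 00:00:00.0"));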