From 9f284a0125e8aedc68b19c07ce57789db33ff584 Mon Sep 17 00:00:00 2001
From: Sivabalan Narayanan
Date: Tue, 2 Aug 2022 18:20:38 -0400
Subject: [PATCH] [HUDI-4501] Throwing exception when restore is attempted
 with hoodie.archive.beyond.savepoint enabled (#6239)

---
 .../hudi/client/BaseHoodieWriteClient.java    |  3 +
 .../TestHoodieClientOnCopyOnWriteStorage.java | 57 +++++++++++++++++++
 2 files changed, 60 insertions(+)

diff --git a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java
index ab14f2216a5a..4cf58f1c3009 100644
--- a/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java
+++ b/hudi-client/hudi-client-common/src/main/java/org/apache/hudi/client/BaseHoodieWriteClient.java
@@ -61,6 +61,7 @@
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.ValidationUtils;
 import org.apache.hudi.common.util.collection.Pair;
+import org.apache.hudi.config.HoodieArchivalConfig;
 import org.apache.hudi.config.HoodieClusteringConfig;
 import org.apache.hudi.config.HoodieCompactionConfig;
 import org.apache.hudi.config.HoodieWriteConfig;
@@ -721,6 +722,8 @@ public void restoreToSavepoint(String savepointTime) {
     HoodieTable table = initTable(WriteOperationType.UNKNOWN, Option.empty(), initialMetadataTableIfNecessary);
     SavepointHelpers.validateSavepointPresence(table, savepointTime);
+    ValidationUtils.checkArgument(!config.shouldArchiveBeyondSavepoint(), "Restore is not supported when " + HoodieArchivalConfig.ARCHIVE_BEYOND_SAVEPOINT.key()
+        + " is enabled");
     restoreToInstant(savepointTime, initialMetadataTableIfNecessary);
     SavepointHelpers.validateSavepointRestore(table, savepointTime);
   }
diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
index 8ba459b77226..28108b793aca 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
@@ -71,6 +71,7 @@
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.common.util.collection.Pair;
+import org.apache.hudi.config.HoodieArchivalConfig;
 import org.apache.hudi.config.HoodieCompactionConfig;
 import org.apache.hudi.config.HoodieIndexConfig;
 import org.apache.hudi.config.HoodieClusteringConfig;
@@ -675,6 +676,62 @@ private void testUpsertsInternal(HoodieWriteConfig config,
     }).collect();
   }
 
+  @Test
+  public void testRestoreWithSavepointBeyondArchival() throws Exception {
+    HoodieWriteConfig config = getConfigBuilder().withRollbackUsingMarkers(true).build();
+    HoodieWriteConfig hoodieWriteConfig = getConfigBuilder(EAGER)
+        .withRollbackUsingMarkers(true)
+        .withArchivalConfig(HoodieArchivalConfig.newBuilder().withArchiveBeyondSavepoint(true).build())
+        .withProps(config.getProps()).withTimelineLayoutVersion(
+            VERSION_0).build();
+
+    HoodieTableMetaClient.withPropertyBuilder()
+        .fromMetaClient(metaClient)
+        .setTimelineLayoutVersion(VERSION_0)
+        .setPopulateMetaFields(config.populateMetaFields())
+        .initTable(metaClient.getHadoopConf(), metaClient.getBasePath());
+
+    SparkRDDWriteClient client = getHoodieWriteClient(hoodieWriteConfig);
+
+    // Write 1 (only inserts)
+    String newCommitTime = "001";
+    String initCommitTime = "000";
+    int numRecords = 200;
+    insertFirstBatch(hoodieWriteConfig, client, newCommitTime, initCommitTime, numRecords, SparkRDDWriteClient::insert,
+        false, true, numRecords, config.populateMetaFields());
+
+    // Write 2 (updates)
+    String prevCommitTime = newCommitTime;
+    newCommitTime = "004";
+    numRecords = 100;
+    String commitTimeBetweenPrevAndNew = "002";
+    updateBatch(hoodieWriteConfig, client, newCommitTime, prevCommitTime,
+        Option.of(Arrays.asList(commitTimeBetweenPrevAndNew)), initCommitTime, numRecords, SparkRDDWriteClient::upsert, false, true,
+        numRecords, 200, 2, config.populateMetaFields());
+
+    // Delete 1
+    prevCommitTime = newCommitTime;
+    newCommitTime = "005";
+    numRecords = 50;
+
+    deleteBatch(hoodieWriteConfig, client, newCommitTime, prevCommitTime,
+        initCommitTime, numRecords, SparkRDDWriteClient::delete, false, true,
+        0, 150, config.populateMetaFields());
+
+    HoodieWriteConfig newConfig = getConfigBuilder().withProps(config.getProps()).withTimelineLayoutVersion(
+        TimelineLayoutVersion.CURR_VERSION)
+        .withArchivalConfig(HoodieArchivalConfig.newBuilder().withArchiveBeyondSavepoint(true).build()).build();
+    client = getHoodieWriteClient(newConfig);
+
+    client.savepoint("004", "user1", "comment1");
+
+    // Verify that restore fails when "hoodie.archive.beyond.savepoint" is enabled.
+    SparkRDDWriteClient finalClient = client;
+    assertThrows(IllegalArgumentException.class, () -> {
+      finalClient.restoreToSavepoint("004");
+    }, "Restore should not be supported when " + HoodieArchivalConfig.ARCHIVE_BEYOND_SAVEPOINT.key() + " is enabled");
+  }
+
   /**
    * Test Insert API for HoodieConcatHandle.
    */
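
Reviewer aside, not part of the patch: a minimal sketch of how an application caller would now trip this guard through the Spark write-client API. The base path /tmp/hudi_trips, table name trips, and savepointed instant "004" are hypothetical; only withArchiveBeyondSavepoint, restoreToSavepoint, and the IllegalArgumentException behavior come from the change above.

// Sketch only: assumes a Hudi Spark-client classpath and an existing table at
// the (hypothetical) base path that already has a savepoint at instant "004".
import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.client.common.HoodieSparkEngineContext;
import org.apache.hudi.config.HoodieArchivalConfig;
import org.apache.hudi.config.HoodieWriteConfig;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class RestoreGuardSketch {
  public static void main(String[] args) {
    SparkConf sparkConf = new SparkConf().setMaster("local[2]").setAppName("restore-guard-sketch");
    try (JavaSparkContext jsc = new JavaSparkContext(sparkConf)) {
      // Opt in to archiving past the latest savepoint -- the setting the new
      // ValidationUtils.checkArgument guard inspects.
      HoodieWriteConfig writeConfig = HoodieWriteConfig.newBuilder()
          .withPath("/tmp/hudi_trips")   // hypothetical base path
          .forTable("trips")             // hypothetical table name
          .withArchivalConfig(HoodieArchivalConfig.newBuilder()
              .withArchiveBeyondSavepoint(true)
              .build())
          .build();

      SparkRDDWriteClient client =
          new SparkRDDWriteClient(new HoodieSparkEngineContext(jsc), writeConfig);
      try {
        // With this patch applied, the restore is rejected up front rather than
        // running against a timeline that archival may already have trimmed.
        client.restoreToSavepoint("004");
      } catch (IllegalArgumentException e) {
        // "Restore is not supported when hoodie.archive.beyond.savepoint is enabled"
        System.err.println("Restore rejected: " + e.getMessage());
      } finally {
        client.close();
      }
    }
  }
}

Failing fast here seems to be the point of the change: once commits beyond the savepoint have been archived, the active timeline may no longer hold the history a restore needs to roll back, so the two settings are treated as mutually exclusive.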