From 74b3a61164d9d560ce5bb97fb379c3d67eb3c06b Mon Sep 17 00:00:00 2001
From: Sivabalan Narayanan
Date: Thu, 16 Dec 2021 09:19:02 -0800
Subject: [PATCH] Addressing comments

---
 .../org/apache/hudi/functional/TestCOWDataSource.scala        | 4 ++--
 .../apache/hudi/functional/TestCOWDataSourceStorage.scala     | 6 ++++++
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
index 60a946fa34533..663493438a9ef 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
@@ -19,7 +19,7 @@ package org.apache.hudi.functional
 
 import org.apache.hadoop.fs.FileSystem
 import org.apache.hudi.common.config.HoodieMetadataConfig
-import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline}
+import org.apache.hudi.common.table.timeline.HoodieInstant
 import org.apache.hudi.common.table.{HoodieTableMetaClient, TableSchemaResolver}
 import org.apache.hudi.common.testutils.HoodieTestDataGenerator
 import org.apache.hudi.common.testutils.RawTripTestPayload.{deleteRecordsToStrings, recordsToStrings}
@@ -34,7 +34,7 @@ import org.apache.spark.sql.functions.{col, concat, lit, udf}
 import org.apache.spark.sql.types._
 import org.joda.time.DateTime
 import org.joda.time.format.DateTimeFormat
-import org.junit.jupiter.api.Assertions.{assertEquals, assertFalse, assertTrue, fail}
+import org.junit.jupiter.api.Assertions.{assertEquals, assertTrue, fail}
 import org.junit.jupiter.api.{AfterEach, BeforeEach, Test}
 import org.junit.jupiter.params.ParameterizedTest
 import org.junit.jupiter.params.provider.{CsvSource, ValueSource}
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSourceStorage.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSourceStorage.scala
index 1ee492808e03b..af2bc69804742 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSourceStorage.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSourceStorage.scala
@@ -195,6 +195,7 @@ class TestCOWDataSourceStorage extends SparkClientFunctionalTestHarness {
       .option("hoodie.keep.min.commits", "2")
       .option("hoodie.keep.max.commits", "3")
       .option("hoodie.cleaner.commits.retained", "1")
+      .option("hoodie.metadata.enable","false")
       .option(DataSourceWriteOptions.OPERATION.key, DataSourceWriteOptions.BULK_INSERT_OPERATION_OPT_VAL)
       .mode(SaveMode.Overwrite)
       .save(basePath)
@@ -224,6 +225,10 @@ class TestCOWDataSourceStorage extends SparkClientFunctionalTestHarness {
       .map(instant => instant.asInstanceOf[HoodieInstant].getAction)
     // assert replace commit is archived and not part of active timeline.
     assertFalse(commits.contains(HoodieTimeline.REPLACE_COMMIT_ACTION))
+    // assert that archival timeline has replace commit actions.
+    val archivedTimeline = metaClient.getArchivedTimeline();
+    assertTrue(archivedTimeline.getInstants.toArray.map(instant => instant.asInstanceOf[HoodieInstant].getAction)
+      .filter(action => action.equals(HoodieTimeline.REPLACE_COMMIT_ACTION)).size > 0)
   }
 
   def writeRecords(commitTime: Int, dataGen: HoodieTestDataGenerator, writeOperation: String, basePath: String): Unit = {
@@ -234,6 +239,7 @@
       .option("hoodie.keep.min.commits", "2")
       .option("hoodie.keep.max.commits", "3")
       .option("hoodie.cleaner.commits.retained", "1")
+      .option("hoodie.metadata.enable","false")
       .option(DataSourceWriteOptions.OPERATION.key, writeOperation)
       .mode(SaveMode.Append)
       .save(basePath)
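
The archived-timeline assertion added in the second TestCOWDataSourceStorage.scala hunk can also be phrased with exists, which short-circuits instead of building the filtered array. The sketch below is not part of this commit; it is a minimal alternative assuming only the APIs the patch already uses (HoodieTableMetaClient#getArchivedTimeline, HoodieTimeline#getInstants returning a java.util.stream.Stream of HoodieInstant, HoodieInstant#getAction, and HoodieTimeline.REPLACE_COMMIT_ACTION), and the helper object and method names are illustrative only.

    import org.apache.hudi.common.table.HoodieTableMetaClient
    import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline}

    // Illustrative helper, not part of the patch.
    object ArchivedTimelineChecks {
      // Returns true if at least one replace commit has been moved to the archived timeline.
      def hasArchivedReplaceCommit(metaClient: HoodieTableMetaClient): Boolean = {
        val archivedTimeline = metaClient.getArchivedTimeline
        // Stream#toArray yields Array[AnyRef], so each element is cast back to HoodieInstant
        // before its action is compared against the replace commit action.
        archivedTimeline.getInstants.toArray
          .map(instant => instant.asInstanceOf[HoodieInstant].getAction)
          .exists(action => action.equals(HoodieTimeline.REPLACE_COMMIT_ACTION))
      }
    }

The test could then call assertTrue(ArchivedTimelineChecks.hasArchivedReplaceCommit(metaClient)) in place of the filter/size comparison.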