diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
index 8a9486ea45dd..732367cf5a5e 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.hudi
 
 import org.apache.hudi.DataSourceWriteOptions
 import org.apache.hudi.avro.HoodieAvroUtils.getRootLevelFieldName
-import org.apache.hudi.common.model.DefaultHoodieRecordPayload
 import org.apache.hudi.common.table.HoodieTableConfig
 import org.apache.hudi.common.util.ValidationUtils
 import org.apache.spark.sql.SparkSession
@@ -67,7 +66,7 @@ object HoodieOptionConfig {
     .withSqlKey("payloadClass")
     .withHoodieKey(DataSourceWriteOptions.PAYLOAD_CLASS_NAME.key)
     .withTableConfigKey(HoodieTableConfig.PAYLOAD_CLASS_NAME.key)
-    .defaultValue(classOf[DefaultHoodieRecordPayload].getName)
+    .defaultValue(DataSourceWriteOptions.PAYLOAD_CLASS_NAME.defaultValue())
     .build()
 
   /**
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala
index 14c2245d5be3..44c23d146c68 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestHoodieOptionConfig.scala
@@ -34,12 +34,12 @@ class TestHoodieOptionConfig extends SparkClientFunctionalTestHarness {
     assertTrue(with1.size == 3)
     assertTrue(with1("primaryKey") == "id")
     assertTrue(with1("type") == "cow")
-    assertTrue(with1("payloadClass") == classOf[DefaultHoodieRecordPayload].getName)
+    assertTrue(with1("payloadClass") == classOf[OverwriteWithLatestAvroPayload].getName)
 
     val ops2 = Map("primaryKey" -> "id",
       "preCombineField" -> "timestamp",
       "type" -> "mor",
-      "payloadClass" -> classOf[OverwriteWithLatestAvroPayload].getName
+      "payloadClass" -> classOf[DefaultHoodieRecordPayload].getName
     )
     val with2 = HoodieOptionConfig.withDefaultSqlOptions(ops2)
     assertTrue(ops2 == with2)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala
index 58c808d28a70..5e826973d24e 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala
@@ -372,7 +372,8 @@ class TestMergeIntoTable extends HoodieSparkSqlTestBase {
            | tblproperties (
            |  type = '$tableType',
            |  primaryKey = 'id',
-           |  preCombineField = 'v'
+           |  preCombineField = 'v',
+           |  hoodie.compaction.payload.class = 'org.apache.hudi.common.model.DefaultHoodieRecordPayload'
            | )
            | partitioned by(dt)
            | location '${tmp.getCanonicalPath}/$tableName1'