diff --git a/hudi-spark-datasource/hudi-spark2/src/main/scala/org/apache/spark/sql/adapter/Spark2Adapter.scala b/hudi-spark-datasource/hudi-spark2/src/main/scala/org/apache/spark/sql/adapter/Spark2Adapter.scala
index f89e188bef45a..1a27fd85d881b 100644
--- a/hudi-spark-datasource/hudi-spark2/src/main/scala/org/apache/spark/sql/adapter/Spark2Adapter.scala
+++ b/hudi-spark-datasource/hudi-spark2/src/main/scala/org/apache/spark/sql/adapter/Spark2Adapter.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression
 import org.apache.spark.sql.catalyst.parser.ParserInterface
 import org.apache.spark.sql.catalyst.plans.JoinType
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoTable, Join, LogicalPlan}
+import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoTable, DeleteFromTable, Join, LogicalPlan}
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
 import org.apache.spark.sql.execution.datasources.parquet.{ParquetFileFormat, Spark24HoodieParquetFileFormat}
@@ -177,7 +177,7 @@ class Spark2Adapter extends SparkAdapter {
   }
 
   override def getDeleteFromTable(table: LogicalPlan, condition: Option[Expression]): LogicalPlan = {
-    throw new UnsupportedOperationException(s"DeleteFromTable LogicalPlan is not supported on Spark 2.x!")
+    DeleteFromTable(table, condition)
   }
 
   override def getQueryParserFromExtendedSqlParser(session: SparkSession, delegate: ParserInterface,