[SPARK-36902][SQL] Migrate CreateTableAsSelectStatement to v2 command
dchvn committed Nov 30, 2021
1 parent e3256b8 · commit b5c2787
Showing 14 changed files with 160 additions and 192 deletions.
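
At a glance: the parser-side CreateTableAsSelectStatement (thirteen flat fields) is replaced by the v2 CreateTableAsSelect command, which takes an UnresolvedDBObjectName child plus a grouped TableSpec. A minimal sketch of the new construction, assuming this commit's catalyst API (field names taken from the hunks below; `query` stands in for a real child plan):

import org.apache.spark.sql.catalyst.analysis.UnresolvedDBObjectName
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, LogicalPlan, TableSpec}

def v2Ctas(nameParts: Seq[String], query: LogicalPlan): CreateTableAsSelect = {
  // All table metadata now travels in one TableSpec value rather than in
  // thirteen flat fields on the old statement node.
  val tableSpec = TableSpec(
    bucketSpec = None, properties = Map.empty, provider = Some("parquet"),
    options = Map.empty, location = None, comment = None,
    serde = None, external = false)
  CreateTableAsSelect(
    UnresolvedDBObjectName(nameParts, isNamespace = false), // resolved by the analyzer
    partitioning = Seq.empty,
    query = query,
    tableSpec = tableSpec,
    writeOptions = Map.empty,
    ignoreIfExists = false)
}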
@@ -37,18 +37,6 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
case UnresolvedDBObjectName(CatalogAndIdentifier(catalog, identifier), _) =>
ResolvedDBObjectName(catalog, identifier.namespace :+ identifier.name())

case c @ CreateTableAsSelectStatement(
NonSessionCatalogAndTable(catalog, tbl), _, _, _, _, _, _, _, _, _, _, _, _) =>
CreateTableAsSelect(
catalog.asTableCatalog,
tbl.asIdentifier,
// convert the bucket spec and add it as a transform
c.partitioning ++ c.bucketSpec.map(_.asTransform),
c.asSelect,
convertTableProperties(c),
writeOptions = c.writeOptions,
ignoreIfExists = c.ifNotExists)

case c @ ReplaceTableStatement(
NonSessionCatalogAndTable(catalog, tbl), _, _, _, _, _, _, _, _, _, _) =>
ReplaceTable(
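With the dedicated CTAS case deleted, the table name now rides the generic UnresolvedDBObjectName rule shown at the top of this hunk. A hedged sketch of the value that rule produces (signatures as displayed above; this is an illustration, not the rule itself):

import org.apache.spark.sql.catalyst.analysis.ResolvedDBObjectName
import org.apache.spark.sql.connector.catalog.{CatalogPlugin, Identifier}

// The resolved node keeps the catalog plus the full multipart name;
// CreateTableAsSelect later turns nameParts back into an Identifier.
def resolveName(catalog: CatalogPlugin, ident: Identifier): ResolvedDBObjectName =
  ResolvedDBObjectName(catalog, ident.namespace.toSeq :+ ident.name())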
@@ -3410,7 +3410,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
}

/**
* Create a table, returning a [[CreateTable]] or [[CreateTableAsSelectStatement]] logical plan.
* Create a table, returning a [[CreateTable]] or [[CreateTableAsSelect]] logical plan.
*
* Expected format:
* {{{
@@ -3470,9 +3470,11 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
ctx)

case Some(query) =>
CreateTableAsSelectStatement(
table, query, partitioning, bucketSpec, properties, provider, options, location, comment,
writeOptions = Map.empty, serdeInfo, external = external, ifNotExists = ifNotExists)
val tableSpec = TableSpec(bucketSpec, properties, provider, options, location, comment,
serdeInfo, external)
CreateTableAsSelect(
UnresolvedDBObjectName(table, isNamespace = false),
partitioning, query, tableSpec, Map.empty, ifNotExists)

case _ =>
// Note: table schema includes both the table columns list and the partition columns
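A quick way to see the new parser output (hedged: assumes this commit's grammar; the SQL and table name are illustrative):

import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.plans.logical.CreateTableAsSelect

// CTAS now parses directly to the v2 command; before this change the same
// SQL produced a CreateTableAsSelectStatement that needed later conversion.
val plan = CatalystSqlParser.parsePlan(
  "CREATE TABLE ns.t USING parquet AS SELECT 1 AS id")
assert(plan.isInstanceOf[CreateTableAsSelect])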
@@ -123,29 +123,6 @@ object SerdeInfo {
}
}

/**
* A CREATE TABLE AS SELECT command, as parsed from SQL.
*/
case class CreateTableAsSelectStatement(
tableName: Seq[String],
asSelect: LogicalPlan,
partitioning: Seq[Transform],
bucketSpec: Option[BucketSpec],
properties: Map[String, String],
provider: Option[String],
options: Map[String, String],
location: Option[String],
comment: Option[String],
writeOptions: Map[String, String],
serde: Option[SerdeInfo],
external: Boolean,
ifNotExists: Boolean) extends UnaryParsedStatement {

override def child: LogicalPlan = asSelect
override protected def withNewChildInternal(newChild: LogicalPlan): CreateTableAsSelectStatement =
copy(asSelect = newChild)
}

/**
* A REPLACE TABLE command, as parsed from SQL.
*
@@ -221,16 +221,22 @@ case class CreateTable(
* Create a new table from a select query with a v2 catalog.
*/
case class CreateTableAsSelect(
catalog: TableCatalog,
tableName: Identifier,
name: LogicalPlan,
partitioning: Seq[Transform],
query: LogicalPlan,
properties: Map[String, String],
tableSpec: TableSpec,
writeOptions: Map[String, String],
ignoreIfExists: Boolean) extends UnaryCommand with V2CreateTablePlan {
ignoreIfExists: Boolean) extends BinaryCommand with V2CreateTablePlan {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.MultipartIdentifierHelper

override def tableSchema: StructType = query.schema
override def child: LogicalPlan = query
override def left: LogicalPlan = name
override def right: LogicalPlan = query

override def tableName: Identifier = {
assert(left.resolved)
left.asInstanceOf[ResolvedDBObjectName].nameParts.asIdentifier
}

override lazy val resolved: Boolean = childrenResolved && {
// the table schema is created from the query schema, so the only resolution needed is to check
@@ -243,8 +249,11 @@ case class CreateTableAsSelect(
this.copy(partitioning = rewritten)
}

override protected def withNewChildInternal(newChild: LogicalPlan): CreateTableAsSelect =
copy(query = newChild)
override protected def withNewChildrenInternal(
newLeft: LogicalPlan,
newRight: LogicalPlan
): CreateTableAsSelect =
copy(name = newLeft, query = newRight)
}

/**
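Note the structural change above: the command becomes a BinaryCommand whose left child is the name plan, and tableName is only derivable after analysis. A hedged illustration of the conversion tableName performs, using the same implicit the class imports:

import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.MultipartIdentifierHelper

// Seq("ns", "t").asIdentifier is Identifier.of(Array("ns"), "t"); tableName
// applies exactly this to the resolved left child, and assert(left.resolved)
// above guards against calling it before analysis.
val ident = Seq("ns", "t").asIdentifier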
@@ -23,7 +23,7 @@ import java.util.Collections
import scala.collection.JavaConverters._

import org.apache.spark.sql.catalyst.analysis.{AsOfTimestamp, AsOfVersion, NamedRelation, NoSuchDatabaseException, NoSuchNamespaceException, NoSuchTableException, TimeTravelSpec}
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelectStatement, ReplaceTableAsSelectStatement, ReplaceTableStatement, SerdeInfo}
import org.apache.spark.sql.catalyst.plans.logical.{ReplaceTableAsSelectStatement, ReplaceTableStatement, SerdeInfo}
import org.apache.spark.sql.connector.catalog.TableChange._
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.types.{ArrayType, MapType, StructField, StructType}
@@ -305,11 +305,6 @@ private[sql] object CatalogV2Util {
catalog.name().equalsIgnoreCase(CatalogManager.SESSION_CATALOG_NAME)
}

def convertTableProperties(c: CreateTableAsSelectStatement): Map[String, String] = {
convertTableProperties(
c.properties, c.options, c.serde, c.location, c.comment, c.provider, c.external)
}

def convertTableProperties(r: ReplaceTableStatement): Map[String, String] = {
convertTableProperties(r.properties, r.options, r.serde, r.location, r.comment, r.provider)
}
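The CreateTableAsSelectStatement overload is deleted; presumably the same conversion now runs from the grouped TableSpec at planning time. A hedged sketch of that shape, as if written inside CatalogV2Util next to the remaining overloads (the TableSpec overload itself is an assumption, not shown in this diff):

import org.apache.spark.sql.catalyst.plans.logical.TableSpec

// Mirrors the deleted CTAS overload, reading the same fields off TableSpec;
// delegates to the existing 7-argument convertTableProperties used above.
def convertTableProperties(t: TableSpec): Map[String, String] =
  convertTableProperties(
    t.properties, t.options, t.serde, t.location, t.comment, t.provider, t.external)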
@@ -20,22 +20,22 @@ package org.apache.spark.sql.catalyst.analysis
import java.util

import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, LeafNode}
import org.apache.spark.sql.connector.catalog.{Identifier, InMemoryTableCatalog, Table, TableCapability, TableCatalog}
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, LeafNode, TableSpec}
import org.apache.spark.sql.connector.catalog.{InMemoryTableCatalog, Table, TableCapability, TableCatalog}
import org.apache.spark.sql.connector.expressions.Expressions
import org.apache.spark.sql.types.{DoubleType, LongType, StringType, StructType}
import org.apache.spark.sql.util.CaseInsensitiveStringMap

class CreateTablePartitioningValidationSuite extends AnalysisTest {
import CreateTablePartitioningValidationSuite._

test("CreateTableAsSelect: fail missing top-level column") {
val tableSpec = TableSpec(None, Map.empty, None, Map.empty,
None, None, None, false)
val plan = CreateTableAsSelect(
catalog,
Identifier.of(Array(), "table_name"),
UnresolvedDBObjectName(Array("table_name"), isNamespace = false),
Expressions.bucket(4, "does_not_exist") :: Nil,
TestRelation2,
Map.empty,
tableSpec,
Map.empty,
ignoreIfExists = false)

@@ -46,12 +46,13 @@ class CreateTablePartitioningValidationSuite extends AnalysisTest {
}

test("CreateTableAsSelect: fail missing top-level column nested reference") {
val tableSpec = TableSpec(None, Map.empty, None, Map.empty,
None, None, None, false)
val plan = CreateTableAsSelect(
catalog,
Identifier.of(Array(), "table_name"),
UnresolvedDBObjectName(Array("table_name"), isNamespace = false),
Expressions.bucket(4, "does_not_exist.z") :: Nil,
TestRelation2,
Map.empty,
tableSpec,
Map.empty,
ignoreIfExists = false)

@@ -62,12 +63,13 @@ class CreateTablePartitioningValidationSuite extends AnalysisTest {
}

test("CreateTableAsSelect: fail missing nested column") {
val tableSpec = TableSpec(None, Map.empty, None, Map.empty,
None, None, None, false)
val plan = CreateTableAsSelect(
catalog,
Identifier.of(Array(), "table_name"),
UnresolvedDBObjectName(Array("table_name"), isNamespace = false),
Expressions.bucket(4, "point.z") :: Nil,
TestRelation2,
Map.empty,
tableSpec,
Map.empty,
ignoreIfExists = false)

@@ -78,12 +80,13 @@ class CreateTablePartitioningValidationSuite extends AnalysisTest {
}

test("CreateTableAsSelect: fail with multiple errors") {
val tableSpec = TableSpec(None, Map.empty, None, Map.empty,
None, None, None, false)
val plan = CreateTableAsSelect(
catalog,
Identifier.of(Array(), "table_name"),
UnresolvedDBObjectName(Array("table_name"), isNamespace = false),
Expressions.bucket(4, "does_not_exist", "point.z") :: Nil,
TestRelation2,
Map.empty,
tableSpec,
Map.empty,
ignoreIfExists = false)

@@ -95,38 +98,41 @@ class CreateTablePartitioningValidationSuite extends AnalysisTest {
}

test("CreateTableAsSelect: success with top-level column") {
val tableSpec = TableSpec(None, Map.empty, None, Map.empty,
None, None, None, false)
val plan = CreateTableAsSelect(
catalog,
Identifier.of(Array(), "table_name"),
UnresolvedDBObjectName(Array("table_name"), isNamespace = false),
Expressions.bucket(4, "id") :: Nil,
TestRelation2,
Map.empty,
tableSpec,
Map.empty,
ignoreIfExists = false)

assertAnalysisSuccess(plan)
}

test("CreateTableAsSelect: success using nested column") {
val tableSpec = TableSpec(None, Map.empty, None, Map.empty,
None, None, None, false)
val plan = CreateTableAsSelect(
catalog,
Identifier.of(Array(), "table_name"),
UnresolvedDBObjectName(Array("table_name"), isNamespace = false),
Expressions.bucket(4, "point.x") :: Nil,
TestRelation2,
Map.empty,
tableSpec,
Map.empty,
ignoreIfExists = false)

assertAnalysisSuccess(plan)
}

test("CreateTableAsSelect: success using complex column") {
val tableSpec = TableSpec(None, Map.empty, None, Map.empty,
None, None, None, false)
val plan = CreateTableAsSelect(
catalog,
Identifier.of(Array(), "table_name"),
UnresolvedDBObjectName(Array("table_name"), isNamespace = false),
Expressions.bucket(4, "point") :: Nil,
TestRelation2,
Map.empty,
tableSpec,
Map.empty,
ignoreIfExists = false)

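Each updated test above repeats the same empty TableSpec and unresolved name. A small helper (hypothetical, not in the suite; assumes the suite's TestRelation2 and imports) captures the new construction pattern once:

import org.apache.spark.sql.catalyst.analysis.UnresolvedDBObjectName
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, TableSpec}
import org.apache.spark.sql.connector.expressions.Transform

// Builds the CTAS plan under test for one transform, with an unresolved
// name in place of the old (catalog, Identifier) constructor arguments.
def ctasPlan(transform: Transform): CreateTableAsSelect = {
  val tableSpec = TableSpec(None, Map.empty, None, Map.empty, None, None, None, false)
  CreateTableAsSelect(
    UnresolvedDBObjectName(Seq("table_name"), isNamespace = false),
    transform :: Nil, TestRelation2, tableSpec, Map.empty, ignoreIfExists = false)
}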
@@ -719,8 +719,8 @@ class DDLParserSuite extends AnalysisTest {
parsedPlan match {
case create: CreateTable if newTableToken == "CREATE" =>
assert(create.ignoreIfExists == expectedIfNotExists)
case ctas: CreateTableAsSelectStatement if newTableToken == "CREATE" =>
assert(ctas.ifNotExists == expectedIfNotExists)
case ctas: CreateTableAsSelect if newTableToken == "CREATE" =>
assert(ctas.ignoreIfExists == expectedIfNotExists)
case replace: ReplaceTableStatement if newTableToken == "REPLACE" =>
case replace: ReplaceTableAsSelectStatement if newTableToken == "REPLACE" =>
case other =>
@@ -2310,19 +2310,19 @@ class DDLParserSuite extends AnalysisTest {
replace.location,
replace.comment,
replace.serde)
case ctas: CreateTableAsSelectStatement =>
case ctas: CreateTableAsSelect =>
TableSpec(
ctas.tableName,
Some(ctas.asSelect).filter(_.resolved).map(_.schema),
ctas.name.asInstanceOf[UnresolvedDBObjectName].nameParts,
Some(ctas.query).filter(_.resolved).map(_.schema),
ctas.partitioning,
ctas.bucketSpec,
ctas.properties,
ctas.provider,
ctas.options,
ctas.location,
ctas.comment,
ctas.serde,
ctas.external)
ctas.tableSpec.bucketSpec,
ctas.tableSpec.properties,
ctas.tableSpec.provider,
ctas.tableSpec.options,
ctas.tableSpec.location,
ctas.tableSpec.comment,
ctas.tableSpec.serde,
ctas.tableSpec.external)
case rtas: ReplaceTableAsSelectStatement =>
TableSpec(
rtas.tableName,
44 changes: 24 additions & 20 deletions sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -23,10 +23,10 @@ import scala.collection.JavaConverters._

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, NoSuchTableException, UnresolvedRelation}
import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, NoSuchTableException, UnresolvedDBObjectName, UnresolvedRelation}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.plans.logical.{AppendData, CreateTableAsSelect, CreateTableAsSelectStatement, InsertIntoStatement, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, ReplaceTableAsSelectStatement}
import org.apache.spark.sql.catalyst.plans.logical.{AppendData, CreateTableAsSelect, InsertIntoStatement, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, ReplaceTableAsSelectStatement, TableSpec}
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.connector.catalog.{CatalogPlugin, CatalogV2Implicits, CatalogV2Util, Identifier, SupportsCatalogOptions, Table, TableCatalog, TableProvider, V1Table}
import org.apache.spark.sql.connector.catalog.TableCapability._
@@ -323,18 +323,25 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
provider match {
case supportsExtract: SupportsCatalogOptions =>
val ident = supportsExtract.extractIdentifier(dsOptions)
val catalog = CatalogV2Util.getTableProviderCatalog(
supportsExtract, catalogManager, dsOptions)
val catalog = if (ident.namespace().isEmpty) {
Array(dsOptions.get("catalog"))
} else {
ident.namespace()
}

val location = Option(dsOptions.get("path")).map(TableCatalog.PROP_LOCATION -> _)

val tableSpec = TableSpec(None, Map.empty, Some(source), Map.empty,
extraOptions.get("path"), extraOptions.get(TableCatalog.PROP_COMMENT),
None, false)
runCommand(df.sparkSession) {
CreateTableAsSelect(
catalog,
ident,
UnresolvedDBObjectName(
catalog.toSeq :+ ident.name,
isNamespace = false
),
partitioningAsV2,
df.queryExecution.analyzed,
Map(TableCatalog.PROP_PROVIDER -> source) ++ location,
tableSpec,
finalOptions,
ignoreIfExists = createMode == SaveMode.Ignore)
}
@@ -604,20 +611,17 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
// We have a potential race condition here in AppendMode, if the table suddenly gets
// created between our existence check and physical execution, but this can't be helped
// in any case.
CreateTableAsSelectStatement(
nameParts,
df.queryExecution.analyzed,
val tableSpec = TableSpec(None, Map.empty, Some(source), Map.empty,
extraOptions.get("path"), extraOptions.get(TableCatalog.PROP_COMMENT),
None, false)

CreateTableAsSelect(
UnresolvedDBObjectName(nameParts, isNamespace = false),
partitioningAsV2,
None,
Map.empty,
Some(source),
df.queryExecution.analyzed,
tableSpec,
Map.empty,
extraOptions.get("path"),
extraOptions.get(TableCatalog.PROP_COMMENT),
extraOptions.toMap,
None,
ifNotExists = other == SaveMode.Ignore,
external = false)
other == SaveMode.Ignore)
}

runCommand(df.sparkSession) {
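The public saveAsTable API is unchanged; both writer paths above now plan the v2 command internally. A usage sketch (hedged: the local session and names are illustrative):

import org.apache.spark.sql.{SaveMode, SparkSession}

val spark = SparkSession.builder().master("local[1]").getOrCreate()
val df = spark.range(10).toDF("user_id")
// SaveMode.Ignore maps to ignoreIfExists = true in the plan built above; the
// parsed name parts become UnresolvedDBObjectName(Seq("t"), isNamespace = false),
// which the analyzer resolves against the session catalog.
df.write.mode(SaveMode.Ignore).saveAsTable("t")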