From 16efafaa45e993774424f32845e7a6248f389115 Mon Sep 17 00:00:00 2001 From: gatorsmile Date: Wed, 7 Sep 2016 15:23:53 -0700 Subject: [PATCH 1/6] fix. --- .../spark/sql/execution/command/ddl.scala | 30 ++++++-- .../sql/execution/command/DDLSuite.scala | 43 +++++++---- .../sql/hive/execution/HiveDDLSuite.scala | 77 +++++++++++-------- 3 files changed, 101 insertions(+), 49 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala index 53fb684eb5ce3..fed6e0218309c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala @@ -236,8 +236,8 @@ case class AlterTableSetPropertiesCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog - DDLUtils.verifyAlterTableType(catalog, tableName, isView) val table = catalog.getTableMetadata(tableName) + DDLUtils.verifyAlterTableType(table, tableName, isView) // This overrides old properties val newTable = table.copy(properties = table.properties ++ properties) catalog.alterTable(newTable) @@ -264,8 +264,8 @@ case class AlterTableUnsetPropertiesCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog - DDLUtils.verifyAlterTableType(catalog, tableName, isView) val table = catalog.getTableMetadata(tableName) + DDLUtils.verifyAlterTableType(table, tableName, isView) if (!ifExists) { propKeys.foreach { k => if (!table.properties.contains(k)) { @@ -305,6 +305,7 @@ case class AlterTableSerDePropertiesCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) + DDLUtils.verifyAlterTableType(table, tableName, isView = false) // For datasource tables, disallow setting serde or specifying partition if 
(partSpec.isDefined && DDLUtils.isDatasourceTable(table)) { throw new AnalysisException("Operation not allowed: ALTER TABLE SET " + @@ -354,6 +355,7 @@ case class AlterTableAddPartitionCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) + DDLUtils.verifyAlterTableType(table, tableName, isView = false) if (DDLUtils.isDatasourceTable(table)) { throw new AnalysisException( "ALTER TABLE ADD PARTITION is not allowed for tables defined using the datasource API") @@ -383,7 +385,14 @@ case class AlterTableRenamePartitionCommand( extends RunnableCommand { override def run(sparkSession: SparkSession): Seq[Row] = { - sparkSession.sessionState.catalog.renamePartitions( + val catalog = sparkSession.sessionState.catalog + val table = catalog.getTableMetadata(tableName) + if (DDLUtils.isDatasourceTable(table)) { + throw new AnalysisException( + "ALTER TABLE RENAME PARTITION is not allowed for tables defined using the datasource API") + } + DDLUtils.verifyAlterTableType(catalog, tableName, isView = false) + catalog.renamePartitions( tableName, Seq(oldPartition), Seq(newPartition)) Seq.empty[Row] } @@ -414,6 +423,7 @@ case class AlterTableDropPartitionCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) + DDLUtils.verifyAlterTableType(table, tableName, isView = false) if (DDLUtils.isDatasourceTable(table)) { throw new AnalysisException( "ALTER TABLE DROP PARTITIONS is not allowed for tables defined using the datasource API") @@ -475,6 +485,7 @@ case class AlterTableRecoverPartitionsCommand( s"Operation not allowed: $cmd on temporary tables: $tableName") } val table = catalog.getTableMetadata(tableName) + DDLUtils.verifyAlterTableType(table, tableName, isView = false) if (DDLUtils.isDatasourceTable(table)) { throw new AnalysisException( s"Operation not allowed: 
$cmd on datasource tables: $tableName") @@ -650,6 +661,7 @@ case class AlterTableSetLocationCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) + DDLUtils.verifyAlterTableType(table, tableName, isView = false) partitionSpec match { case Some(spec) => // Partition spec is specified, so we set the location only for this partition @@ -693,7 +705,15 @@ object DDLUtils { catalog: SessionCatalog, tableIdentifier: TableIdentifier, isView: Boolean): Unit = { - catalog.getTableMetadataOption(tableIdentifier).map(_.tableType match { + catalog.getTableMetadataOption(tableIdentifier).foreach( + verifyAlterTableType(_, tableIdentifier, isView)) + } + + def verifyAlterTableType( + tableMetadata: CatalogTable, + tableIdentifier: TableIdentifier, + isView: Boolean): Unit = { + tableMetadata.tableType match { case CatalogTableType.VIEW if !isView => throw new AnalysisException( "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead") @@ -701,6 +721,6 @@ object DDLUtils { throw new AnalysisException( s"Cannot alter a table with ALTER VIEW. 
Please use ALTER TABLE instead") case _ => - }) + } } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index fd35c987cab59..28a1a22606e9d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -869,6 +869,14 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { } test("alter table: rename partition") { + testRenamePartition(isDatasourceTable = false) + } + + test("alter table: rename partition (datasource table)") { + testRenamePartition(isDatasourceTable = true) + } + + private def testRenamePartition(isDatasourceTable: Boolean): Unit = { val catalog = spark.sessionState.catalog val tableIdent = TableIdentifier("tab1", Some("dbx")) val part1 = Map("a" -> "1", "b" -> "q") @@ -881,23 +889,30 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { createTablePartition(catalog, part3, tableIdent) assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == Set(part1, part2, part3)) - sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')") - sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='200', b='c')") - assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == - Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3)) - // rename without explicitly specifying database - catalog.setCurrentDatabase("dbx") - sql("ALTER TABLE tab1 PARTITION (a='100', b='p') RENAME TO PARTITION (a='10', b='p')") - assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == - Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3)) + if (isDatasourceTable) { + convertToDatasourceTable(catalog, tableIdent) + } + maybeWrapException(isDatasourceTable) { + sql("ALTER TABLE dbx.tab1 PARTITION 
(a='1', b='q') RENAME TO PARTITION (a='100', b='p')") + } + if (!isDatasourceTable) { + sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='200', b='c')") + assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == + Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3)) + // rename without explicitly specifying database + catalog.setCurrentDatabase("dbx") + sql("ALTER TABLE tab1 PARTITION (a='100', b='p') RENAME TO PARTITION (a='10', b='p')") + assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == + Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3)) + // partition to rename does not exist + intercept[NoSuchPartitionException] { + sql("ALTER TABLE tab1 PARTITION (a='not_found', b='1') RENAME TO PARTITION (a='1', b='2')") + } + } // table to alter does not exist intercept[NoSuchTableException] { sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')") } - // partition to rename does not exist - intercept[NoSuchPartitionException] { - sql("ALTER TABLE tab1 PARTITION (a='not_found', b='1') RENAME TO PARTITION (a='1', b='2')") - } } test("show tables") { @@ -1246,7 +1261,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { } // table to alter does not exist intercept[AnalysisException] { - sql("ALTER TABLE does_not_exist SET SERDEPROPERTIES ('x' = 'y')") + sql("ALTER TABLE does_not_exist PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')") } } diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala index 3cba5b2a097f1..dbbf1b9321698 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala @@ -305,6 +305,19 @@ class HiveDDLSuite } } + private def checkMisuseForAlterTableOrView(sqlText: String, 
isAlterView: Boolean): Unit = { + val message = intercept[AnalysisException] { + val alterDDLHeader = if (isAlterView) "ALTER VIEW " else "ALTER TABLE " + sql(alterDDLHeader + sqlText) + }.getMessage + val expectedExceptionMsg = if (isAlterView) { + "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead" + } else { + "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead" + } + assert(message.contains(expectedExceptionMsg)) + } + test("alter views and alter table - misuse") { val tabName = "tab1" withTable(tabName) { @@ -318,44 +331,48 @@ class HiveDDLSuite assert(catalog.tableExists(TableIdentifier(tabName))) assert(catalog.tableExists(TableIdentifier(oldViewName))) - var message = intercept[AnalysisException] { - sql(s"ALTER VIEW $tabName RENAME TO $newViewName") - }.getMessage - assert(message.contains( - "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")) + checkMisuseForAlterTableOrView( + s"$tabName RENAME TO $newViewName", isAlterView = true) - message = intercept[AnalysisException] { - sql(s"ALTER VIEW $tabName SET TBLPROPERTIES ('p' = 'an')") - }.getMessage - assert(message.contains( - "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")) + checkMisuseForAlterTableOrView( + s"$oldViewName RENAME TO $newViewName", isAlterView = false) - message = intercept[AnalysisException] { - sql(s"ALTER VIEW $tabName UNSET TBLPROPERTIES ('p')") - }.getMessage - assert(message.contains( - "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")) + checkMisuseForAlterTableOrView( + s"$tabName SET TBLPROPERTIES ('p' = 'an')", isAlterView = true) - message = intercept[AnalysisException] { - sql(s"ALTER TABLE $oldViewName RENAME TO $newViewName") - }.getMessage - assert(message.contains( - "Cannot alter a view with ALTER TABLE. 
Please use ALTER VIEW instead")) + checkMisuseForAlterTableOrView( + s"$oldViewName SET TBLPROPERTIES ('p' = 'an')", isAlterView = false) - message = intercept[AnalysisException] { - sql(s"ALTER TABLE $oldViewName SET TBLPROPERTIES ('p' = 'an')") - }.getMessage - assert(message.contains( - "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead")) + checkMisuseForAlterTableOrView( + s"$tabName UNSET TBLPROPERTIES ('p')", isAlterView = true) - message = intercept[AnalysisException] { - sql(s"ALTER TABLE $oldViewName UNSET TBLPROPERTIES ('p')") - }.getMessage - assert(message.contains( - "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead")) + checkMisuseForAlterTableOrView( + s"$oldViewName UNSET TBLPROPERTIES ('p')", isAlterView = false) + + checkMisuseForAlterTableOrView( + s"$oldViewName SET LOCATION '/path/to/your/lovely/heart'", isAlterView = false) + + checkMisuseForAlterTableOrView( + s"$oldViewName SET SERDE 'whatever'", isAlterView = false) + + checkMisuseForAlterTableOrView( + s"$oldViewName SET SERDEPROPERTIES ('x' = 'y')", isAlterView = false) + + checkMisuseForAlterTableOrView( + s"$oldViewName PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')", isAlterView = false) + + checkMisuseForAlterTableOrView( + s"$oldViewName ADD IF NOT EXISTS PARTITION (a='4', b='8')", isAlterView = false) + + checkMisuseForAlterTableOrView( + s"$oldViewName DROP IF EXISTS PARTITION (a='2')", isAlterView = false) + + checkMisuseForAlterTableOrView( + s"$oldViewName RECOVER PARTITIONS", isAlterView = false) assert(catalog.tableExists(TableIdentifier(tabName))) assert(catalog.tableExists(TableIdentifier(oldViewName))) + assert(!catalog.tableExists(TableIdentifier(newViewName))) } } } From ea630f89ca3a3c0a3ae97bf2b9a14e4956657a9c Mon Sep 17 00:00:00 2001 From: gatorsmile Date: Wed, 7 Sep 2016 15:32:58 -0700 Subject: [PATCH 2/6] fix. 
--- .../org/apache/spark/sql/hive/execution/HiveDDLSuite.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala index dbbf1b9321698..067d780a7ce2c 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala @@ -370,6 +370,9 @@ class HiveDDLSuite checkMisuseForAlterTableOrView( s"$oldViewName RECOVER PARTITIONS", isAlterView = false) + checkMisuseForAlterTableOrView( + s"$oldViewName PARTITION (a='1') RENAME TO PARTITION (a='100')", isAlterView = false) + assert(catalog.tableExists(TableIdentifier(tabName))) assert(catalog.tableExists(TableIdentifier(oldViewName))) assert(!catalog.tableExists(TableIdentifier(newViewName))) From 5d0cffe6b59dc36d63f7d1e055903b53ce2fec42 Mon Sep 17 00:00:00 2001 From: gatorsmile Date: Thu, 8 Sep 2016 23:20:12 -0700 Subject: [PATCH 3/6] address comments --- .../spark/sql/execution/command/ddl.scala | 10 +-- .../spark/sql/execution/command/tables.scala | 4 +- .../sql/execution/command/DDLSuite.scala | 60 +++++++++-------- .../sql/hive/execution/HiveDDLSuite.scala | 65 ++++++++----------- 4 files changed, 63 insertions(+), 76 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala index 6ba04e6cbe473..8204a6a675589 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala @@ -391,7 +391,7 @@ case class AlterTableRenamePartitionCommand( throw new AnalysisException( "ALTER TABLE RENAME PARTITION is not allowed for tables defined using the datasource API") } - DDLUtils.verifyAlterTableType(catalog, tableName, isView = false) + 
DDLUtils.verifyAlterTableType(table, tableName, isView = false) catalog.renamePartitions( tableName, Seq(oldPartition), Seq(newPartition)) Seq.empty[Row] @@ -701,14 +701,6 @@ object DDLUtils { * If the command ALTER VIEW is to alter a table or ALTER TABLE is to alter a view, * issue an exception [[AnalysisException]]. */ - def verifyAlterTableType( - catalog: SessionCatalog, - tableIdentifier: TableIdentifier, - isView: Boolean): Unit = { - catalog.getTableMetadataOption(tableIdentifier).foreach( - verifyAlterTableType(_, tableIdentifier, isView)) - } - def verifyAlterTableType( tableMetadata: CatalogTable, tableIdentifier: TableIdentifier, diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index 027f3588e2922..ea7678ccb1b6e 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -158,7 +158,8 @@ case class AlterTableRenameCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog - DDLUtils.verifyAlterTableType(catalog, oldName, isView) + val table = catalog.getTableMetadata(oldName) + DDLUtils.verifyAlterTableType(table, oldName, isView) // If this is a temp view, just rename the view. // Otherwise, if this is a real table, we also need to uncache and invalidate the table. 
val isTemporary = catalog.isTemporaryTable(oldName) @@ -177,7 +178,6 @@ case class AlterTableRenameCommand( } } // For datasource tables, we also need to update the "path" serde property - val table = catalog.getTableMetadata(oldName) if (DDLUtils.isDatasourceTable(table) && table.tableType == CatalogTableType.MANAGED) { val newPath = catalog.defaultTablePath(newTblName) val newTable = table.withNewStorage( diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index 28a1a22606e9d..ae65f5e2bb474 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -712,7 +712,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { """.stripMargin) val e = intercept[AnalysisException] { - sql("ALTER TABLE tab1 RENAME TO tab2") + sql("ALTER VIEW tab1 RENAME TO tab2") } assert(e.getMessage.contains( "RENAME TEMPORARY TABLE from '`tab1`' to 'tab2': destination table already exists")) @@ -869,16 +869,45 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { } test("alter table: rename partition") { - testRenamePartition(isDatasourceTable = false) + val catalog = spark.sessionState.catalog + val tableIdent = TableIdentifier("tab1", Some("dbx")) + createPartitionedTable(tableIdent, isDatasourceTable = false) + sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')") + sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='20', b='c')") + assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == + Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p"))) + // rename without explicitly specifying database + catalog.setCurrentDatabase("dbx") + sql("ALTER TABLE tab1 PARTITION (a='100', b='p') RENAME TO 
PARTITION (a='10', b='p')") + assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == + Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p"))) + // partition to rename does not exist + intercept[NoSuchPartitionException] { + sql("ALTER TABLE tab1 PARTITION (a='not_found', b='1') RENAME TO PARTITION (a='1', b='2')") + } + // table to alter does not exist + intercept[NoSuchTableException] { + sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')") + } } test("alter table: rename partition (datasource table)") { - testRenamePartition(isDatasourceTable = true) + createPartitionedTable(TableIdentifier("tab1", Some("dbx")), isDatasourceTable = true) + val e = intercept[AnalysisException] { + sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')") + }.getMessage + assert(e.contains( + "ALTER TABLE RENAME PARTITION is not allowed for tables defined using the datasource API")) + // table to alter does not exist + intercept[NoSuchTableException] { + sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')") + } } - private def testRenamePartition(isDatasourceTable: Boolean): Unit = { + private def createPartitionedTable( + tableIdent: TableIdentifier, + isDatasourceTable: Boolean): Unit = { val catalog = spark.sessionState.catalog - val tableIdent = TableIdentifier("tab1", Some("dbx")) val part1 = Map("a" -> "1", "b" -> "q") val part2 = Map("a" -> "2", "b" -> "c") val part3 = Map("a" -> "3", "b" -> "p") @@ -892,27 +921,6 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { if (isDatasourceTable) { convertToDatasourceTable(catalog, tableIdent) } - maybeWrapException(isDatasourceTable) { - sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')") - } - if (!isDatasourceTable) { - sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='200', b='c')") - 
assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == - Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3)) - // rename without explicitly specifying database - catalog.setCurrentDatabase("dbx") - sql("ALTER TABLE tab1 PARTITION (a='100', b='p') RENAME TO PARTITION (a='10', b='p')") - assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == - Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3)) - // partition to rename does not exist - intercept[NoSuchPartitionException] { - sql("ALTER TABLE tab1 PARTITION (a='not_found', b='1') RENAME TO PARTITION (a='1', b='2')") - } - } - // table to alter does not exist - intercept[NoSuchTableException] { - sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')") - } } test("show tables") { diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala index 067d780a7ce2c..aa35a335facbf 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala @@ -305,17 +305,14 @@ class HiveDDLSuite } } - private def checkMisuseForAlterTableOrView(sqlText: String, isAlterView: Boolean): Unit = { - val message = intercept[AnalysisException] { - val alterDDLHeader = if (isAlterView) "ALTER VIEW " else "ALTER TABLE " - sql(alterDDLHeader + sqlText) - }.getMessage - val expectedExceptionMsg = if (isAlterView) { - "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead" - } else { - "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead" - } - assert(message.contains(expectedExceptionMsg)) + private def assertErrorForAlterTableOnView(sqlText: String): Unit = { + val message = intercept[AnalysisException](sql(sqlText)).getMessage + assert(message.contains("Cannot alter a view with ALTER TABLE. 
Please use ALTER VIEW instead")) + } + + private def assertErrorForAlterViewOnTable(sqlText: String): Unit = { + val message = intercept[AnalysisException](sql(sqlText)).getMessage + assert(message.contains("Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")) } test("alter views and alter table - misuse") { @@ -330,48 +327,38 @@ class HiveDDLSuite assert(catalog.tableExists(TableIdentifier(tabName))) assert(catalog.tableExists(TableIdentifier(oldViewName))) + assert(!catalog.tableExists(TableIdentifier(newViewName))) - checkMisuseForAlterTableOrView( - s"$tabName RENAME TO $newViewName", isAlterView = true) + assertErrorForAlterViewOnTable(s"ALTER VIEW $tabName RENAME TO $newViewName") - checkMisuseForAlterTableOrView( - s"$oldViewName RENAME TO $newViewName", isAlterView = false) + assertErrorForAlterTableOnView(s"ALTER TABLE $oldViewName RENAME TO $newViewName") - checkMisuseForAlterTableOrView( - s"$tabName SET TBLPROPERTIES ('p' = 'an')", isAlterView = true) + assertErrorForAlterViewOnTable(s"ALTER VIEW $tabName SET TBLPROPERTIES ('p' = 'an')") - checkMisuseForAlterTableOrView( - s"$oldViewName SET TBLPROPERTIES ('p' = 'an')", isAlterView = false) + assertErrorForAlterTableOnView(s"ALTER TABLE $oldViewName SET TBLPROPERTIES ('p' = 'an')") - checkMisuseForAlterTableOrView( - s"$tabName UNSET TBLPROPERTIES ('p')", isAlterView = true) + assertErrorForAlterViewOnTable(s"ALTER VIEW $tabName UNSET TBLPROPERTIES ('p')") - checkMisuseForAlterTableOrView( - s"$oldViewName UNSET TBLPROPERTIES ('p')", isAlterView = false) + assertErrorForAlterTableOnView(s"ALTER TABLE $oldViewName UNSET TBLPROPERTIES ('p')") - checkMisuseForAlterTableOrView( - s"$oldViewName SET LOCATION '/path/to/your/lovely/heart'", isAlterView = false) + assertErrorForAlterTableOnView(s"ALTER TABLE $oldViewName SET LOCATION '/path/to/home'") - checkMisuseForAlterTableOrView( - s"$oldViewName SET SERDE 'whatever'", isAlterView = false) + assertErrorForAlterTableOnView(s"ALTER TABLE 
$oldViewName SET SERDE 'whatever'") - checkMisuseForAlterTableOrView( - s"$oldViewName SET SERDEPROPERTIES ('x' = 'y')", isAlterView = false) + assertErrorForAlterTableOnView(s"ALTER TABLE $oldViewName SET SERDEPROPERTIES ('x' = 'y')") - checkMisuseForAlterTableOrView( - s"$oldViewName PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')", isAlterView = false) + assertErrorForAlterTableOnView( + s"ALTER TABLE $oldViewName PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')") - checkMisuseForAlterTableOrView( - s"$oldViewName ADD IF NOT EXISTS PARTITION (a='4', b='8')", isAlterView = false) + assertErrorForAlterTableOnView( + s"ALTER TABLE $oldViewName ADD IF NOT EXISTS PARTITION (a='4', b='8')") - checkMisuseForAlterTableOrView( - s"$oldViewName DROP IF EXISTS PARTITION (a='2')", isAlterView = false) + assertErrorForAlterTableOnView(s"ALTER TABLE $oldViewName DROP IF EXISTS PARTITION (a='2')") - checkMisuseForAlterTableOrView( - s"$oldViewName RECOVER PARTITIONS", isAlterView = false) + assertErrorForAlterTableOnView(s"ALTER TABLE $oldViewName RECOVER PARTITIONS") - checkMisuseForAlterTableOrView( - s"$oldViewName PARTITION (a='1') RENAME TO PARTITION (a='100')", isAlterView = false) + assertErrorForAlterTableOnView( + s"ALTER TABLE $oldViewName PARTITION (a='1') RENAME TO PARTITION (a='100')") assert(catalog.tableExists(TableIdentifier(tabName))) assert(catalog.tableExists(TableIdentifier(oldViewName))) From 8b2ed943d13ba64f66eb1c867c05ff1f1ab40482 Mon Sep 17 00:00:00 2001 From: gatorsmile Date: Fri, 9 Sep 2016 07:14:41 -0700 Subject: [PATCH 4/6] address comments --- .../org/apache/spark/sql/execution/command/DDLSuite.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index ae65f5e2bb474..e75cd765c33d4 100644 --- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -881,14 +881,14 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { sql("ALTER TABLE tab1 PARTITION (a='100', b='p') RENAME TO PARTITION (a='10', b='p')") assert(catalog.listPartitions(tableIdent).map(_.spec).toSet == Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p"))) - // partition to rename does not exist - intercept[NoSuchPartitionException] { - sql("ALTER TABLE tab1 PARTITION (a='not_found', b='1') RENAME TO PARTITION (a='1', b='2')") - } // table to alter does not exist intercept[NoSuchTableException] { sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')") } + // partition to rename does not exist + intercept[NoSuchPartitionException] { + sql("ALTER TABLE tab1 PARTITION (a='not_found', b='1') RENAME TO PARTITION (a='1', b='2')") + } } test("alter table: rename partition (datasource table)") { From 159f10106db12c8d47bb8b8ec26118dec286a4e0 Mon Sep 17 00:00:00 2001 From: gatorsmile Date: Wed, 14 Sep 2016 18:49:58 -0700 Subject: [PATCH 5/6] fix --- .../spark/sql/execution/command/ddl.scala | 39 ++++++++++--------- .../spark/sql/execution/command/tables.scala | 2 +- .../sql/execution/command/DDLSuite.scala | 14 ++++++- 3 files changed, 35 insertions(+), 20 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala index 8204a6a675589..0de4288071fb5 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala @@ -237,7 +237,7 @@ case class AlterTableSetPropertiesCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table 
= catalog.getTableMetadata(tableName) - DDLUtils.verifyAlterTableType(table, tableName, isView) + DDLUtils.verifyAlterTableType(catalog, table, isView) // This overrides old properties val newTable = table.copy(properties = table.properties ++ properties) catalog.alterTable(newTable) @@ -265,7 +265,7 @@ case class AlterTableUnsetPropertiesCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) - DDLUtils.verifyAlterTableType(table, tableName, isView) + DDLUtils.verifyAlterTableType(catalog, table, isView) if (!ifExists) { propKeys.foreach { k => if (!table.properties.contains(k)) { @@ -305,7 +305,7 @@ case class AlterTableSerDePropertiesCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) - DDLUtils.verifyAlterTableType(table, tableName, isView = false) + DDLUtils.verifyAlterTableType(catalog, table, isView = false) // For datasource tables, disallow setting serde or specifying partition if (partSpec.isDefined && DDLUtils.isDatasourceTable(table)) { throw new AnalysisException("Operation not allowed: ALTER TABLE SET " + @@ -355,7 +355,7 @@ case class AlterTableAddPartitionCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) - DDLUtils.verifyAlterTableType(table, tableName, isView = false) + DDLUtils.verifyAlterTableType(catalog, table, isView = false) if (DDLUtils.isDatasourceTable(table)) { throw new AnalysisException( "ALTER TABLE ADD PARTITION is not allowed for tables defined using the datasource API") @@ -391,7 +391,7 @@ case class AlterTableRenamePartitionCommand( throw new AnalysisException( "ALTER TABLE RENAME PARTITION is not allowed for tables defined using the datasource API") } - DDLUtils.verifyAlterTableType(table, tableName, 
isView = false) + DDLUtils.verifyAlterTableType(catalog, table, isView = false) catalog.renamePartitions( tableName, Seq(oldPartition), Seq(newPartition)) Seq.empty[Row] @@ -423,7 +423,7 @@ case class AlterTableDropPartitionCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) - DDLUtils.verifyAlterTableType(table, tableName, isView = false) + DDLUtils.verifyAlterTableType(catalog, table, isView = false) if (DDLUtils.isDatasourceTable(table)) { throw new AnalysisException( "ALTER TABLE DROP PARTITIONS is not allowed for tables defined using the datasource API") @@ -485,7 +485,7 @@ case class AlterTableRecoverPartitionsCommand( s"Operation not allowed: $cmd on temporary tables: $tableName") } val table = catalog.getTableMetadata(tableName) - DDLUtils.verifyAlterTableType(table, tableName, isView = false) + DDLUtils.verifyAlterTableType(catalog, table, isView = false) if (DDLUtils.isDatasourceTable(table)) { throw new AnalysisException( s"Operation not allowed: $cmd on datasource tables: $tableName") @@ -661,7 +661,7 @@ case class AlterTableSetLocationCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(tableName) - DDLUtils.verifyAlterTableType(table, tableName, isView = false) + DDLUtils.verifyAlterTableType(catalog, table, isView = false) partitionSpec match { case Some(spec) => // Partition spec is specified, so we set the location only for this partition @@ -699,20 +699,23 @@ object DDLUtils { /** * If the command ALTER VIEW is to alter a table or ALTER TABLE is to alter a view, - * issue an exception [[AnalysisException]]. + * issue an exception [[AnalysisException]]. Temporary views can be altered by both + * ALTER VIEW and ALTER TABLE commands. 
*/ def verifyAlterTableType( + catalog: SessionCatalog, tableMetadata: CatalogTable, - tableIdentifier: TableIdentifier, isView: Boolean): Unit = { - tableMetadata.tableType match { - case CatalogTableType.VIEW if !isView => - throw new AnalysisException( - "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead") - case o if o != CatalogTableType.VIEW && isView => - throw new AnalysisException( - s"Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead") - case _ => + if (!catalog.isTemporaryTable(tableMetadata.identifier)) { + tableMetadata.tableType match { + case CatalogTableType.VIEW if !isView => + throw new AnalysisException( + "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead") + case o if o != CatalogTableType.VIEW && isView => + throw new AnalysisException( + s"Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead") + case _ => + } } } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index ea7678ccb1b6e..d668bc5fc0d28 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -159,7 +159,7 @@ case class AlterTableRenameCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog val table = catalog.getTableMetadata(oldName) - DDLUtils.verifyAlterTableType(table, oldName, isView) + DDLUtils.verifyAlterTableType(catalog, table, isView) // If this is a temp view, just rename the view. // Otherwise, if this is a real table, we also need to uncache and invalidate the table. 
val isTemporary = catalog.isTemporaryTable(oldName) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index e75cd765c33d4..6dc2e75da7960 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -687,6 +687,18 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { assert(spark.table("teachers").collect().toSeq == df.collect().toSeq) } + test("rename temporary table") { + withTempView("tab1", "tab2") { + spark.range(10).createOrReplaceTempView("tab1") + sql("ALTER TABLE tab1 RENAME TO tab2") + checkAnswer(spark.table("tab2"), spark.range(10).toDF()) + intercept[NoSuchTableException] { spark.table("tab1") } + sql("ALTER VIEW tab2 RENAME TO tab1") + checkAnswer(spark.table("tab1"), spark.range(10).toDF()) + intercept[NoSuchTableException] { spark.table("tab2") } + } + } + test("rename temporary table - destination table already exists") { withTempView("tab1", "tab2") { sql( @@ -712,7 +724,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { """.stripMargin) val e = intercept[AnalysisException] { - sql("ALTER VIEW tab1 RENAME TO tab2") + sql("ALTER TABLE tab1 RENAME TO tab2") } assert(e.getMessage.contains( "RENAME TEMPORARY TABLE from '`tab1`' to 'tab2': destination table already exists")) From 91c3a2815346c48190342bca474c514b67d6e6ad Mon Sep 17 00:00:00 2001 From: gatorsmile Date: Wed, 14 Sep 2016 21:01:33 -0700 Subject: [PATCH 6/6] address comments --- .../org/apache/spark/sql/execution/command/ddl.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala index 0de4288071fb5..916b79dbed3cc 100644 --- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala @@ -699,8 +699,12 @@ object DDLUtils { /** * If the command ALTER VIEW is to alter a table or ALTER TABLE is to alter a view, - * issue an exception [[AnalysisException]]. Temporary views can be altered by both - * ALTER VIEW and ALTER TABLE commands. + * issue an exception [[AnalysisException]]. + * + * Note: temporary views can be altered by both ALTER VIEW and ALTER TABLE commands, + * since temporary views can also be created by CREATE TEMPORARY TABLE. In the future, + * when we decide to drop that support, we should disallow users from altering temporary + * views via ALTER TABLE. */ def verifyAlterTableType( catalog: SessionCatalog,