-
Notifications
You must be signed in to change notification settings - Fork 29.1k
[SPARK-14129][SPARK-14128][SQL] Alter table DDL commands #12121
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
Changes from all commits
f5aed60
c134da5
a74a4ef
759faff
5191e9a
6402d5a
68951fa
00d2513
6684909
0ee46b9
d030ae0
59e0504
79c86aa
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -18,7 +18,7 @@ package org.apache.spark.sql.execution | |
|
|
||
| import scala.collection.JavaConverters._ | ||
|
|
||
| import org.apache.spark.sql.SaveMode | ||
| import org.apache.spark.sql.{AnalysisException, SaveMode} | ||
| import org.apache.spark.sql.catalyst.TableIdentifier | ||
| import org.apache.spark.sql.catalyst.parser.{AbstractSqlParser, AstBuilder, ParseException} | ||
| import org.apache.spark.sql.catalyst.parser.SqlBaseParser._ | ||
|
|
@@ -378,8 +378,7 @@ class SparkSqlAstBuilder extends AstBuilder { | |
| override def visitRenameTable(ctx: RenameTableContext): LogicalPlan = withOrigin(ctx) { | ||
| AlterTableRename( | ||
| visitTableIdentifier(ctx.from), | ||
| visitTableIdentifier(ctx.to))( | ||
| command(ctx)) | ||
| visitTableIdentifier(ctx.to)) | ||
| } | ||
|
|
||
| /** | ||
|
|
@@ -395,26 +394,24 @@ class SparkSqlAstBuilder extends AstBuilder { | |
| ctx: SetTablePropertiesContext): LogicalPlan = withOrigin(ctx) { | ||
| AlterTableSetProperties( | ||
| visitTableIdentifier(ctx.tableIdentifier), | ||
| visitTablePropertyList(ctx.tablePropertyList))( | ||
| command(ctx)) | ||
| visitTablePropertyList(ctx.tablePropertyList)) | ||
| } | ||
|
|
||
| /** | ||
| * Create an [[AlterTableUnsetProperties]] command. | ||
| * | ||
| * For example: | ||
| * {{{ | ||
| * ALTER TABLE table UNSET TBLPROPERTIES IF EXISTS ('comment', 'key'); | ||
| * ALTER VIEW view UNSET TBLPROPERTIES IF EXISTS ('comment', 'key'); | ||
| * ALTER TABLE table UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key'); | ||
| * ALTER VIEW view UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key'); | ||
| * }}} | ||
| */ | ||
| override def visitUnsetTableProperties( | ||
| ctx: UnsetTablePropertiesContext): LogicalPlan = withOrigin(ctx) { | ||
| AlterTableUnsetProperties( | ||
| visitTableIdentifier(ctx.tableIdentifier), | ||
| visitTablePropertyList(ctx.tablePropertyList), | ||
| ctx.EXISTS != null)( | ||
| command(ctx)) | ||
| visitTablePropertyList(ctx.tablePropertyList).keys.toSeq, | ||
| ctx.EXISTS != null) | ||
| } | ||
|
|
||
| /** | ||
|
|
@@ -432,116 +429,41 @@ class SparkSqlAstBuilder extends AstBuilder { | |
| Option(ctx.STRING).map(string), | ||
| Option(ctx.tablePropertyList).map(visitTablePropertyList), | ||
| // TODO a partition spec is allowed to have optional values. This is currently violated. | ||
| Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec))( | ||
| command(ctx)) | ||
| Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)) | ||
| } | ||
|
|
||
| /** | ||
| * Create an [[AlterTableStorageProperties]] command. | ||
| * | ||
| * For example: | ||
| * {{{ | ||
| * ALTER TABLE table CLUSTERED BY (col, ...) [SORTED BY (col, ...)] INTO n BUCKETS; | ||
| * }}} | ||
| */ | ||
| // TODO: don't even bother parsing alter table commands related to bucketing and skewing | ||
|
|
||
| override def visitBucketTable(ctx: BucketTableContext): LogicalPlan = withOrigin(ctx) { | ||
| AlterTableStorageProperties( | ||
| visitTableIdentifier(ctx.tableIdentifier), | ||
| visitBucketSpec(ctx.bucketSpec))( | ||
| command(ctx)) | ||
| throw new AnalysisException( | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Could you throw ParseExceptions instead of AnalysisExceptions. You could also move these rules to the
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I tried doing that, but the thing is some tests are in SQL, so they'll end up using the SQL parser, which doesn't understand Hive native commands. Let's fix that separately (see the TODO I left).
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Owww... Forgot about that. +1 for fixing it separately.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. |
||
| "Operation not allowed: ALTER TABLE ... CLUSTERED BY ... INTO N BUCKETS") | ||
| } | ||
|
|
||
| /** | ||
| * Create an [[AlterTableNotClustered]] command. | ||
| * | ||
| * For example: | ||
| * {{{ | ||
| * ALTER TABLE table NOT CLUSTERED; | ||
| * }}} | ||
| */ | ||
| override def visitUnclusterTable(ctx: UnclusterTableContext): LogicalPlan = withOrigin(ctx) { | ||
| AlterTableNotClustered(visitTableIdentifier(ctx.tableIdentifier))(command(ctx)) | ||
| throw new AnalysisException("Operation not allowed: ALTER TABLE ... NOT CLUSTERED") | ||
| } | ||
|
|
||
| /** | ||
| * Create an [[AlterTableNotSorted]] command. | ||
| * | ||
| * For example: | ||
| * {{{ | ||
| * ALTER TABLE table NOT SORTED; | ||
| * }}} | ||
| */ | ||
| override def visitUnsortTable(ctx: UnsortTableContext): LogicalPlan = withOrigin(ctx) { | ||
| AlterTableNotSorted(visitTableIdentifier(ctx.tableIdentifier))(command(ctx)) | ||
| throw new AnalysisException("Operation not allowed: ALTER TABLE ... NOT SORTED") | ||
| } | ||
|
|
||
| /** | ||
| * Create an [[AlterTableSkewed]] command. | ||
| * | ||
| * For example: | ||
| * {{{ | ||
| * ALTER TABLE table SKEWED BY (col1, col2) | ||
| * ON ((col1_value, col2_value) [, (col1_value, col2_value), ...]) | ||
| * [STORED AS DIRECTORIES]; | ||
| * }}} | ||
| */ | ||
| override def visitSkewTable(ctx: SkewTableContext): LogicalPlan = withOrigin(ctx) { | ||
| val table = visitTableIdentifier(ctx.tableIdentifier) | ||
| val (cols, values, storedAsDirs) = visitSkewSpec(ctx.skewSpec) | ||
| AlterTableSkewed(table, cols, values, storedAsDirs)(command(ctx)) | ||
| throw new AnalysisException("Operation not allowed: ALTER TABLE ... SKEWED BY ...") | ||
| } | ||
|
|
||
| /** | ||
| * Create an [[AlterTableNotSorted]] command. | ||
| * | ||
| * For example: | ||
| * {{{ | ||
| * ALTER TABLE table NOT SKEWED; | ||
| * }}} | ||
| */ | ||
| override def visitUnskewTable(ctx: UnskewTableContext): LogicalPlan = withOrigin(ctx) { | ||
| AlterTableNotSkewed(visitTableIdentifier(ctx.tableIdentifier))(command(ctx)) | ||
| throw new AnalysisException("Operation not allowed: ALTER TABLE ... NOT SKEWED") | ||
| } | ||
|
|
||
| /** | ||
| * Create an [[AlterTableNotStoredAsDirs]] command. | ||
| * | ||
| * For example: | ||
| * {{{ | ||
| * ALTER TABLE table NOT STORED AS DIRECTORIES | ||
| * }}} | ||
| */ | ||
| override def visitUnstoreTable(ctx: UnstoreTableContext): LogicalPlan = withOrigin(ctx) { | ||
| AlterTableNotStoredAsDirs(visitTableIdentifier(ctx.tableIdentifier))(command(ctx)) | ||
| throw new AnalysisException( | ||
| "Operation not allowed: ALTER TABLE ... NOT STORED AS DIRECTORIES") | ||
| } | ||
|
|
||
| /** | ||
| * Create an [[AlterTableSkewedLocation]] command. | ||
| * | ||
| * For example: | ||
| * {{{ | ||
| * ALTER TABLE table SET SKEWED LOCATION (col1="loc1" [, (col2, col3)="loc2", ...] ); | ||
| * }}} | ||
| */ | ||
| override def visitSetTableSkewLocations( | ||
| ctx: SetTableSkewLocationsContext): LogicalPlan = withOrigin(ctx) { | ||
| val skewedMap = ctx.skewedLocationList.skewedLocation.asScala.flatMap { | ||
| slCtx => | ||
| val location = string(slCtx.STRING) | ||
| if (slCtx.constant != null) { | ||
| Seq(visitStringConstant(slCtx.constant) -> location) | ||
| } else { | ||
| // TODO this is similar to what was in the original implementation. However this does not | ||
| // make to much sense to me since we should be storing a tuple of values (not column | ||
| // names) for which we want a dedicated storage location. | ||
| visitConstantList(slCtx.constantList).map(_ -> location) | ||
| } | ||
| }.toMap | ||
|
|
||
| AlterTableSkewedLocation( | ||
| visitTableIdentifier(ctx.tableIdentifier), | ||
| skewedMap)( | ||
| command(ctx)) | ||
| throw new AnalysisException( | ||
| "Operation not allowed: ALTER TABLE ... SET SKEWED LOCATION ...") | ||
| } | ||
|
|
||
| /** | ||
|
|
@@ -703,8 +625,7 @@ class SparkSqlAstBuilder extends AstBuilder { | |
| AlterTableSetLocation( | ||
| visitTableIdentifier(ctx.tableIdentifier), | ||
| Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec), | ||
| visitLocationSpec(ctx.locationSpec))( | ||
| command(ctx)) | ||
| visitLocationSpec(ctx.locationSpec)) | ||
| } | ||
|
|
||
| /** | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Since a FunctionRegistry also holds all builtin functions, does this call for removing all builders or just removing all non-builtin builders?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Right now this removes all functions (builtin or not). This is mainly used for tests, so I think it's OK.