diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/Catalogs.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/Catalogs.java
index c58e7d07fc50..ab75665aed04 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/Catalogs.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/Catalogs.java
@@ -222,6 +222,19 @@ public static Table registerTable(Configuration conf, Properties props, String m
     return new HadoopTables(conf).create(schema, spec, map, location);
   }

+  public static void renameTable(Configuration conf, Properties props, TableIdentifier to) {
+    String catalogName = props.getProperty(InputFormatConfig.CATALOG_NAME);
+
+    Optional<Catalog> catalog = loadCatalog(conf, catalogName);
+    if (catalog.isPresent()) {
+      String name = props.getProperty(NAME);
+      Preconditions.checkNotNull(name, "Table identifier not set");
+      catalog.get().renameTable(TableIdentifier.parse(name), to);
+    } else {
+      throw new RuntimeException("Rename from " + props.getProperty(NAME) + " to " + to + " failed");
+    }
+  }
+
   static Optional<Catalog> loadCatalog(Configuration conf, String catalogName) {
     String catalogType = getCatalogType(conf, catalogName);
     if (NO_CATALOG_TYPE.equalsIgnoreCase(catalogType)) {
diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
index 174022e5ea2a..229e0490f5dc 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergMetaHook.java
@@ -120,7 +120,7 @@ public class HiveIcebergMetaHook implements HiveMetaHook {
   static final EnumSet<AlterTableType> SUPPORTED_ALTER_OPS = EnumSet.of(
       AlterTableType.ADDCOLS, AlterTableType.REPLACE_COLUMNS, AlterTableType.RENAME_COLUMN,
       AlterTableType.ADDPROPS, AlterTableType.DROPPROPS, AlterTableType.SETPARTITIONSPEC,
-      AlterTableType.UPDATE_COLUMNS, AlterTableType.SETPARTITIONSPEC, AlterTableType.EXECUTE);
+      AlterTableType.UPDATE_COLUMNS, AlterTableType.RENAME, AlterTableType.EXECUTE);
   private static final List<String> MIGRATION_ALLOWED_SOURCE_FORMATS = ImmutableList.of(
       FileFormat.PARQUET.name().toLowerCase(), FileFormat.ORC.name().toLowerCase(),
@@ -319,6 +319,10 @@ public void preAlterTable(org.apache.hadoop.hive.metastore.api.Table hmsTable, E
       throws MetaException {
     catalogProperties = getCatalogProperties(hmsTable);
     setupAlterOperationType(hmsTable, context);
+    if (AlterTableType.RENAME.equals(currentAlterTableOp)) {
+      catalogProperties.put(Catalogs.NAME, TableIdentifier.of(context.getProperties().get(OLD_DB_NAME),
+          context.getProperties().get(OLD_TABLE_NAME)).toString());
+    }
     if (commitLock == null) {
       commitLock = new HiveCommitLock(conf, new CachedClientPool(conf, Maps.fromProperties(catalogProperties)),
           catalogProperties.getProperty(Catalogs.NAME), hmsTable.getDbName(), hmsTable.getTableName());
@@ -549,6 +553,10 @@ public void commitAlterTable(org.apache.hadoop.hive.metastore.api.Table hmsTable
       case SETPARTITIONSPEC:
         IcebergTableUtil.updateSpec(conf, icebergTable);
         break;
+      case RENAME:
+        Catalogs.renameTable(conf, catalogProperties, TableIdentifier.of(hmsTable.getDbName(),
+            hmsTable.getTableName()));
+        break;
     }
   }
 }
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rename.q b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rename.q
new file mode 100644
index 000000000000..88f9765030b4
--- /dev/null
+++ b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_rename.q
@@ -0,0 +1,65 @@
+
+-- create a v1 table
+create table icev1 (id int, name string) Stored by Iceberg;
+
+-- insert some values
+insert into icev1 values (1, 'SSD'),(2, 'RAM');
+
+select * from icev1 order by id;
+
+-- do the rename
+explain alter table icev1 rename to icev1renamed;
+
+alter table icev1 rename to icev1renamed;
+
+select * from icev1renamed order by id;
+
+-- create an unpartitioned v2 table
+create table iceorgin (id int, name string) Stored by Iceberg TBLPROPERTIES ('format-version'='2');
+
+-- insert some values
+insert into iceorgin values (1, 'ABC'),(2, 'CBS'),(3, null),(4, 'POPI'),(5, 'AQWR'),(6, 'POIU'),
+(9, null),(8,'POIKL'),(10, 'YUIO');
+
+-- do some deletes
+delete from iceorgin where id>9 OR id=8;
+
+select * from iceorgin order by id;
+
+-- do the rename
+
+alter table iceorgin rename to icerenamed;
+
+select * from icerenamed order by id;
+
+-- create a partitioned v2 table
+create table iceorginpart (id int) partitioned by (part string) Stored by Iceberg TBLPROPERTIES ('format-version'='2');
+
+insert into iceorginpart values (1, 'ABC'),(2, 'CBS'),(3,'CBS'),(4, 'ABC'),(5, 'AQWR'),(6, 'ABC'),
+(9, 'AQWR'),(8,'ABC'),(10, 'YUIO');
+
+-- do some deletes
+delete from iceorginpart where id<3 OR id=7;
+
+select * from iceorginpart order by id;
+
+explain alter table iceorginpart rename to icerenamedpart;
+
+alter table iceorginpart rename to icerenamedpart;
+
+select * from icerenamedpart order by id;
+
+-- create a new unpartitioned table with old name
+create table iceorgin (id int, name string) Stored by Iceberg TBLPROPERTIES ('format-version'='2');
+
+insert into iceorgin values (100, 'ABCDWC');
+
+select * from iceorgin order by id;
+
+-- create a new partitioned table with old name
+
+create table iceorginpart (id int) partitioned by (part string) Stored by Iceberg TBLPROPERTIES ('format-version'='2');
+
+insert into iceorginpart values (22, 'DER'),(2, 'KLM');
+
+select * from iceorginpart order by id;
diff --git a/iceberg/iceberg-handler/src/test/results/positive/iceberg_rename.q.out b/iceberg/iceberg-handler/src/test/results/positive/iceberg_rename.q.out
new file mode 100644
index 000000000000..76d5d2b0ce41
--- /dev/null
+++ b/iceberg/iceberg-handler/src/test/results/positive/iceberg_rename.q.out
@@ -0,0 +1,265 @@
+PREHOOK: query: create table icev1 (id int, name string) Stored by Iceberg
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@icev1
+POSTHOOK: query: create table icev1 (id int, name string) Stored by Iceberg
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@icev1
+PREHOOK: query: insert into icev1 values (1, 'SSD'),(2, 'RAM')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@icev1
+POSTHOOK: query: insert into icev1 values (1, 'SSD'),(2, 'RAM')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@icev1
+PREHOOK: query: select * from icev1 order by id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@icev1
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from icev1 order by id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@icev1
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1 SSD
+2 RAM
+PREHOOK: query: explain alter table icev1 rename to icev1renamed
+PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: default@icev1
+PREHOOK: Output: database:default
+PREHOOK: Output: default@icev1
+PREHOOK: Output: default@icev1renamed
+POSTHOOK: query: explain alter table icev1 rename to icev1renamed
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: default@icev1
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@icev1
+POSTHOOK: Output: default@icev1renamed
+Stage-0
+  Rename Table{"table name:":"default.icev1","new table name:":"default.icev1renamed"}
+
+PREHOOK: query: alter table icev1 rename to icev1renamed
+PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: default@icev1
+PREHOOK: Output: database:default
+PREHOOK: Output: default@icev1
+PREHOOK: Output: default@icev1renamed
+POSTHOOK: query: alter table icev1 rename to icev1renamed
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: default@icev1
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@icev1
+POSTHOOK: Output: default@icev1renamed
+PREHOOK: query: select * from icev1renamed order by id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@icev1renamed
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from icev1renamed order by id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@icev1renamed
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1 SSD
+2 RAM
+PREHOOK: query: create table iceorgin (id int, name string) Stored by Iceberg TBLPROPERTIES ('format-version'='2')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@iceorgin
+POSTHOOK: query: create table iceorgin (id int, name string) Stored by Iceberg TBLPROPERTIES ('format-version'='2')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@iceorgin
+PREHOOK: query: insert into iceorgin values (1, 'ABC'),(2, 'CBS'),(3, null),(4, 'POPI'),(5, 'AQWR'),(6, 'POIU'),
+(9, null),(8,'POIKL'),(10, 'YUIO')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@iceorgin
+POSTHOOK: query: insert into iceorgin values (1, 'ABC'),(2, 'CBS'),(3, null),(4, 'POPI'),(5, 'AQWR'),(6, 'POIU'),
+(9, null),(8,'POIKL'),(10, 'YUIO')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@iceorgin
+PREHOOK: query: delete from iceorgin where id>9 OR id=8
+PREHOOK: type: QUERY
+PREHOOK: Input: default@iceorgin
+PREHOOK: Output: default@iceorgin
+POSTHOOK: query: delete from iceorgin where id>9 OR id=8
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@iceorgin
+POSTHOOK: Output: default@iceorgin
+PREHOOK: query: select * from iceorgin order by id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@iceorgin
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from iceorgin order by id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@iceorgin
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1 ABC
+2 CBS
+3 NULL
+4 POPI
+5 AQWR
+6 POIU
+9 NULL
+PREHOOK: query: alter table iceorgin rename to icerenamed
+PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: default@iceorgin
+PREHOOK: Output: database:default
+PREHOOK: Output: default@iceorgin
+PREHOOK: Output: default@icerenamed
+POSTHOOK: query: alter table iceorgin rename to icerenamed
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: default@iceorgin
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@iceorgin
+POSTHOOK: Output: default@icerenamed
+PREHOOK: query: select * from icerenamed order by id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@icerenamed
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from icerenamed order by id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@icerenamed
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1 ABC
+2 CBS
+3 NULL
+4 POPI
+5 AQWR
+6 POIU
+9 NULL
+PREHOOK: query: create table iceorginpart (id int) partitioned by (part string) Stored by Iceberg TBLPROPERTIES ('format-version'='2')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@iceorginpart
+POSTHOOK: query: create table iceorginpart (id int) partitioned by (part string) Stored by Iceberg TBLPROPERTIES ('format-version'='2')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@iceorginpart
+PREHOOK: query: insert into iceorginpart values (1, 'ABC'),(2, 'CBS'),(3,'CBS'),(4, 'ABC'),(5, 'AQWR'),(6, 'ABC'),
+(9, 'AQWR'),(8,'ABC'),(10, 'YUIO')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@iceorginpart
+POSTHOOK: query: insert into iceorginpart values (1, 'ABC'),(2, 'CBS'),(3,'CBS'),(4, 'ABC'),(5, 'AQWR'),(6, 'ABC'),
+(9, 'AQWR'),(8,'ABC'),(10, 'YUIO')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@iceorginpart
+PREHOOK: query: delete from iceorginpart where id<3 OR id=7
+PREHOOK: type: QUERY
+PREHOOK: Input: default@iceorginpart
+PREHOOK: Output: default@iceorginpart
+POSTHOOK: query: delete from iceorginpart where id<3 OR id=7
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@iceorginpart
+POSTHOOK: Output: default@iceorginpart
+PREHOOK: query: select * from iceorginpart order by id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@iceorginpart
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from iceorginpart order by id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@iceorginpart
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+3 CBS
+4 ABC
+5 AQWR
+6 ABC
+8 ABC
+9 AQWR
+10 YUIO
+PREHOOK: query: explain alter table iceorginpart rename to icerenamedpart
+PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: default@iceorginpart
+PREHOOK: Output: database:default
+PREHOOK: Output: default@iceorginpart
+PREHOOK: Output: default@icerenamedpart
+POSTHOOK: query: explain alter table iceorginpart rename to icerenamedpart
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: default@iceorginpart
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@iceorginpart
+POSTHOOK: Output: default@icerenamedpart
+Stage-0
+  Rename Table{"table name:":"default.iceorginpart","new table name:":"default.icerenamedpart"}
+
+PREHOOK: query: alter table iceorginpart rename to icerenamedpart
+PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: default@iceorginpart
+PREHOOK: Output: database:default
+PREHOOK: Output: default@iceorginpart
+PREHOOK: Output: default@icerenamedpart
+POSTHOOK: query: alter table iceorginpart rename to icerenamedpart
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: default@iceorginpart
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@iceorginpart
+POSTHOOK: Output: default@icerenamedpart
+PREHOOK: query: select * from icerenamedpart order by id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@icerenamedpart
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from icerenamedpart order by id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@icerenamedpart
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+3 CBS
+4 ABC
+5 AQWR
+6 ABC
+8 ABC
+9 AQWR
+10 YUIO
+PREHOOK: query: create table iceorgin (id int, name string) Stored by Iceberg TBLPROPERTIES ('format-version'='2')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@iceorgin
+POSTHOOK: query: create table iceorgin (id int, name string) Stored by Iceberg TBLPROPERTIES ('format-version'='2')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@iceorgin
+PREHOOK: query: insert into iceorgin values (100, 'ABCDWC')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@iceorgin
+POSTHOOK: query: insert into iceorgin values (100, 'ABCDWC')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@iceorgin
+PREHOOK: query: select * from iceorgin order by id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@iceorgin
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from iceorgin order by id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@iceorgin
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+100 ABCDWC
+PREHOOK: query: create table iceorginpart (id int) partitioned by (part string) Stored by Iceberg TBLPROPERTIES ('format-version'='2')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@iceorginpart
+POSTHOOK: query: create table iceorginpart (id int) partitioned by (part string) Stored by Iceberg TBLPROPERTIES ('format-version'='2')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@iceorginpart
+PREHOOK: query: insert into iceorginpart values (22, 'DER'),(2, 'KLM')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@iceorginpart
+POSTHOOK: query: insert into iceorginpart values (22, 'DER'),(2, 'KLM')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@iceorginpart
+PREHOOK: query: select * from iceorginpart order by id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@iceorginpart
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from iceorginpart order by id
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@iceorginpart
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2 KLM
+22 DER
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
index e360f8f6ba40..c17ca82ef5de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
@@ -132,7 +132,7 @@ private void finalizeAlterTableWithWriteIdOp(Table table, Table oldTable, List

       params, HiveConf conf) throws HiveException {
     String user = SessionState.getUserFromAuthenticator();
     params.put("last_modified_by", user);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameOperation.java
index f06776c19bd4..acce65dcb9e6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/rename/AlterTableRenameOperation.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.ddl.table.misc.rename;

 import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableOperation;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -50,4 +51,9 @@ public int execute() throws HiveException {
   protected void doAlteration(Table table, Partition partition) throws HiveException {
     HiveTableName.setFrom(desc.getNewName(), table);
   }
+
+  @Override
+  protected void checkValidity(Table table, DDLOperationContext context) throws HiveException {
+    table.validateName(context.getConf());
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 73a94d138d77..a97506d2f013 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -114,6 +114,7 @@
 import org.apache.hadoop.hive.metastore.api.SourceTable;
 import org.apache.hadoop.hive.metastore.api.UpdateTransactionalStatsRequest;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
 import org.apache.hadoop.hive.ql.io.HdfsUtils;
 import org.apache.hadoop.hive.metastore.HiveMetaException;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
@@ -875,10 +876,16 @@ public void alterTable(String catName, String dbName, String tblName, Table newT
     if (newTbl.getParameters() != null) {
       newTbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
     }
-    newTbl.checkValidity(conf);
     if (environmentContext == null) {
       environmentContext = new EnvironmentContext();
     }
+    if (isRename(environmentContext)) {
+      newTbl.validateName(conf);
+      environmentContext.putToProperties(HiveMetaHook.OLD_TABLE_NAME, tblName);
+      environmentContext.putToProperties(HiveMetaHook.OLD_DB_NAME, dbName);
+    } else {
+      newTbl.checkValidity(conf);
+    }
     if (cascade) {
       environmentContext.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE);
     }
@@ -918,6 +925,14 @@ public void alterTable(String catName, String dbName, String tblName, Table newT
     }
   }

+  private static boolean isRename(EnvironmentContext environmentContext) {
+    if (environmentContext.isSetProperties()) {
+      String operation = environmentContext.getProperties().get(HiveMetaHook.ALTER_TABLE_OPERATION_TYPE);
+      return operation != null && AlterTableType.RENAME == AlterTableType.valueOf(operation);
+    }
+    return false;
+  }
+
   /**
    * Create a dataconnector
    * @param connector
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index d422fbf36030..88a7960bcea1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -254,11 +254,7 @@ public void setTTable(org.apache.hadoop.hive.metastore.api.Table tTable) {
   public void checkValidity(Configuration conf) throws HiveException {
     // check for validity
-    String name = tTable.getTableName();
-    if (null == name || name.length() == 0
-        || !MetaStoreUtils.validateName(name, conf)) {
-      throw new HiveException("[" + name + "]: is not a valid table name");
-    }
+    validateName(conf);
     if (0 == getCols().size()) {
       throw new HiveException(
           "at least one column must be specified for the table");
@@ -286,6 +282,13 @@ public void checkValidity(Configuration conf) throws HiveException {
     validateColumns(getCols(), getPartCols());
   }

+  public void validateName(Configuration conf) throws HiveException {
+    String name = tTable.getTableName();
+    if (StringUtils.isBlank(name) || !MetaStoreUtils.validateName(name, conf)) {
+      throw new HiveException("[" + name + "]: is not a valid table name");
+    }
+  }
+
   public StorageDescriptor getSd() {
     return tTable.getSd();
   }
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaHook.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaHook.java
index ac624131430b..6a534c1669a0 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaHook.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaHook.java
@@ -49,12 +49,20 @@ public interface HiveMetaHook {
   String ALLOW_PARTITION_KEY_CHANGE = "allow_partition_key_change";
   String SET_PROPERTIES = "set_properties";
   String UNSET_PROPERTIES = "unset_properties";
+
+  String TABLE_TYPE = "table_type";
+
+  String ICEBERG = "ICEBERG";
   String PROPERTIES_SEPARATOR = "'";
   String MIGRATE_HIVE_TO_ICEBERG = "migrate_hive_to_iceberg";
   String INITIALIZE_ROLLBACK_MIGRATION = "initialize_rollback_migration";
   // if this flag is set to true, the HMS call from HiveMetaStoreClient#alter_table() will be skipped
   String SKIP_METASTORE_ALTER = "skip_metastore_alter";
+  String OLD_TABLE_NAME = "old_table_name";
+
+  String OLD_DB_NAME = "old_db_name";
+
   /**
    * Called before a new table definition is added to the metastore
    * during CREATE TABLE.
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index 2ee80fd4eb46..fc413504ec1e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -253,11 +253,14 @@ public void alterTable(RawStore msdb, Warehouse wh, String catName, String dbnam
       boolean renamedTranslatedToExternalTable = rename && MetaStoreUtils.isTranslatedToExternalTable(oldt)
           && MetaStoreUtils.isTranslatedToExternalTable(newt);
+      boolean isRenameIcebergTable =
+          rename && HiveMetaHook.ICEBERG.equalsIgnoreCase(newt.getParameters().get(HiveMetaHook.TABLE_TYPE));
+
       List<ColumnStatistics> columnStatistics = getColumnStats(msdb, oldt);
       columnStatistics = deleteTableColumnStats(msdb, oldt, newt, columnStatistics);

-      if (replDataLocationChanged
-          || renamedManagedTable || renamedTranslatedToExternalTable) {
+      if (!isRenameIcebergTable &&
+          (replDataLocationChanged || renamedManagedTable || renamedTranslatedToExternalTable)) {
         srcPath = new Path(oldt.getSd().getLocation());

         if (replDataLocationChanged) {
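For reviewers who want to exercise the new rename path outside of the ALTER TABLE flow, below is a minimal, hypothetical driver for the Catalogs.renameTable(...) entry point added in this patch. The catalog name, database and table identifiers are illustrative assumptions and not part of the patch; in the product code HiveIcebergMetaHook fills these properties from OLD_DB_NAME/OLD_TABLE_NAME during preAlterTable and calls renameTable from commitAlterTable.

// Hypothetical sketch only; assumes the Configuration already carries the
// Iceberg catalog settings (catalog type, warehouse location, etc.).
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.mr.Catalogs;
import org.apache.iceberg.mr.InputFormatConfig;

public class IcebergRenameSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    Properties props = new Properties();
    // Which configured catalog to resolve; "default_iceberg" is an assumed name.
    props.setProperty(InputFormatConfig.CATALOG_NAME, "default_iceberg");
    // Current identifier of the table, in "db.table" form, mirroring what the hook sets.
    props.setProperty(Catalogs.NAME, "default.iceorgin");

    // Renames default.iceorgin to default.icerenamed in the resolved catalog;
    // per the patch, a RuntimeException is thrown if no catalog can be loaded.
    Catalogs.renameTable(conf, props, TableIdentifier.of("default", "icerenamed"));
  }
}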