diff --git a/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 148932b25299..141e91a16835 100644
--- a/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -468,7 +468,7 @@ public enum ErrorMsg {
   RESOURCE_PLAN_ALREADY_EXISTS(10417, "Resource plan {0} already exists", true),
   RESOURCE_PLAN_NOT_EXISTS(10418, "Resource plan {0} does not exist", true),
   INCOMPATIBLE_STRUCT(10419, "Incompatible structs.", true),
-  OBJECTNAME_CONTAINS_DOT(10420, "Table or database name may not contain dot(.) character", true),
+  OBJECTNAME_CONTAINS_DOT(10420, "Catalog, table or database name may not contain dot(.) character", true),
   WITHIN_GROUP_NOT_ALLOWED(10421,
       "Not an ordered-set aggregate function: {0}. WITHIN GROUP clause is not allowed.", true),
   WITHIN_GROUP_PARAMETER_MISMATCH(10422,
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
index 3cfaaf4669fb..c46a67d5b8f3 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
@@ -50,7 +50,7 @@ alterStatement
     : KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^(TOK_ALTERTABLE tableName alterTableStatementSuffix)
     | KW_ALTER KW_VIEW tableName KW_AS? alterViewStatementSuffix -> ^(TOK_ALTERVIEW tableName alterViewStatementSuffix)
     | KW_ALTER KW_MATERIALIZED KW_VIEW tableNameTree=tableName alterMaterializedViewStatementSuffix[$tableNameTree.tree] -> alterMaterializedViewStatementSuffix
-    | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix
+    | KW_ALTER (KW_DATABASE|KW_SCHEMA) databaseName alterDatabaseStatementSuffix -> ^(TOK_ALTERDATABASE databaseName alterDatabaseStatementSuffix)
     | KW_ALTER KW_DATACONNECTOR alterDataConnectorStatementSuffix -> alterDataConnectorStatementSuffix
     | KW_OPTIMIZE KW_TABLE tableName optimizeTableStatementSuffix -> ^(TOK_ALTERTABLE tableName optimizeTableStatementSuffix)
     | KW_ALTER KW_CATALOG alterCatalogStatementSuffix -> alterCatalogStatementSuffix
@@ -181,31 +181,31 @@ alterDatabaseStatementSuffix
 alterDatabaseSuffixProperties
 @init { gParent.pushMsg("alter database properties statement", state); }
 @after { gParent.popMsg(state); }
-    : name=identifier KW_SET KW_DBPROPERTIES dbProperties
-    -> ^(TOK_ALTERDATABASE_PROPERTIES $name dbProperties)
+    : KW_SET KW_DBPROPERTIES dbProperties
+    -> ^(TOK_ALTERDATABASE_PROPERTIES dbProperties)
     ;

 alterDatabaseSuffixSetOwner
 @init { gParent.pushMsg("alter database set owner", state); }
 @after { gParent.popMsg(state); }
-    : dbName=identifier KW_SET KW_OWNER principalName
-    -> ^(TOK_ALTERDATABASE_OWNER $dbName principalName)
+    : KW_SET KW_OWNER principalName
+    -> ^(TOK_ALTERDATABASE_OWNER principalName)
     ;

 alterDatabaseSuffixSetLocation
 @init { gParent.pushMsg("alter database set location", state); }
 @after { gParent.popMsg(state); }
-    : dbName=identifier KW_SET KW_LOCATION newLocation=StringLiteral
-    -> ^(TOK_ALTERDATABASE_LOCATION $dbName $newLocation)
-    | dbName=identifier KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
-    -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $dbName $newLocation)
+    : KW_SET KW_LOCATION newLocation=StringLiteral
+    -> ^(TOK_ALTERDATABASE_LOCATION $newLocation)
+    | KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
+    -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $newLocation)
     ;
 alterDatabaseSuffixSetManagedLocation
 @init { gParent.pushMsg("alter database set managed location", state); }
 @after { gParent.popMsg(state); }
-    : dbName=identifier KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
-    -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $dbName $newLocation)
+    : KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
+    -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $newLocation)
     ;

 alterStatementSuffixRename[boolean table]
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
index a5c37faec8ae..d3461064e574 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
@@ -231,6 +231,17 @@ uniqueJoinTableSource
     -> ^(TOK_TABREF $tabname $ts? $alias?)
     ;

+databaseName
+@init { gParent.pushMsg("database name", state); }
+@after { gParent.popMsg(state); }
+    :
+    catalog=identifier DOT db=identifier?
+    -> ^(TOK_DBNAME $catalog $db)
+    |
+    db=identifier
+    -> ^(TOK_DBNAME $db)
+    ;
+
 tableName
 @init { gParent.pushMsg("table name", state); }
 @after { gParent.popMsg(state); }
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index ddce6aa85af6..90aeedde5870 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -378,11 +378,13 @@ TOK_DESCCATALOG;
 TOK_CATALOGLOCATION;
 TOK_CATALOGCOMMENT;
 TOK_ALTERCATALOG_LOCATION;
+TOK_SWITCHCATALOG;
 TOK_DESCDATABASE;
 TOK_DATABASEPROPERTIES;
 TOK_DATABASELOCATION;
 TOK_DATABASE_MANAGEDLOCATION;
 TOK_DBPROPLIST;
+TOK_ALTERDATABASE;
 TOK_ALTERDATABASE_PROPERTIES;
 TOK_ALTERDATABASE_OWNER;
 TOK_ALTERDATABASE_LOCATION;
@@ -1011,6 +1013,7 @@ ddlStatement
 @after { popMsg(state); }
     : createCatalogStatement
     | dropCatalogStatement
+    | switchCatalogStatement
    | createDatabaseStatement
    | switchDatabaseStatement
    | dropDatabaseStatement
@@ -1151,12 +1154,19 @@ dropCatalogStatement
     -> ^(TOK_DROPCATALOG identifier ifExists?)
     ;

+switchCatalogStatement
+@init { pushMsg("switch catalog statement", state); }
+@after { popMsg(state); }
+    : KW_SET KW_CATALOG identifier
+    -> ^(TOK_SWITCHCATALOG identifier)
+    ;
+
 createDatabaseStatement
 @init { pushMsg("create database statement", state); }
 @after { popMsg(state); }
     : KW_CREATE (KW_DATABASE|KW_SCHEMA)
       ifNotExists?
-      name=identifier
+      name=databaseName
       databaseComment?
       dbLocation?
       dbManagedLocation?
@@ -1165,7 +1175,7 @@ createDatabaseStatement
     | KW_CREATE KW_REMOTE (KW_DATABASE|KW_SCHEMA)
       ifNotExists?
-      name=identifier
+      name=databaseName
       databaseComment?
       dbConnectorName
       (KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
@@ -1210,15 +1220,15 @@ dbConnectorName
 switchDatabaseStatement
 @init { pushMsg("switch database statement", state); }
 @after { popMsg(state); }
-    : KW_USE identifier
-    -> ^(TOK_SWITCHDATABASE identifier)
+    : KW_USE databaseName
+    -> ^(TOK_SWITCHDATABASE databaseName)
     ;

 dropDatabaseStatement
 @init { pushMsg("drop database statement", state); }
 @after { popMsg(state); }
-    : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? identifier restrictOrCascade?
-    -> ^(TOK_DROPDATABASE identifier ifExists? restrictOrCascade?)
+    : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? databaseName restrictOrCascade?
+    -> ^(TOK_DROPDATABASE databaseName ifExists? restrictOrCascade?)
     ;

 databaseComment
@@ -1284,7 +1294,7 @@ descStatement
     (
     (KW_CATALOG) => (KW_CATALOG) KW_EXTENDED? (catName=identifier) -> ^(TOK_DESCCATALOG $catName KW_EXTENDED?)
     |
-    (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
+    (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=databaseName) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
     |
     (KW_DATACONNECTOR) => (KW_DATACONNECTOR) KW_EXTENDED? (dcName=identifier) -> ^(TOK_DESCDATACONNECTOR $dcName KW_EXTENDED?)
     |
@@ -1323,7 +1333,7 @@ showStatement
     | KW_SHOW KW_FUNCTIONS (KW_LIKE showFunctionIdentifier)? -> ^(TOK_SHOWFUNCTIONS KW_LIKE? showFunctionIdentifier?)
     | KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? whereClause? orderByClause? limitClause? -> ^(TOK_SHOWPARTITIONS $tabName partitionSpec? whereClause? orderByClause? limitClause?)
     | KW_SHOW KW_CREATE (
-        (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) db_name=identifier -> ^(TOK_SHOW_CREATEDATABASE $db_name)
+        (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) db_name=databaseName -> ^(TOK_SHOW_CREATEDATABASE $db_name)
         |
         KW_TABLE tabName=tableName -> ^(TOK_SHOW_CREATETABLE $tabName)
       )
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g
index ac2084bf4eed..07ba0098b15e 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g
@@ -61,7 +61,7 @@ lockStatement
 lockDatabase
 @init { gParent.pushMsg("lock database statement", state); }
 @after { gParent.popMsg(state); }
-    : KW_LOCK (KW_DATABASE|KW_SCHEMA) (dbName=identifier) lockMode -> ^(TOK_LOCKDB $dbName lockMode)
+    : KW_LOCK (KW_DATABASE|KW_SCHEMA) (dbName=databaseName) lockMode -> ^(TOK_LOCKDB $dbName lockMode)
     ;

 lockMode
@@ -79,5 +79,5 @@ unlockStatement
 unlockDatabase
 @init { gParent.pushMsg("unlock database statement", state); }
 @after { gParent.popMsg(state); }
-    : KW_UNLOCK (KW_DATABASE|KW_SCHEMA) (dbName=identifier) -> ^(TOK_UNLOCKDB $dbName)
+    : KW_UNLOCK (KW_DATABASE|KW_SCHEMA) (dbName=databaseName) -> ^(TOK_UNLOCKDB $dbName)
     ;
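Taken together, the parser changes above route every database reference through the new databaseName rule and add a SET CATALOG statement. For illustration only (not part of the patch), a minimal sketch of the statement shapes the revised grammar is intended to accept; the catalog and database names (tpc, sales) are invented:

    // Illustrative only: statement shapes the extended grammar should accept.
    public class CatalogSyntaxExamples {
        public static void main(String[] args) {
            String[] statements = {
                "SET CATALOG tpc",                                     // switchCatalogStatement
                "USE tpc.sales",                                       // switchDatabaseStatement
                "CREATE DATABASE IF NOT EXISTS tpc.sales",             // createDatabaseStatement
                "ALTER DATABASE tpc.sales SET DBPROPERTIES ('k'='v')", // alterStatement
                "DESCRIBE DATABASE EXTENDED tpc.sales",                // descStatement
                "SHOW CREATE DATABASE tpc.sales",                      // showStatement
                "DROP DATABASE IF EXISTS tpc.sales CASCADE",           // dropDatabaseStatement
                "LOCK DATABASE tpc.sales SHARED",                      // lockDatabase
                "UNLOCK DATABASE tpc.sales"                            // unlockDatabase
            };
            for (String sql : statements) {
                System.out.println(sql);
            }
        }
    }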
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java
new file mode 100644
index 000000000000..f2aa9fe2aeb5
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for catalog switching commands.
+ */
+@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_SWITCHCATALOG)
+public class SwitchCatalogAnalyzer extends BaseSemanticAnalyzer {
+  public SwitchCatalogAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String catalogName = unescapeIdentifier(root.getChild(0).getText());
+
+    Catalog catalog = getCatalog(catalogName);
+    ReadEntity readEntity = new ReadEntity(catalog);
+    readEntity.noLockNeeded();
+    inputs.add(readEntity);
+
+    SwitchCatalogDesc desc = new SwitchCatalogDesc(catalogName);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogDesc.java
new file mode 100644
index 000000000000..68ce7222a7f9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogDesc.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+import java.io.Serializable;
+
+/**
+ * DDL task description for SET CATALOG commands.
+ */
+@Explain(displayName = "Switch Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+public class SwitchCatalogDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String catalogName;
+
+  public SwitchCatalogDesc(String catalogName) {
+    this.catalogName = catalogName;
+  }
+
+  @Explain(displayName = "name", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogOperation.java
new file mode 100644
index 000000000000..95468461b42b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogOperation.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * Operation process of switching to another catalog.
+ */
+public class SwitchCatalogOperation extends DDLOperation<SwitchCatalogDesc> {
+  public SwitchCatalogOperation(DDLOperationContext context, SwitchCatalogDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    String catalogName = desc.getCatalogName();
+    if (context.getDb().getCatalog(catalogName) == null) {
+      throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+
+    SessionState.get().setCurrentCatalog(catalogName);
+    return 0;
+  }
+}
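The three new classes above follow Hive's usual analyzer/desc/operation split: the analyzer resolves the catalog and registers a no-lock read entity, the desc carries the catalog name into the DDL task, and the operation re-checks existence and switches the session. For illustration only, a JDK-only sketch of the operation's check-then-switch step; the Set stands in for the metastore and the names are invented:

    // Minimal, illustrative mirror of SwitchCatalogOperation.execute():
    // fail if the catalog is unknown, otherwise make it the session default.
    import java.util.Set;

    public class SwitchCatalogSketch {
        private static final Set<String> CATALOGS = Set.of("hive", "tpc"); // stand-in for the metastore
        private static String currentCatalog = "hive";

        static void switchCatalog(String name) {
            if (!CATALOGS.contains(name)) {
                throw new IllegalArgumentException("Catalog " + name + " does not exist");
            }
            currentCatalog = name; // the real code calls SessionState.get().setCurrentCatalog(name)
        }

        public static void main(String[] args) {
            switchCatalog("tpc");
            System.out.println(currentCatalog); // prints: tpc
        }
    }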
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java
index 92cbee55b6d5..9016a8e2c27c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java
@@ -35,7 +35,7 @@ public AbstractAlterDatabaseAnalyzer(QueryState queryState) throws SemanticExcep
   }

   protected void addAlterDatabaseDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException {
-    Database database = getDatabase(alterDesc.getDatabaseName());
+    Database database = getDatabase(alterDesc.getCatalogName(), alterDesc.getDatabaseName(), true);
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java
index 854cc9116f21..0f74359276c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java
@@ -31,14 +31,21 @@
 public abstract class AbstractAlterDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

+  private final String catalogName;
   private final String databaseName;
   private final ReplicationSpec replicationSpec;

-  public AbstractAlterDatabaseDesc(String databaseName, ReplicationSpec replicationSpec) {
+  public AbstractAlterDatabaseDesc(String catalogName, String databaseName, ReplicationSpec replicationSpec) {
+    this.catalogName = catalogName;
     this.databaseName = databaseName;
     this.replicationSpec = replicationSpec;
   }

+  @Explain(displayName="catalogName", explainLevels = {Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+
   @Explain(displayName="name", explainLevels = {Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDatabaseName() {
     return databaseName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java
index 8deb44f0c708..0a7ab14b6c5f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;

 /**
  * Operation process of altering a database.
@@ -36,8 +37,9 @@ public AbstractAlterDatabaseOperation(DDLOperationContext context, T desc) {

   @Override
   public int execute() throws HiveException {
+    String catName = desc.getCatalogName();
     String dbName = desc.getDatabaseName();
-    Database database = context.getDb().getDatabase(dbName);
+    Database database = context.getDb().getDatabase(catName, dbName);
     if (database == null) {
       throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AlterDatabaseAnalyzerCategory.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AlterDatabaseAnalyzerCategory.java
new file mode 100644
index 000000000000..c0a78646d670
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AlterDatabaseAnalyzerCategory.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.alter;
+
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLSemanticAnalyzerCategory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+
+/**
+ * Alter Database category helper. It derives the actual type of the command from the root element by selecting
+ * the type of its second child: with the cat.db syntax, the database name is parsed as a common prefix of every
+ * ALTER DATABASE command (see the databaseName rule in the grammar), so TOK_ALTERDATABASE carries the name as
+ * child 0 and the command-specific suffix as child 1.
+ * This mirrors {@link org.apache.hadoop.hive.ql.ddl.table.AlterTableAnalyzerCategory}.
+ */
+@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_ALTERDATABASE)
+public class AlterDatabaseAnalyzerCategory implements DDLSemanticAnalyzerCategory {
+  @Override
+  public int getType(ASTNode root) {
+    return root.getChild(1).getType();
+  }
+}
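For orientation (not part of the patch): after the grammar change, TOK_ALTERDATABASE carries the parsed name as child 0 and the command-specific suffix token as child 1, whose own child holds the payload. This is why the category helper dispatches on root.getChild(1) and why the analyzers below read root.getChild(1).getChild(0). A sketch of the tree for ALTER DATABASE tpc.sales SET OWNER USER alice (names invented):

    TOK_ALTERDATABASE
      child 0: TOK_DBNAME                -- optional catalog ("tpc") and database ("sales")
      child 1: TOK_ALTERDATABASE_OWNER   -- dispatch key returned by getType(root)
        child 0: principalName           -- payload, read via root.getChild(1).getChild(0)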
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java
index 6ea68f0c6889..a1031588a9ff 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.hive.ql.ddl.database.alter.location;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
 import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer;
@@ -36,12 +37,14 @@ public AlterDatabaseSetLocationAnalyzer(QueryState queryState) throws SemanticEx

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = getUnescapedName((ASTNode) root.getChild(0));
-    String newLocation = unescapeSQLString(root.getChild(1).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    String databaseName = catDbNamePair.getRight();
+    String newLocation = unescapeSQLString(root.getChild(1).getChild(0).getText());

     outputs.add(toWriteEntity(newLocation));

-    AlterDatabaseSetLocationDesc desc = new AlterDatabaseSetLocationDesc(databaseName, newLocation);
+    AlterDatabaseSetLocationDesc desc = new AlterDatabaseSetLocationDesc(catalogName, databaseName, newLocation);
     addAlterDatabaseDesc(desc);
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java
index ddb320692aac..766bd676bfe9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java
@@ -31,8 +31,8 @@
 public class AlterDatabaseSetLocationDesc extends AbstractAlterDatabaseDesc {
   private final String location;

-  public AlterDatabaseSetLocationDesc(String databaseName, String location) {
-    super(databaseName, null);
+  public AlterDatabaseSetLocationDesc(String catalogName, String databaseName, String location) {
+    super(catalogName, databaseName, null);
     this.location = location;
   }
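The analyzers in this patch all start with getCatDbNamePair, which splits the TOK_DBNAME node into an optional catalog and a database name. The helper itself is not shown in this diff; below is a rough, JDK-only mirror of its contract, where SimpleImmutableEntry stands in for the commons-lang3 Pair and plain strings stand in for AST children:

    // Rough mirror of the getCatDbNamePair contract: one child under TOK_DBNAME
    // is a bare database name (null catalog), two children are catalog.database.
    import java.util.AbstractMap.SimpleImmutableEntry;
    import java.util.Map.Entry;

    public class CatDbNamePairSketch {
        static Entry<String, String> catDbNamePair(String... children) {
            return children.length == 2
                ? new SimpleImmutableEntry<>(children[0], children[1]) // catalog.db
                : new SimpleImmutableEntry<>(null, children[0]);       // plain db
        }

        public static void main(String[] args) {
            System.out.println(catDbNamePair("tpc", "sales")); // tpc=sales
            System.out.println(catDbNamePair("sales"));        // null=sales
        }
    }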
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationAnalyzer.java
index db7a3ba1b2cd..60b633490153 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationAnalyzer.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.hive.ql.ddl.database.alter.location;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
 import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer;
@@ -36,12 +37,14 @@ public AlterDatabaseSetManagedLocationAnalyzer(QueryState queryState) throws Sem

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = getUnescapedName((ASTNode) root.getChild(0));
-    String newLocation = unescapeSQLString(root.getChild(1).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    String databaseName = catDbNamePair.getRight();
+    String newLocation = unescapeSQLString(root.getChild(1).getChild(0).getText());

     outputs.add(toWriteEntity(newLocation));

-    AlterDatabaseSetManagedLocationDesc desc = new AlterDatabaseSetManagedLocationDesc(databaseName, newLocation);
+    AlterDatabaseSetManagedLocationDesc desc = new AlterDatabaseSetManagedLocationDesc(catalogName, databaseName, newLocation);
     addAlterDatabaseDesc(desc);
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationDesc.java
index fc43583882e5..1e785d4f822d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationDesc.java
@@ -31,8 +31,8 @@
 public class AlterDatabaseSetManagedLocationDesc extends AbstractAlterDatabaseDesc {
   private final String managedLocation;

-  public AlterDatabaseSetManagedLocationDesc(String databaseName, String managedLocation) {
-    super(databaseName, null);
+  public AlterDatabaseSetManagedLocationDesc(String catalogName, String databaseName, String managedLocation) {
+    super(catalogName, databaseName, null);
     this.managedLocation = managedLocation;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java
index 61028761afab..e34eef8dc690 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.hive.ql.ddl.database.alter.owner;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
 import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer;
@@ -38,8 +39,10 @@ public AlterDatabaseSetOwnerAnalyzer(QueryState queryState) throws SemanticExcep

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = getUnescapedName((ASTNode) root.getChild(0));
-    PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) root.getChild(1));
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    String databaseName = catDbNamePair.getRight();
+    PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) root.getChild(1).getChild(0));

     if (principalDesc.getName() == null) {
       throw new SemanticException("Owner name can't be null in alter database set owner command");
@@ -48,7 +51,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
       throw new SemanticException("Owner type can't be null in alter database set owner command");
     }

-    AlterDatabaseSetOwnerDesc desc = new AlterDatabaseSetOwnerDesc(databaseName, principalDesc, null);
+    AlterDatabaseSetOwnerDesc desc = new AlterDatabaseSetOwnerDesc(catalogName, databaseName, principalDesc, null);
     addAlterDatabaseDesc(desc);
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java
index 424694043351..e5e47a963281 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java
@@ -33,8 +33,8 @@
 public class AlterDatabaseSetOwnerDesc extends AbstractAlterDatabaseDesc {
   private final PrincipalDesc ownerPrincipal;

-  public AlterDatabaseSetOwnerDesc(String databaseName, PrincipalDesc ownerPrincipal, ReplicationSpec replicationSpec) {
-    super(databaseName, replicationSpec);
+  public AlterDatabaseSetOwnerDesc(String catalogName, String databaseName, PrincipalDesc ownerPrincipal, ReplicationSpec replicationSpec) {
+    super(catalogName, databaseName, replicationSpec);
     this.ownerPrincipal = ownerPrincipal;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java
index 79e072989b01..9f6ff2228001 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java
@@ -20,6 +20,7 @@

 import java.util.Map;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
 import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer;
@@ -38,11 +39,13 @@ public AlterDatabaseSetPropertiesAnalyzer(QueryState queryState) throws Semantic

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    String databaseName = catDbNamePair.getRight();

     Map<String, String> dbProps = null;
     for (int i = 1; i < root.getChildCount(); i++) {
-      ASTNode childNode = (ASTNode) root.getChild(i);
+      ASTNode childNode = (ASTNode) root.getChild(i).getChild(0);
       switch (childNode.getToken().getType()) {
       case HiveParser.TOK_DATABASEPROPERTIES:
         dbProps = getProps((ASTNode) childNode.getChild(0));
@@ -52,7 +55,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
       }
     }

-    AlterDatabaseSetPropertiesDesc desc = new AlterDatabaseSetPropertiesDesc(databaseName, dbProps, null);
+    AlterDatabaseSetPropertiesDesc desc = new AlterDatabaseSetPropertiesDesc(catalogName, databaseName, dbProps, null);
     addAlterDatabaseDesc(desc);
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java
index 98496417f8c7..abcc1d7b1d0d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java
@@ -34,9 +34,9 @@
 public class AlterDatabaseSetPropertiesDesc extends AbstractAlterDatabaseDesc {
   private final Map<String, String> dbProperties;

-  public AlterDatabaseSetPropertiesDesc(String databaseName, Map<String, String> dbProperties,
+  public AlterDatabaseSetPropertiesDesc(String catalogName, String databaseName, Map<String, String> dbProperties,
       ReplicationSpec replicationSpec) {
-    super(databaseName, replicationSpec);
+    super(catalogName, databaseName, replicationSpec);
     this.dbProperties = dbProperties;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java
index c7b04e9e1c13..2d7f5b9d4a52 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java
@@ -20,10 +20,12 @@

 import java.util.Map;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.DatabaseType;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
@@ -47,7 +49,12 @@ public CreateDatabaseAnalyzer(QueryState queryState) throws SemanticException {

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    if (catalogName != null && getCatalog(catalogName) == null) {
+      throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+    String databaseName = catDbNamePair.getRight();

     boolean ifNotExists = false;
     String comment = null;
@@ -92,14 +99,15 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
       }
     }

-    if (ifNotExists && getDatabase(databaseName, false) != null) {
+    if (ifNotExists && getDatabase(catalogName, databaseName, false) != null) {
       return;
     }

     CreateDatabaseDesc desc = null;
     Database database = new Database(databaseName, comment, locationUri, props);
+    database.setCatalogName(catalogName);
     if (type.equalsIgnoreCase(DatabaseType.NATIVE.name())) {
-      desc = new CreateDatabaseDesc(databaseName, comment, locationUri, managedLocationUri, ifNotExists, props);
+      desc = new CreateDatabaseDesc(catalogName, databaseName, comment, locationUri, managedLocationUri, ifNotExists, props);
       database.setType(DatabaseType.NATIVE);
       // database = new Database(databaseName, comment, locationUri, props);
       if (managedLocationUri != null) {
@@ -109,7 +117,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
       String remoteDbName = databaseName;
       if (props != null && props.get("connector.remoteDbName") != null) // TODO finalize the property name
         remoteDbName = props.get("connector.remoteDbName");
-      desc = new CreateDatabaseDesc(databaseName, comment, locationUri, null, ifNotExists, props, type,
+      desc = new CreateDatabaseDesc(catalogName, databaseName, comment, locationUri, null, ifNotExists, props, type,
          connectorName, remoteDbName);
      database.setConnector_name(connectorName);
      database.setType(DatabaseType.REMOTE);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
index f458cdc3356e..2d096d6c4eb7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
@@ -35,6 +35,7 @@
 public class CreateDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

+  private final String catalogName;
   private final String databaseName;
   private final String comment;
   private final String locationUri;
@@ -45,13 +46,14 @@ public class CreateDatabaseDesc implements DDLDesc, Serializable {
   private final String remoteDbName;
   private final Map<String, String> dbProperties;

-  public CreateDatabaseDesc(String databaseName, String comment, String locationUri, String managedLocationUri,
+  public CreateDatabaseDesc(String catalogName, String databaseName, String comment, String locationUri, String managedLocationUri,
       boolean ifNotExists, Map<String, String> dbProperties) {
-    this(databaseName, comment, locationUri, managedLocationUri, ifNotExists, dbProperties, "NATIVE", null, null);
+    this(catalogName, databaseName, comment, locationUri, managedLocationUri, ifNotExists, dbProperties, "NATIVE", null, null);
   }

-  public CreateDatabaseDesc(String databaseName, String comment, String locationUri, String managedLocationUri,
+  public CreateDatabaseDesc(String catalogName, String databaseName, String comment, String locationUri, String managedLocationUri,
       boolean ifNotExists, Map<String, String> dbProperties, String dbtype, String connectorName, String remoteDbName) {
+    this.catalogName = catalogName;
     this.databaseName = databaseName;
     this.comment = comment;
     if (dbtype != null && dbtype.equalsIgnoreCase("REMOTE")) {
@@ -80,6 +82,11 @@ public Map<String, String> getDatabaseProperties() {
     return dbProperties;
   }

+  @Explain(displayName="catalogName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+
   @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return databaseName;
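Worth noting: CreateDatabaseAnalyzer now resolves the catalog before anything else, fails fast when it is missing, and threads the name through both CreateDatabaseDesc constructors (NATIVE and REMOTE). For illustration only, a compact JDK-only reduction of that branching; every type and name here is a stand-in:

    // Illustrative reduction of the NATIVE vs. REMOTE branch, with the
    // catalog name carried through both paths.
    public class CreateDatabaseSketch {
        record Desc(String catalog, String db, String type, String connector) { }

        static Desc analyze(String catalog, String db, boolean remote, String connector) {
            return remote
                ? new Desc(catalog, db, "REMOTE", connector) // REMOTE keeps a connector name
                : new Desc(catalog, db, "NATIVE", null);     // NATIVE has no connector
        }

        public static void main(String[] args) {
            System.out.println(analyze("tpc", "sales", false, null));
            System.out.println(analyze(null, "ext_db", true, "mysql_conn")); // no catalog prefix
        }
    }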
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
index 0bccba094fed..e8beba96430f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
@@ -47,6 +47,7 @@ public CreateDatabaseOperation(DDLOperationContext context, CreateDatabaseDesc d
   public int execute() throws HiveException {
     Database database = new Database(desc.getName(), desc.getComment(), desc.getLocationUri(),
         desc.getDatabaseProperties());
+    database.setCatalogName(desc.getCatalogName());
     database.setOwnerName(SessionState.getUserFromAuthenticator());
     database.setOwnerType(PrincipalType.USER);
     database.setType(desc.getDatabaseType());
@@ -55,12 +56,12 @@ public int execute() throws HiveException {
       if (desc.getManagedLocationUri() != null) {
         database.setManagedLocationUri(desc.getManagedLocationUri());
       }
-      makeLocationQualified(database);
+      makeLocationQualified(database); // TODO catalog. Add catalog prefix for location
       if (database.getLocationUri().equalsIgnoreCase(database.getManagedLocationUri())) {
         throw new HiveException("Managed and external locations for database cannot be the same");
       }
     } else if (desc.getDatabaseType() == DatabaseType.REMOTE) {
-      makeLocationQualified(database);
+      makeLocationQualified(database); // TODO catalog. Add catalog prefix for location
       database.setConnector_name(desc.getConnectorName());
       database.setRemote_dbname(desc.getRemoteDbName());
     } else { // should never be here
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java
index 6b4860b9e1d2..3e6adc9904d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.hive.ql.ddl.database.desc;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -46,12 +47,14 @@ public void analyzeInternal(ASTNode root) throws SemanticException {

     ctx.setResFile(ctx.getLocalTmpPath());

-    String databaseName = stripQuotes(root.getChild(0).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catName = catDbNamePair.getLeft();
+    String dbName = catDbNamePair.getRight();
     boolean isExtended = root.getChildCount() == 2;

-    inputs.add(new ReadEntity(getDatabase(databaseName)));
+    inputs.add(new ReadEntity(getDatabase(catName, dbName, true)));

-    DescDatabaseDesc desc = new DescDatabaseDesc(ctx.getResFile(), databaseName, isExtended);
+    DescDatabaseDesc desc = new DescDatabaseDesc(ctx.getResFile(), catName, dbName, isExtended);
     Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
     rootTasks.add(task);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java
index aafa67fe0edd..274c18f60164 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java
@@ -33,18 +33,21 @@
 public class DescDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

   public static final String DESC_DATABASE_SCHEMA =
-      "db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname#string:string:string:string:string:string,string,string";
+      "db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname#" +
+      "string:string:string:string:string:string,string,string";

   public static final String DESC_DATABASE_SCHEMA_EXTENDED =
       "db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname,parameters#" +
       "string:string:string:string:string:string:string,string,string";

   private final String resFile;
+  private final String catName;
   private final String dbName;
   private final boolean isExtended;

-  public DescDatabaseDesc(Path resFile, String dbName, boolean isExtended) {
+  public DescDatabaseDesc(Path resFile, String catName, String dbName, boolean isExtended) {
     this.resFile = resFile.toString();
+    this.catName = catName;
     this.dbName = dbName;
     this.isExtended = isExtended;
   }
@@ -55,6 +58,11 @@ public boolean isExtended() {
     return isExtended;
   }

+  @Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catName;
+  }
+
   @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDatabaseName() {
     return dbName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
index 332e36eb7ae4..c114f9874194 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
@@ -45,7 +45,7 @@ public DescDatabaseOperation(DDLOperationContext context, DescDatabaseDesc desc)
   @Override
   public int execute() throws HiveException {
     try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) {
-      Database database = context.getDb().getDatabase(desc.getDatabaseName());
+      Database database = context.getDb().getDatabase(desc.getCatalogName(), desc.getDatabaseName());
       if (database == null) {
         throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, desc.getDatabaseName());
       }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java
index 6d01c2e88b30..5c39700718df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java
@@ -18,9 +18,11 @@

 package org.apache.hadoop.hive.ql.ddl.database.drop;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
@@ -51,12 +53,17 @@ public DropDatabaseAnalyzer(QueryState queryState) throws SemanticException {

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
     boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;
     boolean cascade = root.getFirstChildWithType(HiveParser.TOK_CASCADE) != null;
     boolean isSoftDelete = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_ACID_LOCKLESS_READS_ENABLED);

-    Database database = getDatabase(databaseName, !ifExists);
+    String catalogName = catDbNamePair.getLeft();
+    if (catalogName != null && getCatalog(catalogName) == null) {
+      throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+    String databaseName = catDbNamePair.getRight();
+    Database database = getDatabase(catDbNamePair.getLeft(), catDbNamePair.getRight(), !ifExists);
     if (database == null) {
       return;
     }
@@ -72,7 +79,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
       HiveConf hiveConf = new HiveConf(conf);
       hiveConf.set("hive.metastore.client.filter.enabled", "false");
       newDb = Hive.get(hiveConf);
-      List<Table> tables = newDb.getAllTableObjects(databaseName);
+      List<Table> tables = newDb.getAllTableObjects(catalogName, databaseName);
       isDbLevelLock = !isSoftDelete || tables.stream().allMatch(
           table -> AcidUtils.isTableSoftDeleteEnabled(table, conf));
       for (Table table : tables) {
@@ -85,7 +92,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
         outputs.add(new WriteEntity(table, lockType));
       }
       // fetch all the functions in the database
-      List<Function> functions = db.getFunctionsInDb(databaseName, ".*");
+      List<Function> functions = db.getFunctionsInDb(catalogName, databaseName, ".*");
       for (Function func: functions) {
         outputs.add(new WriteEntity(func, WriteEntity.WriteType.DDL_NO_LOCK));
       }
@@ -111,7 +118,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
           WriteEntity.WriteType.DDL_EXCL_WRITE : WriteEntity.WriteType.DDL_EXCLUSIVE;
       outputs.add(new WriteEntity(database, lockType));
     }
-    DropDatabaseDesc desc = new DropDatabaseDesc(databaseName, ifExists, cascade, new ReplicationSpec());
+    DropDatabaseDesc desc = new DropDatabaseDesc(catalogName, databaseName, ifExists, cascade, new ReplicationSpec());
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java
index c86922a23729..e436a43fd1aa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java
@@ -32,6 +32,7 @@
 public class DropDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

+  private final String catalogName;
   private final String databaseName;
   private final boolean ifExists;
   private final boolean cascade;
@@ -40,21 +41,27 @@ public class DropDatabaseDesc implements DDLDesc, Serializable {
   private boolean deleteData = true;

   public DropDatabaseDesc(String databaseName, boolean ifExists, ReplicationSpec replicationSpec) {
-    this(databaseName, ifExists, false, replicationSpec);
+    this(null, databaseName, ifExists, false, replicationSpec); //TODO check the actual catalog
   }

-  public DropDatabaseDesc(String databaseName, boolean ifExists, boolean cascade, ReplicationSpec replicationSpec) {
+  public DropDatabaseDesc(String catalogName, String databaseName, boolean ifExists, boolean cascade, ReplicationSpec replicationSpec) {
+    this.catalogName = catalogName;
     this.databaseName = databaseName;
     this.ifExists = ifExists;
     this.cascade = cascade;
     this.replicationSpec = replicationSpec;
   }

-  public DropDatabaseDesc(String databaseName, boolean ifExists, boolean cascade, boolean deleteData) {
-    this(databaseName, ifExists, cascade, null);
+  public DropDatabaseDesc(String catalogName, String databaseName, boolean ifExists, boolean cascade, boolean deleteData) {
+    this(catalogName, databaseName, ifExists, cascade, null);
     this.deleteData = deleteData;
   }

+  @Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+
   @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDatabaseName() {
     return databaseName;
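An aside on the lock choice in DropDatabaseAnalyzer above: it reduces to one predicate — a database-level lock is used when lockless reads are disabled, or when every table in the database supports soft delete; otherwise tables are locked individually. A tiny, illustrative mirror of just that predicate (JDK-only, hypothetical inputs):

    // Mirror of the isDbLevelLock predicate from DropDatabaseAnalyzer.
    import java.util.List;

    public class DropDbLockChoice {
        static boolean isDbLevelLock(boolean locklessReadsEnabled, List<Boolean> tableSoftDelete) {
            return !locklessReadsEnabled || tableSoftDelete.stream().allMatch(b -> b);
        }

        public static void main(String[] args) {
            System.out.println(isDbLevelLock(false, List.of(true, false))); // true
            System.out.println(isDbLevelLock(true, List.of(true, false)));  // false: per-table locks
            System.out.println(isDbLevelLock(true, List.of(true, true)));   // true
        }
    }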
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
index e83d082703ef..ed08a59babd5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
@@ -40,10 +40,11 @@ public DropDatabaseOperation(DDLOperationContext context, DropDatabaseDesc desc)
   @Override
   public int execute() throws HiveException {
     try {
+      String catName = desc.getCatalogName();
       String dbName = desc.getDatabaseName();
       ReplicationSpec replicationSpec = desc.getReplicationSpec();
       if (replicationSpec.isInReplicationScope()) {
-        Database database = context.getDb().getDatabase(dbName);
+        Database database = context.getDb().getDatabase(catName, dbName);
         if (database == null || !replicationSpec.allowEventReplacementInto(database.getParameters())) {
           return 0;
         }
@@ -52,12 +53,12 @@ public int execute() throws HiveException {

       if (LlapHiveUtils.isLlapMode(context.getConf())) {
         ProactiveEviction.Request.Builder llapEvictRequestBuilder = ProactiveEviction.Request.Builder.create();
-        llapEvictRequestBuilder.addDb(dbName);
+        llapEvictRequestBuilder.addDb(dbName); //TODO add catalog for the cache
         ProactiveEviction.evict(context.getConf(), llapEvictRequestBuilder.build());
       }
       // Unregister the functions as well
       if (desc.isCasdade()) {
-        FunctionRegistry.unregisterPermanentFunctions(dbName);
+        FunctionRegistry.unregisterPermanentFunctions(dbName); //TODO add catalog for the cache
       }
     } catch (NoSuchObjectException ex) {
       throw new HiveException(ex, ErrorMsg.DATABASE_NOT_EXISTS, desc.getDatabaseName());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java
index cd0392dd066c..4d83f53b81c2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java
@@ -18,8 +18,10 @@

 package org.apache.hadoop.hive.ql.ddl.database.lock;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -43,16 +45,21 @@ public LockDatabaseAnalyzer(QueryState queryState) throws SemanticException {

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    if (catalogName != null && getCatalog(catalogName) == null) {
+      throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+    String databaseName = catDbNamePair.getRight();
     String mode = unescapeIdentifier(root.getChild(1).getText().toUpperCase());

-    inputs.add(new ReadEntity(getDatabase(databaseName)));
+    inputs.add(new ReadEntity(getDatabase(catalogName, databaseName, true)));
     // Lock database operation is to acquire the lock explicitly, the operation itself doesn't need to be locked.
     // Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction.
-    outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK));
+    outputs.add(new WriteEntity(getDatabase(catalogName, databaseName, true), WriteType.DDL_NO_LOCK));

     LockDatabaseDesc desc =
-        new LockDatabaseDesc(databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVE_QUERY_ID), ctx.getCmd());
+        new LockDatabaseDesc(catalogName, databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVE_QUERY_ID), ctx.getCmd());
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
     ctx.setNeedLockMgr(true);
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java
index 0affeced35e8..3f298bdb8fab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java
@@ -31,18 +31,25 @@
 public class LockDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

+  private final String catalogName;
   private final String databaseName;
   private final String mode;
   private final String queryId;
   private final String queryStr;

-  public LockDatabaseDesc(String databaseName, String mode, String queryId, String queryStr) {
+  public LockDatabaseDesc(String catalogName, String databaseName, String mode, String queryId, String queryStr) {
+    this.catalogName = catalogName;
     this.databaseName = databaseName;
     this.mode = mode;
     this.queryId = queryId;
     this.queryStr = queryStr;
   }

+  @Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+
   @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDatabaseName() {
     return databaseName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java
index 4345503f4b93..106c8f1a1590 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java
@@ -18,7 +18,9 @@

 package org.apache.hadoop.hive.ql.ddl.database.showcreate;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
@@ -43,12 +45,17 @@ public ShowCreateDatabaseAnalyzer(QueryState queryState) throws SemanticExceptio
   public void analyzeInternal(ASTNode root) throws SemanticException {
     ctx.setResFile(ctx.getLocalTmpPath());

-    String databaseName = getUnescapedName((ASTNode)root.getChild(0));
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    if (catalogName != null && getCatalog(catalogName) == null) {
+      throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+    String databaseName = catDbNamePair.getRight();

-    Database database = getDatabase(databaseName);
+    Database database = getDatabase(catalogName, databaseName, true);
     inputs.add(new ReadEntity(database));

-    ShowCreateDatabaseDesc desc = new ShowCreateDatabaseDesc(databaseName, ctx.getResFile());
+    ShowCreateDatabaseDesc desc = new ShowCreateDatabaseDesc(catalogName, databaseName, ctx.getResFile());
     Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
     rootTasks.add(task);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java
index cb60d7ed5ea9..31fc4d9787e4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java
@@ -35,9 +35,11 @@
 public class ShowCreateDatabaseDesc implements DDLDesc, Serializable {
   public static final String SCHEMA = "createdb_stmt#string";

   private final Path resFile;
+  private final String catName;
   private final String dbName;

-  public ShowCreateDatabaseDesc(String dbName, Path resFile) {
+  public ShowCreateDatabaseDesc(String catName, String dbName, Path resFile) {
+    this.catName = catName;
     this.dbName = dbName;
     this.resFile = resFile;
   }
@@ -51,4 +53,9 @@ public Path getResFile() {
   public String getDatabaseName() {
     return dbName;
   }
+
+  @Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catName;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java
index 1dba71f64059..2e13b42f7030 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java
@@ -52,7 +52,7 @@ public int execute() throws HiveException {
   }

   private int showCreateDatabase(DataOutputStream outStream) throws Exception {
-    Database database = context.getDb().getDatabase(desc.getDatabaseName());
+    Database database = context.getDb().getDatabase(desc.getCatalogName(), desc.getDatabaseName());

     StringBuilder createDbCommand = new StringBuilder();
     createDbCommand.append("CREATE DATABASE `").append(database.getName()).append("`\n");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java
index 64c80bf83132..ee4409d1f3ab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java
@@ -18,6 +18,8 @@

 package org.apache.hadoop.hive.ql.ddl.database.unlock;

+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -41,14 +43,19 @@ public UnlockDatabaseAnalyzer(QueryState queryState) throws SemanticException {

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    if (catalogName != null && getCatalog(catalogName) == null) {
+      throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+    String databaseName = catDbNamePair.getRight();

-    inputs.add(new ReadEntity(getDatabase(databaseName)));
+    inputs.add(new ReadEntity(getDatabase(catalogName, databaseName, true)));
     // Unlock database operation is to release the lock explicitly, the operation itself don't need to be locked.
     // Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction.
-    outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK));
+    outputs.add(new WriteEntity(getDatabase(catalogName, databaseName, true), WriteType.DDL_NO_LOCK));

-    UnlockDatabaseDesc desc = new UnlockDatabaseDesc(databaseName);
+    UnlockDatabaseDesc desc = new UnlockDatabaseDesc(catalogName, databaseName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
     ctx.setNeedLockMgr(true);
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java
index 3605a6d47e2d..341b5f5388f5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java
@@ -31,12 +31,19 @@
 public class UnlockDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

+  private final String catalogName;
   private final String databaseName;

-  public UnlockDatabaseDesc(String databaseName) {
+  public UnlockDatabaseDesc(String catalogName, String databaseName) {
+    this.catalogName = catalogName;
     this.databaseName = databaseName;
   }

+  @Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+
   @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDatabaseName() {
     return databaseName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java
index 97d86617d672..b8b5474fcd13 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.hive.ql.ddl.database.use;

+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -40,14 +41,13 @@ public SwitchDatabaseAnalyzer(QueryState queryState) throws SemanticException {

   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
-
-    Database database = getDatabase(databaseName, true);
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    Database database = getDatabase(catDbNamePair.getLeft(), catDbNamePair.getRight(), true);
     ReadEntity readEntity = new ReadEntity(database);
     readEntity.noLockNeeded();
     inputs.add(readEntity);

-    SwitchDatabaseDesc desc = new SwitchDatabaseDesc(databaseName);
+    SwitchDatabaseDesc desc = new SwitchDatabaseDesc(catDbNamePair.getLeft(), catDbNamePair.getRight());
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java
index 57923d8e53ba..447e67f4c4e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java
@@ -31,9 +31,11 @@
 public class SwitchDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

+  private final String catalogName;
   private final String databaseName;

-  public SwitchDatabaseDesc(String databaseName) {
+  public SwitchDatabaseDesc(String catalogName, String databaseName) {
+    this.catalogName = catalogName;
     this.databaseName = databaseName;
   }

@@ -41,4 +43,9 @@ public SwitchDatabaseDesc(String databaseName) {
   public String getDatabaseName() {
     return databaseName;
   }
+
+  @Explain(displayName = "catalogName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java
index 8a3c863825d5..0c947d459f6e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java
@@ -38,15 +38,21 @@ public SwitchDatabaseOperation(DDLOperationContext context, SwitchDatabaseDesc d

   @Override
   public int execute() throws HiveException {
+    String catName = desc.getCatalogName();
+    if (catName != null && context.getDb().getCatalog(catName) != null) {
+      SessionState.get().setCurrentCatalog(catName);
+    } else if (catName != null) {
+      throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, catName);
+    }
     String dbName = desc.getDatabaseName();
-    if (!context.getDb().databaseExists(dbName)) {
+    if (!context.getDb().databaseExists(catName, dbName)) {
       throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
     }
     SessionState.get().setCurrentDatabase(dbName);

     // set database specific parameters
-    Database database = context.getDb().getDatabase(dbName);
+    Database database = context.getDb().getDatabase(catName, dbName);
     assert(database != null);
     Map<String, String> dbParams = database.getParameters();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java
index d4be520dd476..a4ad0092acf5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java
@@ -18,6 +18,8 @@

 package org.apache.hadoop.hive.ql.ddl.table.lock.show;

+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
@@ -41,11 +43,16 @@ public ShowDbLocksAnalyzer(QueryState queryState) throws SemanticException {

   public void analyzeInternal(ASTNode root) throws SemanticException {
     ctx.setResFile(ctx.getLocalTmpPath());

-    String dbName = stripQuotes(root.getChild(0).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catName = catDbNamePair.getLeft();
+    if (catName != null && getCatalog(catName) == null) {
+      throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catName);
+    }
+    String dbName = catDbNamePair.getRight();
     boolean isExtended = (root.getChildCount() > 1);

     assert txnManager != null
: "Transaction manager should be set before calling analyze"; - ShowLocksDesc desc = new ShowLocksDesc(ctx.getResFile(), dbName, isExtended, txnManager.useNewShowLocksFormat()); + ShowLocksDesc desc = new ShowLocksDesc(ctx.getResFile(), catName, dbName, isExtended, txnManager.useNewShowLocksFormat()); Task task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); rootTasks.add(task); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksDesc.java index 898d3b8be618..e826e090b997 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksDesc.java @@ -39,14 +39,16 @@ public class ShowLocksDesc implements DDLDesc, Serializable { "string:string:string:string:string:string:string:string:string:string:string:string:string"; private final String resFile; + private final String catName; private final String dbName; private final String tableName; private final Map partSpec; private final boolean isExt; private final boolean isNewFormat; - public ShowLocksDesc(Path resFile, String dbName, boolean isExt, boolean isNewFormat) { + public ShowLocksDesc(Path resFile, String catName, String dbName, boolean isExt, boolean isNewFormat) { this.resFile = resFile.toString(); + this.catName = catName; this.dbName = dbName; this.tableName = null; this.partSpec = null; @@ -57,6 +59,7 @@ public ShowLocksDesc(Path resFile, String dbName, boolean isExt, boolean isNewFo public ShowLocksDesc(Path resFile, String tableName, Map partSpec, boolean isExt, boolean isNewFormat) { this.resFile = resFile.toString(); + this.catName = null; this.dbName = null; this.tableName = tableName; this.partSpec = partSpec; @@ -69,6 +72,11 @@ public String getResFile() { return resFile; } + @Explain(displayName = "catName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatName() { + return catName; + } + @Explain(displayName = "dbName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDbName() { return dbName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java index d7e71858a0ad..5a3ac38dfe2d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java @@ -160,6 +160,8 @@ private ShowLocksResponse getLocksForNewFormat(HiveLockManager lockMgr) throws H throw new HiveException("New lock format only supported with db lock manager."); } + // TODO catalog. Need to add catalog into ShowLocksRequest. But ShowLocksRequest doesn't have catalog field. + // Maybe we need to change hive_metastore.thrift to add catalog into ShowLocksRequest struct. 
ShowLocksRequest request = new ShowLocksRequest(); if (desc.getDbName() == null && desc.getTableName() != null) { request.setDbname(SessionState.get().getCurrentDatabase()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java index f6ac672eb67f..74fc85ac304c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.repl.util.SnapshotUtils; import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.parse.repl.load.log.IncrementalLoadLogger; import org.apache.hadoop.hive.ql.parse.repl.metric.event.Status; import org.apache.thrift.TException; @@ -448,8 +449,9 @@ private void setDbReadOnly() { Map props = new HashMap<>(); props.put(READONLY, Boolean.TRUE.toString()); + // TODO catalog. Need to double check the actual catalog here. AlterDatabaseSetPropertiesDesc setTargetReadOnly = - new AlterDatabaseSetPropertiesDesc(work.dbNameToLoadIn, props, null); + new AlterDatabaseSetPropertiesDesc(HiveUtils.getCurrentCatalogOrDefault(conf), work.dbNameToLoadIn, props, null); DDLWork alterDbPropWork = new DDLWork(new HashSet<>(), new HashSet<>(), setTargetReadOnly, true, work.dumpDirectory, work.getMetricCollector()); @@ -865,7 +867,7 @@ private int executeIncrementalLoad(long loadStartTime) throws Exception { props.put(currProp.getKey(), (actualVal == null) ? "" : actualVal); } } - AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(sourceDb.getName(), + AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(sourceDb.getCatalogName(), sourceDb.getName(), new PrincipalDesc(sourceDb.getOwnerName(), sourceDb.getOwnerType()), null); DDLWork ddlWork = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, (new Path(work.dumpDirectory)).getParent().toString(), work.getMetricCollector()); @@ -881,7 +883,9 @@ private int executeIncrementalLoad(long loadStartTime) throws Exception { props.put(ReplConst.REPL_FAILOVER_ENDPOINT, ""); } if (!props.isEmpty()) { - AlterDatabaseSetPropertiesDesc setTargetDesc = new AlterDatabaseSetPropertiesDesc(work.dbNameToLoadIn, props, null); + // TODO catalog. Need to double check the actual catalog here. + AlterDatabaseSetPropertiesDesc setTargetDesc = new AlterDatabaseSetPropertiesDesc(targetDb.getCatalogName(), + work.dbNameToLoadIn, props, null); Task addReplTargetPropTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), setTargetDesc, true, work.dumpDirectory, work.getMetricCollector()), conf); @@ -927,8 +931,9 @@ private int executeIncrementalLoad(long loadStartTime) throws Exception { String lastEventid = builder.eventTo().toString(); Map mapProp = new HashMap<>(); mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString(), lastEventid); + // TODO catalog. Need to double check the actual catalog here. 
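+ // Assumption: the database being made read-only lives in the session's current catalog; a dump replicated from another catalog would need the catalog name recorded in the dump metadata instead.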
AlterDatabaseSetPropertiesDesc alterDbDesc = - new AlterDatabaseSetPropertiesDesc(dbName, mapProp, + new AlterDatabaseSetPropertiesDesc(targetDb.getCatalogName(), dbName, mapProp, new ReplicationSpec(lastEventid, lastEventid)); Task updateReplIdTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java index 76ff8db26900..5c2061dbeb5b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java @@ -150,7 +150,7 @@ private boolean isDbEmpty(String dbName) throws HiveException { private Task createDbTask(Database dbObj) throws MetaException { // note that we do not set location - for repl load, we want that auto-created. - CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(dbObj.getName(), dbObj.getDescription(), + CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(dbObj.getCatalogName(), dbObj.getName(), dbObj.getDescription(), getDbLocation(dbObj), getDbManagedLocation(dbObj), false, updateDbProps(dbObj, context.dumpDirectory)); // If it exists, we want this to be an error condition. Repl Load is not intended to replace a // db. @@ -161,12 +161,12 @@ private Task createDbTask(Database dbObj) throws MetaException { } private Task alterDbTask(Database dbObj) { - return alterDbTask(dbObj.getName(), updateDbProps(dbObj, context.dumpDirectory), + return alterDbTask(dbObj.getCatalogName(), dbObj.getName(), updateDbProps(dbObj, context.dumpDirectory), context.hiveConf, context.dumpDirectory, this.metricCollector); } private Task setOwnerInfoTask(Database dbObj) { - AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(dbObj.getName(), + AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(dbObj.getCatalogName(), dbObj.getName(), new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), null); DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, (new Path(context.dumpDirectory)).getParent().toString(), this.metricCollector); @@ -204,10 +204,10 @@ private static Map updateDbProps(Database dbObj, String dumpDire return parameters; } - private static Task alterDbTask(String dbName, Map props, + private static Task alterDbTask(String catName, String dbName, Map props, HiveConf hiveConf, String dumpDirectory, ReplicationMetricCollector metricCollector) { - AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(dbName, props, null); + AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(catName, dbName, props, null); DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, (new Path(dumpDirectory)).getParent().toString(), metricCollector); return TaskFactory.get(work, hiveConf); @@ -228,7 +228,7 @@ public AlterDatabase(Context context, DatabaseEvent event, String dbNameToLoadIn @Override public TaskTracker tasks() throws SemanticException { Database dbObj = readDbMetadata(); - tracker.addTask(alterDbTask(dbObj.getName(), dbObj.getParameters(), context.hiveConf, + tracker.addTask(alterDbTask(dbObj.getCatalogName(), dbObj.getName(), dbObj.getParameters(), context.hiveConf, context.dumpDirectory, this.metricCollector )); return tracker; } diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java index eda4a3f070e8..9d7847af5d05 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.repl.DumpType; @@ -264,8 +265,9 @@ private Task dbUpdateReplStateTask(String dbName, String replState, Task p HashMap mapProp = new HashMap<>(); mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString(), replState); - AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(dbName, mapProp, - new ReplicationSpec(replState, replState)); + // TODO catalog. Need to double check the actual catalog here. + AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(HiveUtils.getCurrentCatalogOrDefault(conf), + dbName, mapProp, new ReplicationSpec(replState, replState)); Task updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterDbDesc, true, dumpDirectory, metricCollector), conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java index 8449ef83a087..e6faaaadd599 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse; import org.apache.hadoop.hive.metastore.api.TxnToWriteId; import org.apache.hadoop.hive.metastore.api.TxnType; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ValidTxnList; @@ -394,7 +395,9 @@ private List getLockObjects(QueryPlan plan, Database db, conf); if (db != null) { - locks.add(new HiveLockObj(new HiveLockObject(db.getName(), lockData), + String catName = Objects.requireNonNullElse(db.getCatalogName(), + HiveUtils.getCurrentCatalogOrDefault(conf)); + locks.add(new HiveLockObj(new HiveLockObject(catName + "@" + db.getName(), lockData), mode)); return locks; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java index 9897795db9c7..ab2fcae26834 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java @@ -20,6 +20,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; @@ -38,8 +39,10 @@ import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.metadata.Partition; import 
org.apache.hadoop.hive.ql.metadata.Table; /** * An implementation HiveTxnManager that includes internal methods that all @@ -150,9 +153,11 @@ public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveExcepti HiveLockManager lockMgr = getAndCheckLockManager(); HiveLockMode mode = HiveLockMode.valueOf(lockDb.getMode()); + String catName = Objects.requireNonNullElse(lockDb.getCatalogName(), + HiveUtils.getCurrentCatalogOrDefault(conf)); String dbName = lockDb.getDatabaseName(); - Database dbObj = hiveDB.getDatabase(dbName); + Database dbObj = hiveDB.getDatabase(catName, dbName); if (dbObj == null) { throw new HiveException("Database " + dbName + " does not exist "); } @@ -162,7 +167,8 @@ public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveExcepti String.valueOf(System.currentTimeMillis()), "EXPLICIT", lockDb.getQueryStr(), conf); - HiveLock lck = lockMgr.lock(new HiveLockObject(dbObj.getName(), lockData), mode, true); + // Using the catalogName@databaseName format to uniquely identify a database. + HiveLock lck = lockMgr.lock(new HiveLockObject(catName + "@" + dbObj.getName(), lockData), mode, true); if (lck == null) { return 1; } @@ -173,13 +179,15 @@ public int unlockDatabase(Hive hiveDB, UnlockDatabaseDesc unlockDb) throws HiveExcepti HiveLockManager lockMgr = getAndCheckLockManager(); + String catName = Objects.requireNonNullElse(unlockDb.getCatalogName(), + HiveUtils.getCurrentCatalogOrDefault(conf)); String dbName = unlockDb.getDatabaseName(); - Database dbObj = hiveDB.getDatabase(dbName); + Database dbObj = hiveDB.getDatabase(catName, dbName); if (dbObj == null) { throw new HiveException("Database " + dbName + " does not exist "); } - HiveLockObject obj = new HiveLockObject(dbObj.getName(), null); + HiveLockObject obj = new HiveLockObject(catName + "@" + dbObj.getName(), null); List<HiveLock> locks = lockMgr.getLocks(obj, false, false); if ((locks == null) || (locks.isEmpty())) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 6a332a10f19a..49fe02300fa6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -667,6 +667,9 @@ public void dropCatalog(String catName, boolean ignoreUnknownCat) public void createDatabase(Database db, boolean ifNotExist) throws AlreadyExistsException, HiveException { try { + if (db.getCatalogName() == null) { + db.setCatalogName(HiveUtils.getCurrentCatalogOrDefault(conf)); + } getMSC().createDatabase(db); } catch (AlreadyExistsException e) { if (!ifNotExist) { @@ -723,7 +726,7 @@ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownD */ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade) throws HiveException, NoSuchObjectException { - dropDatabase(new DropDatabaseDesc(name, ignoreUnknownDb, cascade, deleteData)); + dropDatabase(new DropDatabaseDesc(getDefaultCatalog(conf), name, ignoreUnknownDb, cascade, deleteData)); // TODO catalog.
check the actual catalog } public void dropDatabase(DropDatabaseDesc desc) @@ -735,7 +738,7 @@ public void dropDatabase(DropDatabaseDesc desc) .map(HiveTxnManager::getCurrentTxnId).orElse(0L); DropDatabaseRequest req = new DropDatabaseRequest(); - req.setCatalogName(getDefaultCatalog(conf)); + req.setCatalogName(Optional.ofNullable(desc.getCatalogName()).orElse(HiveUtils.getCurrentCatalogOrDefault(conf))); req.setName(desc.getDatabaseName()); req.setIgnoreUnknownDb(desc.getIfExists()); req.setDeleteData(desc.isDeleteData()); @@ -1297,7 +1300,12 @@ public void renamePartition(Table tbl, Map oldPartSpec, Partitio } } - // TODO: this whole path won't work with catalogs + /** + * Callers must ensure that the catalog has been set on the db object before calling this method. + * @param dbName The database name. + * @param db The database object. + * @throws HiveException + */ public void alterDatabase(String dbName, Database db) throws HiveException { try { @@ -1428,7 +1436,10 @@ public void createTable(Table tbl, boolean ifNotExists, } public void createTable(Table tbl, boolean ifNotExists) throws HiveException { - createTable(tbl, ifNotExists, null, null, null, null, + if (tbl.getCatalogName() == null) { + tbl.setCatalogName(HiveUtils.getCurrentCatalogOrDefault(conf)); + } + createTable(tbl, ifNotExists, null, null, null, null, null, null); } @@ -1463,6 +1474,9 @@ public void dropTable(Table table, boolean ifPurge) throws HiveException { long txnId = Optional.ofNullable(SessionState.get()) .map(ss -> ss.getTxnMgr().getCurrentTxnId()).orElse(0L); table.getTTable().setTxnId(txnId); + if (table.getCatName() == null) { + table.setCatalogName(HiveUtils.getCurrentCatalogOrDefault(conf)); + } dropTable(table.getTTable(), !tableWithSuffix, true, ifPurge); } @@ -1868,6 +1882,17 @@ public List<Table>
getAllTableObjects(String dbName) throws HiveException { return getTableObjects(dbName, ".*", null); } + /** + * Get all tables for the specified database. + * @param catName the catalog name; if null, the current catalog of this session is used. + * @param dbName the database name to find the tables in. + * @return List of all tables + * @throws HiveException + */ + public List<Table>
getAllTableObjects(String catName, String dbName) throws HiveException { return getTableObjects(catName, dbName, ".*", null); } + /** * Get all materialized view names for the specified database. * @param dbName @@ -1914,6 +1939,16 @@ public Table apply(org.apache.hadoop.hive.metastore.api.Table table) { } } + public List<Table>
getTableObjects(String catName, String dbName, String pattern, TableType tableType) throws HiveException { + try { + return Lists.transform(getMSC().getTables(catName, dbName, getTablesByType(catName, dbName, pattern, tableType), null), + Table::new + ); + } catch (Exception e) { + throw new HiveException(e); + } + } + /** * Returns all existing tables from default database which match the given * pattern. The matching occurs as per Java regular expressions @@ -1997,6 +2032,52 @@ public List getTablesByType(String dbName, String pattern, TableType typ } } + /** + * Returns all existing tables of a type (VIRTUAL_VIEW|EXTERNAL_TABLE|MANAGED_TABLE) from the specified + * database which match the given pattern. The matching occurs as per Java regular expressions. + * @param catName catalog name to find the tables in. if null, uses the current catalog in this session. + * @param dbName Database name to find the tables in. if null, uses the current database in this session. + * @param pattern A pattern to match for the table names.If null, returns all names from this DB. + * @param type The type of tables to return. VIRTUAL_VIEWS for views. If null, returns all tables and views. + * @return list of table names that match the pattern. + * @throws HiveException + */ + public List getTablesByType(String catName, String dbName, String pattern, TableType type) + throws HiveException { + PerfLogger perfLogger = SessionState.getPerfLogger(); + perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE); + + if (catName == null) { + catName = HiveUtils.getCurrentCatalogOrDefault(conf); + } + + if (dbName == null) { + dbName = SessionState.get().getCurrentDatabase(); + } + + try { + List result; + if (type != null) { + if (pattern != null) { + result = getMSC().getTables(catName, dbName, pattern, type); + } else { + result = getMSC().getTables(catName, dbName, ".*", type); + } + } else { + if (pattern != null) { + result = getMSC().getTables(catName, dbName, pattern); + } else { + result = getMSC().getTables(catName, dbName, ".*"); + } + } + return result; + } catch (Exception e) { + throw new HiveException(e); + } finally { + perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_TABLE, "HS2-cache"); + } + } + /** * Get the materialized views that have been enabled for rewriting from the * cache (registry). It will preprocess them to discard those that are @@ -2445,7 +2526,7 @@ public List getMaterializedViewsByAST( */ public List getAllDatabases() throws HiveException { try { - return getMSC().getAllDatabases(); + return getMSC().getAllDatabases(HiveUtils.getCurrentCatalogOrDefault(conf)); } catch (Exception e) { throw new HiveException(e); } @@ -2530,6 +2611,7 @@ public void alterCatalog(String catName, Catalog catalog) throws HiveException { } /** + * @deprecated please use {@link #databaseExists(String, String)}} * Query metadata to see if a database with the given name already exists. * * @param dbName @@ -2541,6 +2623,19 @@ public boolean databaseExists(String dbName) throws HiveException { return getDatabase(dbName) != null; } + /** + * Query metadata to see if a database with the given name already exists. + * + * @param catName + * @param dbName + * @return true if a database with the given name already exists, false if + * does not exist. + * @throws HiveException + */ + public boolean databaseExists(String catName, String dbName) throws HiveException { + return getDatabase(catName, dbName) != null; + } + /** * Get the database by name. * @param dbName the name of the database. 
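The null-defaulting in getTablesByType above is shared by the other catalog-aware lookups (getDatabase below applies the same rule to catName); a minimal sketch of the resolution order, using only names from this patch: if (catName == null) { catName = HiveUtils.getCurrentCatalogOrDefault(conf); } // session catalog if set, else the metastore default if (dbName == null) { dbName = SessionState.get().getCurrentDatabase(); } // current session database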
@@ -2572,6 +2667,9 @@ public Database getDatabase(String catName, String dbName) throws HiveException PerfLogger perfLogger = SessionState.getPerfLogger(); perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_DATABASE_2); try { + if (catName == null) { + catName = HiveUtils.getCurrentCatalogOrDefault(conf); + } return getMSC().getDatabase(catName, dbName); } catch (NoSuchObjectException e) { return null; @@ -6463,6 +6561,18 @@ public List getFunctionsInDb(String dbName, String pattern) throws Hiv } } + public List getFunctionsInDb(String catName, String dbName, String pattern) throws HiveException { + try { + GetFunctionsRequest request = new GetFunctionsRequest(dbName); + request.setPattern(pattern); + request.setCatalogName(Optional.ofNullable(catName).orElse(HiveUtils.getCurrentCatalogOrDefault(conf))); + request.setReturnNames(false); + return getMSC().getFunctionsRequest(request).getFunctions(); + } catch (TException te) { + throw new HiveException(te); + } + } + public void setMetaConf(String propName, String propValue) throws HiveException { try { getMSC().setMetaConf(propName, propValue); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java index ccc97614d73f..bf42a5c3fa43 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.parse.Quotation; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.UnparseTranslator; +import org.apache.hadoop.hive.ql.session.SessionState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @@ -60,6 +61,8 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; + /** * General collection of helper functions. * @@ -543,4 +546,10 @@ public static String getLowerCaseTableName(String refName) { } return refName.toLowerCase(); } + + public static String getCurrentCatalogOrDefault(Configuration conf) { + return SessionState.get() != null ? 
+ SessionState.get().getCurrentCatalog() : + getDefaultCatalog(conf); + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index 8a37073509ef..e0aed673dc25 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -1198,6 +1198,10 @@ public String getCatalogName() { return this.tTable.getCatName(); } + public void setCatalogName(String catalogName) { + this.tTable.setCatName(catalogName); + } + public void setOutdatedForRewriting(Boolean validForRewritingMaterializedView) { this.outdatedForRewritingMaterializedView = validForRewritingMaterializedView; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index f0850a27be6c..b9dbf992d9a6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -95,7 +95,6 @@ import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; -import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.apache.hadoop.hive.ql.plan.TableDesc; @@ -422,6 +421,33 @@ public static String charSetString(String charSetName, String charSetString) } } + /** + * Extract the catalog name and the database name from a database-name AST node. + * @param dbNameNode A root node that contains the database name fields + * @return a Pair whose left element is the catalogName (null when not specified) and whose right element is the dbName + * @throws SemanticException + */ + public static Pair<String, String> getCatDbNamePair(ASTNode dbNameNode) throws SemanticException { + String catName = null; + String dbName; + + if (dbNameNode.getChildCount() == 2) { + catName = unescapeIdentifier(dbNameNode.getChild(0).getText()); + dbName = unescapeIdentifier(dbNameNode.getChild(1).getText()); + } else if (dbNameNode.getChildCount() == 1) { + dbName = unescapeIdentifier(dbNameNode.getChild(0).getText()); + } else { + dbName = unescapeIdentifier(dbNameNode.getText()); + } + + if ((catName != null && catName.contains(".")) || dbName.contains(".")) { + throw new SemanticException(ASTErrorUtils.getMsg( + ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), dbNameNode)); + } + + return Pair.of(catName, dbName); + } + /** * Get dequoted name from a table/column node. * @param tableOrColumnNode the table or column node @@ -1915,6 +1941,12 @@ protected Database getDatabase(String dbName) throws SemanticException { return getDatabase(dbName, true); } + /** + * TODO catalog. Once we confirm that no compatibility has been broken, we can remove these non-catalog APIs. + * @deprecated Replaced by + * {@link BaseSemanticAnalyzer#getDatabase(String catalogName, String dbName, boolean throwException)} + * @return the database if it exists.
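+ * For example, {@code getDatabase("db1", true)} and {@code getDatabase(null, "db1", true)} should resolve the same + * database in the common case, since a null catalog falls back to the current session catalog.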
+ */ protected Database getDatabase(String dbName, boolean throwException) throws SemanticException { Database database; try { @@ -1928,6 +1960,19 @@ protected Database getDatabase(String dbName, boolean throwException) throws Sem return database; } + protected Database getDatabase(String catalogName, String dbName, boolean throwException) throws SemanticException { + Database database; + try { + database = db.getDatabase(catalogName, dbName); + } catch (Exception e) { + throw new SemanticException(e.getMessage(), e); + } + if (database == null && throwException) { + throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName)); + } + return database; + } + protected DataConnector getDataConnector(String dbName) throws SemanticException { return getDataConnector(dbName, true); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java index e32b3275dd74..196d76a729c7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.parse.SemanticException; import java.util.Collections; @@ -58,10 +59,13 @@ public List<Task<?>> handle(Context context) for (Map.Entry<String, String> entry : dbProps.entrySet()) { newDbProps.put(entry.getKey(), entry.getValue()); } - alterDbDesc = new AlterDatabaseSetPropertiesDesc(actualDbName, newDbProps, context.eventOnlyReplicationSpec()); + // TODO catalog. Need to double check the actual catalog here. + alterDbDesc = new AlterDatabaseSetPropertiesDesc(HiveUtils.getCurrentCatalogOrDefault(context.hiveConf), + actualDbName, newDbProps, context.eventOnlyReplicationSpec()); } else { - alterDbDesc = new AlterDatabaseSetOwnerDesc(actualDbName, new PrincipalDesc(newDb.getOwnerName(), - newDb.getOwnerType()), context.eventOnlyReplicationSpec()); + // TODO catalog. Need to double check the actual catalog here. + alterDbDesc = new AlterDatabaseSetOwnerDesc(HiveUtils.getCurrentCatalogOrDefault(context.hiveConf), actualDbName, + new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()), context.eventOnlyReplicationSpec()); } Task<?> alterDbTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java index cf7879875a71..ebcda945e372 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java @@ -56,14 +56,15 @@ public List<Task<?>> handle(Context context) Database db = metaData.getDatabase(); String destinationDBName = context.dbName == null ? db.getName() : context.dbName; + String destinationCatalogName = db.getCatalogName(); // TODO catalog. Need to double check the catalog here.
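+ // Dumps taken before catalog support may carry no catalog name; in that case destinationCatalogName is null and + // database creation falls back to the session catalog (see Hive.createDatabase).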
CreateDatabaseDesc createDatabaseDesc = - new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, null, true, db.getParameters()); + new CreateDatabaseDesc(destinationCatalogName, destinationDBName, db.getDescription(), null, null, true, db.getParameters()); Task<?> createDBTask = TaskFactory.get( new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf); if (!db.getParameters().isEmpty()) { - AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(destinationDBName, + AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(destinationCatalogName, destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec()); Task<?> alterDbProperties = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, context.getDumpDirectory(), @@ -71,7 +72,7 @@ public List<Task<?>> handle(Context context) createDBTask.addDependentTask(alterDbProperties); } if (StringUtils.isNotEmpty(db.getOwnerName())) { - AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationDBName, + AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationCatalogName, destinationDBName, new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec()); Task<?> alterDbTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java index 1d908bb1bdb5..ed6e331bd928 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java @@ -34,6 +34,7 @@ public enum HiveOperation { REPLSTATUS("REPLSTATUS", HiveParser.TOK_REPL_STATUS, new Privilege[]{Privilege.SELECT}, null), CREATECATALOG("CREATECATALOG", HiveParser.TOK_CREATECATALOG, null, new Privilege[]{Privilege.CREATE}), DROPCATALOG("DROPCATALOG", HiveParser.TOK_DROPCATALOG, null, new Privilege[]{Privilege.DROP}), + SWITCHCATALOG("SWITCHCATALOG", HiveParser.TOK_SWITCHCATALOG, null, null, true, false), CREATEDATABASE("CREATEDATABASE", HiveParser.TOK_CREATEDATABASE, null, new Privilege[]{Privilege.CREATE}), CREATEDATACONNECTOR("CREATEDATACONNECTOR", HiveParser.TOK_CREATEDATACONNECTOR, null, new Privilege[]{Privilege.CREATE}), DROPDATABASE("DROPDATABASE", HiveParser.TOK_DROPDATABASE, null, new Privilege[]{Privilege.DROP}), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java index afc82639771a..a9e9590247ea 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java @@ -86,6 +86,8 @@ public static HiveCommand find(String[] command, boolean findOnlyForTesting) { } else if (command.length > 1 && "show".equalsIgnoreCase(command[0]) && "processlist".equalsIgnoreCase(command[1])) { return PROCESSLIST; + } else if (command.length > 1 && "set".equalsIgnoreCase(command[0]) && "catalog".equalsIgnoreCase(command[1])) { + return null; // "set catalog catalog_name" should run as a SQLOperation instead of a HiveCommandOperation } else if (COMMANDS.contains(cmd)) { HiveCommand hiveCommand = HiveCommand.valueOf(cmd); if (findOnlyForTesting == hiveCommand.isOnlyForTesting()) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java index 6c84e31fc9ef..8217be9d844b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java @@ -37,6 +37,7 @@ public enum HiveOperationType { CREATEDATABASE, CREATEDATACONNECTOR, DROPCATALOG, + SWITCHCATALOG, DROPDATABASE, DROPDATACONNECTOR, SWITCHDATABASE, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java index d5869c2f4f5a..d7f1420330d0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java @@ -481,6 +481,7 @@ public HivePrivilegeObjectType getObjectType() { op2Priv.put(HiveOperationType.ALTERCATALOG_LOCATION, PrivRequirement.newIOPrivRequirement(null, ADMIN_PRIV_AR)); op2Priv.put(HiveOperationType.DESCCATALOG, PrivRequirement.newIOPrivRequirement(null, null)); op2Priv.put(HiveOperationType.SHOWCATALOGS, PrivRequirement.newIOPrivRequirement(null, null)); + op2Priv.put(HiveOperationType.SWITCHCATALOG, PrivRequirement.newIOPrivRequirement(null, null)); } /** diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index 0fb4b06611e5..b94657f716b3 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -324,7 +324,7 @@ public void testMetaStoreApiTiming() throws Throwable { hm.getAllDatabases(); hm.dumpAndClearMetaCallTiming("test"); String logStr = appender.getOutput(); - String expectedString = "getAllDatabases_()="; + String expectedString = "getAllDatabases_(String)="; Assert.assertTrue(logStr + " should contain <" + expectedString, logStr.contains(expectedString)); diff --git a/ql/src/test/queries/clientnegative/lockneg_try_lock_cat_db_in_use.q b/ql/src/test/queries/clientnegative/lockneg_try_lock_cat_db_in_use.q new file mode 100644 index 000000000000..9f6bed6aa559 --- /dev/null +++ b/ql/src/test/queries/clientnegative/lockneg_try_lock_cat_db_in_use.q @@ -0,0 +1,9 @@ +set hive.lock.numretries=0; +set hive.support.concurrency=true; + +CREATE CATALOG testcat LOCATION '/tmp/testcat' COMMENT 'Hive test catalog'; +create database testcat.lockneg9; + +lock database testcat.lockneg9 shared; + +drop database testcat.lockneg9; diff --git a/ql/src/test/queries/clientpositive/catalog.q b/ql/src/test/queries/clientpositive/catalog.q index 173e9a065da0..f2247761b15e 100644 --- a/ql/src/test/queries/clientpositive/catalog.q +++ b/ql/src/test/queries/clientpositive/catalog.q @@ -45,3 +45,6 @@ SHOW CATALOGS LIKE 'test__'; -- ALTER LOCATION ALTER CATALOG test_cat SET LOCATION '/tmp/test_cat_new'; DESC CATALOG EXTENDED test_cat; + +-- DROP catalog at the end +DROP CATALOG test_cat; diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q new file mode 100644 index 000000000000..9feb1c30bec8 --- /dev/null +++ b/ql/src/test/queries/clientpositive/catalog_database.q @@ -0,0 +1,71 @@ +set hive.support.concurrency=true; + +-- SORT_QUERY_RESULTS + +-- CREATE DATABASE in default catalog 'hive' +CREATE DATABASE testdb; + +-- 
Check databases in default catalog 'hive', +-- The list of databases in the catalog 'hive' should only contain the default and the testdb. +SHOW DATABASES; + +-- CREATE a new catalog with comment +CREATE CATALOG testcat LOCATION '/tmp/testcat' COMMENT 'Hive test catalog'; + +-- Check catalogs list +SHOW CATALOGS; + +-- CREATE DATABASE in new catalog testcat by catalog.db pattern +CREATE DATABASE testcat.testdb_1; + +-- Switch the catalog from hive to 'testcat' +SET CATALOG testcat; + +-- Check the current catalog, should be testcat. +select current_catalog(); + +-- Switch database by catalog.db pattern; the current catalog is changed as well. +USE hive.default; + +-- Check the current catalog, should be hive +select current_catalog(); + +-- CREATE DATABASE in new catalog testcat +SET CATALOG testcat; +CREATE DATABASE testdb_2; + +-- Check databases in catalog 'testcat', +-- The list of databases in the catalog 'testcat' should contain default and testdb_1 and testdb_2. +SHOW DATABASES; + +-- Switch database by catalog.db pattern +USE testcat.testdb_1; + +-- Drop database by catalog.db pattern +DROP DATABASE testcat.testdb_1; + +-- Check databases in catalog 'testcat', +-- The list of databases in the catalog 'testcat' should contain default and testdb_2. +SHOW DATABASES; + +-- DESC DATABASE by catalog.db pattern +DESCRIBE DATABASE testcat.testdb_2; +DESCRIBE DATABASE EXTENDED testcat.testdb_2; + +-- ALTER DATABASE by catalog.db pattern +ALTER DATABASE testcat.testdb_2 SET dbproperties('test'='yesthisis'); +ALTER DATABASE testcat.testdb_2 SET owner user user1; +ALTER DATABASE testcat.testdb_2 SET LOCATION '/tmp/testcat/path/testcat.testdb_2'; +DESCRIBE DATABASE testcat.testdb_2; + +-- SHOW CREATE DATABASE by catalog.db pattern +SHOW CREATE DATABASE testcat.testdb_2; + +-- DROP CATALOG at the end. Need to drop all non-default databases first. +DROP DATABASE testcat.testdb_2; +DROP CATALOG testcat; + +-- Switch back to the clean default hive catalog at the end. +DROP DATABASE hive.testdb; +SET CATALOG hive; + diff --git a/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out b/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out index 7b476d5130a0..d81c7a14a5c6 100644 --- a/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out +++ b/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out @@ -26,4 +26,4 @@ POSTHOOK: query: create table t2(id int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:tdb POSTHOOK: Output: tdb@t2 -FAILED: SemanticException Line 2:20 Table or database name may not contain dot(.) character 'tdb.t1' +FAILED: SemanticException Line 2:20 Catalog or table or database name may not contain dot(.) character 'tdb.t1' diff --git a/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out b/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out index ad264fadc52a..8b065d01a006 100644 --- a/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out +++ b/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out @@ -16,5 +16,5 @@ PREHOOK: query: lock database lockneg4 shared PREHOOK: type: LOCKDATABASE PREHOOK: Input: database:lockneg4 PREHOOK: Output: database:lockneg4 -Unable to acquire EXPLICIT, SHARED lock lockneg4 after 1 attempts. +Unable to acquire EXPLICIT, SHARED lock hive@lockneg4 after 1 attempts.
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git a/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out b/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out index 964b77600496..cdb4620f9d6e 100644 --- a/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out +++ b/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out @@ -16,5 +16,5 @@ PREHOOK: query: show locks database lockneg9 PREHOOK: type: SHOWLOCKS POSTHOOK: query: show locks database lockneg9 POSTHOOK: type: SHOWLOCKS -Unable to acquire IMPLICIT, EXCLUSIVE lock lockneg9 after 1 attempts. +Unable to acquire IMPLICIT, EXCLUSIVE lock hive@lockneg9 after 1 attempts. FAILED: Error in acquiring locks: Locks on the underlying objects cannot be acquired, retry after some time. diff --git a/ql/src/test/results/clientnegative/lockneg_try_lock_cat_db_in_use.q.out b/ql/src/test/results/clientnegative/lockneg_try_lock_cat_db_in_use.q.out new file mode 100644 index 000000000000..97cf1983b97d --- /dev/null +++ b/ql/src/test/results/clientnegative/lockneg_try_lock_cat_db_in_use.q.out @@ -0,0 +1,23 @@ +#### A masked pattern was here #### +PREHOOK: type: CREATECATALOG +PREHOOK: Output: catalog:testcat +#### A masked pattern was here #### +POSTHOOK: type: CREATECATALOG +POSTHOOK: Output: catalog:testcat +#### A masked pattern was here #### +PREHOOK: query: create database testcat.lockneg9 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:lockneg9 +POSTHOOK: query: create database testcat.lockneg9 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:lockneg9 +PREHOOK: query: lock database testcat.lockneg9 shared +PREHOOK: type: LOCKDATABASE +PREHOOK: Input: database:lockneg9 +PREHOOK: Output: database:lockneg9 +POSTHOOK: query: lock database testcat.lockneg9 shared +POSTHOOK: type: LOCKDATABASE +POSTHOOK: Input: database:lockneg9 +POSTHOOK: Output: database:lockneg9 +Unable to acquire IMPLICIT, EXCLUSIVE lock testcat@lockneg9 after 1 attempts. +FAILED: Error in acquiring locks: Locks on the underlying objects cannot be acquired, retry after some time. diff --git a/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out b/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out index 00d231a48b25..5aa7500aeaf4 100644 --- a/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out +++ b/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out @@ -48,5 +48,5 @@ PREHOOK: query: lock database lockneg2 exclusive PREHOOK: type: LOCKDATABASE PREHOOK: Input: database:lockneg2 PREHOOK: Output: database:lockneg2 -Unable to acquire EXPLICIT, EXCLUSIVE lock lockneg2 after 1 attempts. +Unable to acquire EXPLICIT, EXCLUSIVE lock hive@lockneg2 after 1 attempts. FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git a/ql/src/test/results/clientnegative/table_create_with_dot.q.out b/ql/src/test/results/clientnegative/table_create_with_dot.q.out index 99cdf0cf844a..06c5cc53d7c6 100644 --- a/ql/src/test/results/clientnegative/table_create_with_dot.q.out +++ b/ql/src/test/results/clientnegative/table_create_with_dot.q.out @@ -4,4 +4,4 @@ PREHOOK: Output: database:asd POSTHOOK: query: create database asd POSTHOOK: type: CREATEDATABASE POSTHOOK: Output: database:asd -FAILED: SemanticException Line 2:13 Table or database name may not contain dot(.) character 'asd.tbl' +FAILED: SemanticException Line 2:13 Catalog or table or database name may not contain dot(.) 
character 'asd.tbl' diff --git a/ql/src/test/results/clientpositive/llap/catalog.q.out b/ql/src/test/results/clientpositive/llap/catalog.q.out index 6f9ef138dcd0..aad833b332b6 100644 --- a/ql/src/test/results/clientpositive/llap/catalog.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog.q.out @@ -116,3 +116,11 @@ POSTHOOK: Input: catalog:test_cat Catalog Name test_cat Comment Hive test catalog #### A masked pattern was here #### +PREHOOK: query: DROP CATALOG test_cat +PREHOOK: type: DROPCATALOG +PREHOOK: Input: catalog:test_cat +PREHOOK: Output: catalog:test_cat +POSTHOOK: query: DROP CATALOG test_cat +POSTHOOK: type: DROPCATALOG +POSTHOOK: Input: catalog:test_cat +POSTHOOK: Output: catalog:test_cat diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out new file mode 100644 index 000000000000..0a445ab50ec9 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out @@ -0,0 +1,181 @@ +PREHOOK: query: CREATE DATABASE testdb +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:testdb +POSTHOOK: query: CREATE DATABASE testdb +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:testdb +PREHOOK: query: SHOW DATABASES +PREHOOK: type: SHOWDATABASES +POSTHOOK: query: SHOW DATABASES +POSTHOOK: type: SHOWDATABASES +default +testdb +#### A masked pattern was here #### +PREHOOK: type: CREATECATALOG +PREHOOK: Output: catalog:testcat +#### A masked pattern was here #### +POSTHOOK: type: CREATECATALOG +POSTHOOK: Output: catalog:testcat +#### A masked pattern was here #### +PREHOOK: query: SHOW CATALOGS +PREHOOK: type: SHOWCATALOGS +POSTHOOK: query: SHOW CATALOGS +POSTHOOK: type: SHOWCATALOGS +hive +testcat +PREHOOK: query: CREATE DATABASE testcat.testdb_1 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:testdb_1 +POSTHOOK: query: CREATE DATABASE testcat.testdb_1 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:testdb_1 +PREHOOK: query: SET CATALOG testcat +PREHOOK: type: SWITCHCATALOG +PREHOOK: Input: catalog:testcat +POSTHOOK: query: SET CATALOG testcat +POSTHOOK: type: SWITCHCATALOG +POSTHOOK: Input: catalog:testcat +PREHOOK: query: select current_catalog() +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select current_catalog() +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +testcat +PREHOOK: query: USE hive.default +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:default +POSTHOOK: query: USE hive.default +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:default +PREHOOK: query: select current_catalog() +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select current_catalog() +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +hive +PREHOOK: query: SET CATALOG testcat +PREHOOK: type: SWITCHCATALOG +PREHOOK: Input: catalog:testcat +POSTHOOK: query: SET CATALOG testcat +POSTHOOK: type: SWITCHCATALOG +POSTHOOK: Input: catalog:testcat +PREHOOK: query: CREATE DATABASE testdb_2 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:testdb_2 +POSTHOOK: query: CREATE DATABASE testdb_2 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:testdb_2 +PREHOOK: query: SHOW DATABASES +PREHOOK: type: SHOWDATABASES +POSTHOOK: query: SHOW DATABASES +POSTHOOK: type: SHOWDATABASES 
+default +testdb_1 +testdb_2 +PREHOOK: query: USE testcat.testdb_1 +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:testdb_1 +POSTHOOK: query: USE testcat.testdb_1 +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:testdb_1 +PREHOOK: query: DROP DATABASE testcat.testdb_1 +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:testdb_1 +PREHOOK: Output: database:testdb_1 +POSTHOOK: query: DROP DATABASE testcat.testdb_1 +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:testdb_1 +POSTHOOK: Output: database:testdb_1 +PREHOOK: query: SHOW DATABASES +PREHOOK: type: SHOWDATABASES +POSTHOOK: query: SHOW DATABASES +POSTHOOK: type: SHOWDATABASES +default +testdb_2 +PREHOOK: query: DESCRIBE DATABASE testcat.testdb_2 +PREHOOK: type: DESCDATABASE +PREHOOK: Input: database:testdb_2 +POSTHOOK: query: DESCRIBE DATABASE testcat.testdb_2 +POSTHOOK: type: DESCDATABASE +POSTHOOK: Input: database:testdb_2 +testdb_2 location/in/test hive_test_user USER +PREHOOK: query: DESCRIBE DATABASE EXTENDED testcat.testdb_2 +PREHOOK: type: DESCDATABASE +PREHOOK: Input: database:testdb_2 +POSTHOOK: query: DESCRIBE DATABASE EXTENDED testcat.testdb_2 +POSTHOOK: type: DESCDATABASE +POSTHOOK: Input: database:testdb_2 +testdb_2 location/in/test hive_test_user USER +PREHOOK: query: ALTER DATABASE testcat.testdb_2 SET dbproperties('test'='yesthisis') +PREHOOK: type: ALTERDATABASE +PREHOOK: Output: database:testdb_2 +POSTHOOK: query: ALTER DATABASE testcat.testdb_2 SET dbproperties('test'='yesthisis') +POSTHOOK: type: ALTERDATABASE +POSTHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +PREHOOK: type: ALTERDATABASE_OWNER +PREHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +POSTHOOK: type: ALTERDATABASE_OWNER +POSTHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +PREHOOK: type: ALTERDATABASE_LOCATION +PREHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +POSTHOOK: type: ALTERDATABASE_LOCATION +POSTHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +PREHOOK: query: DESCRIBE DATABASE testcat.testdb_2 +PREHOOK: type: DESCDATABASE +PREHOOK: Input: database:testdb_2 +POSTHOOK: query: DESCRIBE DATABASE testcat.testdb_2 +POSTHOOK: type: DESCDATABASE +POSTHOOK: Input: database:testdb_2 +testdb_2 location/in/test user1 USER +PREHOOK: query: SHOW CREATE DATABASE testcat.testdb_2 +PREHOOK: type: SHOW_CREATEDATABASE +PREHOOK: Input: database:testdb_2 +POSTHOOK: query: SHOW CREATE DATABASE testcat.testdb_2 +POSTHOOK: type: SHOW_CREATEDATABASE +POSTHOOK: Input: database:testdb_2 +CREATE DATABASE `testdb_2` +LOCATION +#### A masked pattern was here #### +WITH DBPROPERTIES ( + 'test'='yesthisis') +PREHOOK: query: DROP DATABASE testcat.testdb_2 +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:testdb_2 +PREHOOK: Output: database:testdb_2 +POSTHOOK: query: DROP DATABASE testcat.testdb_2 +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:testdb_2 +POSTHOOK: Output: database:testdb_2 +PREHOOK: query: DROP CATALOG testcat +PREHOOK: type: DROPCATALOG +PREHOOK: Input: catalog:testcat +PREHOOK: Output: catalog:testcat +POSTHOOK: query: DROP CATALOG testcat +POSTHOOK: type: DROPCATALOG +POSTHOOK: Input: catalog:testcat +POSTHOOK: Output: catalog:testcat +PREHOOK: query: DROP DATABASE hive.testdb +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:testdb +PREHOOK: Output: database:testdb +POSTHOOK: query: DROP DATABASE hive.testdb +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:testdb +POSTHOOK: Output: database:testdb 
+PREHOOK: query: SET CATALOG hive +PREHOOK: type: SWITCHCATALOG +PREHOOK: Input: catalog:hive +POSTHOOK: query: SET CATALOG hive +POSTHOOK: type: SWITCHCATALOG +POSTHOOK: Input: catalog:hive diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/BaseMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/BaseMetaStoreClient.java index 8a624735a14d..78c20bddace2 100644 --- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/BaseMetaStoreClient.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/BaseMetaStoreClient.java @@ -513,7 +513,8 @@ public final void dropDatabase(String name, boolean deleteData, boolean ignoreUn @Override public final void alterDatabase(String name, Database db) throws NoSuchObjectException, MetaException, TException { - alterDatabase(getDefaultCatalog(conf), name, db); + String catName = db.getCatalogName() == null ? getDefaultCatalog(conf) : db.getCatalogName(); + alterDatabase(catName, name, db); } @Override