Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -591,6 +591,46 @@ public void testReplDump() throws Exception {
assertEquals("table name", inDbTableName.toLowerCase(), dbObj.getObjectName());
}

@Test
public void showTablesInDB() throws Exception {
  // Set up two tables in the test database so "show tables" has content to authorize.
  final String tableName1 = "table1";
  driver.run("create table " + dbName + "." + tableName1 + "(eid int, yoj int)");
  final String tableName2 = "table2";
  driver.run("create table " + dbName + "." + tableName2 + "(eid int, ecode int)");
  // Clear recorded interactions so we only observe the SHOW TABLES authorization call.
  reset(mockedAuthorizer);

  int status = driver.compile("show tables in " + dbName, true);
  assertEquals(0, status);

  // SHOW TABLES must be authorized against the DATABASE object, and the authorizer
  // must be handed the database owner so ownership-based policies can be evaluated.
  Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
  List<HivePrivilegeObject> inputs = io.getLeft();
  HivePrivilegeObject dbObj = inputs.get(0);
  assertEquals("input type", HivePrivilegeObjectType.DATABASE, dbObj.getType());
  // Include a message so a failure explains what was missing, not just "expected true".
  assertTrue("database object should carry its owner name", dbObj.getOwnerName() != null);
}

@Test
public void AlterViewAsStmnt() throws Exception {
  reset(mockedAuthorizer);
  // Create two base tables and a view over the first one.
  final String tableName1 = "foo_tbl";
  driver.run("create table " + dbName + "." + tableName1 + "(eid int, yoj int)");
  final String tableName2 = "foo_bar";
  driver.run("create table " + dbName + "." + tableName2 + "(eid int, name string)");
  final String viewName = "foo_view";
  driver.run("create view " + dbName + "." + viewName + " as select * from " + dbName + "." + tableName1);
  // Clear interactions from the setup DDL so only the ALTER VIEW call is observed.
  reset(mockedAuthorizer);

  // Repoint the view at the second table; ALTER VIEW ... AS should read the new
  // source table and write both the database and the view object.
  int status = driver.compile(
      "Alter view " + dbName + "." + viewName + " as select * from " + dbName + "." + tableName2, true);
  assertEquals(0, status);

  Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
  List<HivePrivilegeObject> inputs = io.getLeft();
  assertEquals(1, inputs.size()); // foo_bar table object
  List<HivePrivilegeObject> outputs = io.getRight();
  assertEquals(2, outputs.size()); // Database and view objects
  HivePrivilegeObject dbObj = outputs.get(0);
  // Fixed copy-pasted assertion messages: these are OUTPUT objects, not inputs.
  assertEquals("output type", HivePrivilegeObjectType.DATABASE, dbObj.getType());
  HivePrivilegeObject viewObj = outputs.get(1);
  assertEquals("output type", HivePrivilegeObjectType.TABLE_OR_VIEW, viewObj.getType());
}

private void checkSingleTableInput(List<HivePrivilegeObject> inputs) {
assertEquals("number of inputs", 1, inputs.size());

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.view.create;

import java.util.ArrayList;

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
import org.apache.hadoop.hive.ql.ddl.DDLUtils;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseUtils;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.HiveOperation;

/**
 * Analyzer for ALTER VIEW ... AS commands: replaces the defining query of an
 * existing view with a new SELECT statement.
 */
@DDLType(types = HiveParser.TOK_ALTERVIEW_AS)
public class AlterViewAsAnalyzer extends AbstractCreateViewAnalyzer {
  public AlterViewAsAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
    // Record the operation so hooks/authorization see ALTERVIEW_AS rather than CREATEVIEW.
    queryState.setCommandType(HiveOperation.ALTERVIEW_AS);
  }

  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    TableName viewName = getQualifiedTableName((ASTNode) root.getChild(0));
    String fqViewName = viewName.getNotEmptyDbTable();
    // Parameterized logging avoids string concatenation when INFO is disabled.
    LOG.info("Altering the query of view {} position={}", fqViewName, root.getCharPositionInLine());

    ASTNode select = (ASTNode) root.getChild(1).getChild(0);

    // Capture the query text exactly as the user wrote it, before analysis
    // rewrites the token stream.
    String originalText = ctx.getTokenRewriteStream().toString(select.getTokenStartIndex(), select.getTokenStopIndex());

    SemanticAnalyzer analyzer = analyzeQuery(select, fqViewName);

    schema = new ArrayList<>(analyzer.getResultSchema());
    // Column names must be unique; prefer the pre-masking schema when available.
    ParseUtils.validateColumnNameUniqueness(
        analyzer.getOriginalResultSchema() == null ? schema : analyzer.getOriginalResultSchema());

    // Re-reading the same token range after analysis yields the expanded
    // (fully qualified) query text, since analyzeQuery rewrites the token stream
    // in place. NOTE(review): this relies on the rewrite happening for every
    // query shape — confirm before reusing this pattern elsewhere.
    String expandedText = ctx.getTokenRewriteStream().toString(select.getTokenStartIndex(), select.getTokenStopIndex());

    AlterViewAsDesc desc = new AlterViewAsDesc(fqViewName, schema, originalText, expandedText);
    validateAlterViewAs(desc, analyzer);

    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    // Register the database and the view itself as outputs for authorization/locking.
    DDLUtils.addDbAndTableToOutputs(getDatabase(viewName.getDb()), viewName, TableType.VIRTUAL_VIEW, false,
        null, outputs);
  }

  /**
   * Validates the ALTER VIEW ... AS statement: the tables used by the new query
   * are acceptable, the target view exists, and the replacement respects the
   * view's partitioning constraints.
   *
   * @throws SemanticException if the view does not exist or validation fails
   */
  private void validateAlterViewAs(AlterViewAsDesc desc, SemanticAnalyzer analyzer) throws SemanticException {
    validateTablesUsed(analyzer);

    Table oldView = null;
    try {
      // throwException=false: a missing view is reported with a specific error below.
      oldView = getTable(desc.getViewName(), false);
    } catch (HiveException e) {
      throw new SemanticException(e.getMessage(), e);
    }

    if (oldView == null) {
      String viewNotExistErrorMsg = "The following view does not exist: " + desc.getViewName();
      throw new SemanticException(ErrorMsg.ALTER_VIEW_AS_SELECT_NOT_EXIST.getMsg(viewNotExistErrorMsg));
    }

    validateReplaceWithPartitions(desc.getViewName(), oldView, null);
  }
}
8 changes: 4 additions & 4 deletions ql/src/test/results/clientpositive/alter_view_as_select.q.out
Original file line number Diff line number Diff line change
Expand Up @@ -52,12 +52,12 @@ Sort Columns: []
Original Query: SELECT * FROM srcpart
Expanded Query: SELECT `srcpart`.`key`, `srcpart`.`value`, `srcpart`.`ds`, `srcpart`.`hr` FROM `default`.`srcpart`
PREHOOK: query: ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86
PREHOOK: type: CREATEVIEW
PREHOOK: type: ALTERVIEW_AS
PREHOOK: Input: default@src
PREHOOK: Output: database:tv
PREHOOK: Output: tv@testView
POSTHOOK: query: ALTER VIEW tv.testView AS SELECT value FROM src WHERE key=86
POSTHOOK: type: CREATEVIEW
POSTHOOK: type: ALTERVIEW_AS
POSTHOOK: Input: default@src
POSTHOOK: Output: database:tv
POSTHOOK: Output: tv@testView
Expand Down Expand Up @@ -96,7 +96,7 @@ SELECT * FROM src
WHERE key > 80 AND key < 100
ORDER BY key, value
LIMIT 10
PREHOOK: type: CREATEVIEW
PREHOOK: type: ALTERVIEW_AS
PREHOOK: Input: default@src
PREHOOK: Output: database:tv
PREHOOK: Output: tv@testView
Expand All @@ -105,7 +105,7 @@ SELECT * FROM src
WHERE key > 80 AND key < 100
ORDER BY key, value
LIMIT 10
POSTHOOK: type: CREATEVIEW
POSTHOOK: type: ALTERVIEW_AS
POSTHOOK: Input: default@src
POSTHOOK: Output: database:tv
POSTHOOK: Output: tv@testView
Expand Down
4 changes: 2 additions & 2 deletions ql/src/test/results/clientpositive/alter_view_col_type.q.out
Original file line number Diff line number Diff line change
Expand Up @@ -120,12 +120,12 @@ POSTHOOK: Input: default@av1
4 d
5 e
PREHOOK: query: alter view av1 as select c2, c3 from at1
PREHOOK: type: CREATEVIEW
PREHOOK: type: ALTERVIEW_AS
PREHOOK: Input: default@at1
PREHOOK: Output: database:default
PREHOOK: Output: default@av1
POSTHOOK: query: alter view av1 as select c2, c3 from at1
POSTHOOK: type: CREATEVIEW
POSTHOOK: type: ALTERVIEW_AS
POSTHOOK: Input: default@at1
POSTHOOK: Output: database:default
POSTHOOK: Output: default@av1
Expand Down
4 changes: 2 additions & 2 deletions ql/src/test/results/clientpositive/llap/lineage3.q.out
Original file line number Diff line number Diff line change
Expand Up @@ -240,7 +240,7 @@ PREHOOK: Input: default@dest_v1
-64 253665376
-64 253665376
PREHOOK: query: alter view dest_v1 as select ctinyint from alltypesorc
PREHOOK: type: CREATEVIEW
PREHOOK: type: ALTERVIEW_AS
PREHOOK: Input: default@alltypesorc
PREHOOK: Output: database:default
PREHOOK: Output: default@dest_v1
Expand Down Expand Up @@ -307,7 +307,7 @@ PREHOOK: query: alter view dest_v3 as
group by a.ctinyint, a.csmallint, b.cboolean1
having count(a.cint) > 10
order by a, x, b.cboolean1 limit 10) t_n20
PREHOOK: type: CREATEVIEW
PREHOOK: type: ALTERVIEW_AS
PREHOOK: Input: default@alltypesorc
PREHOOK: Output: database:default
PREHOOK: Output: default@dest_v3
Expand Down