[KYUUBI #2016] Hive Backend Engine - GetTables Operation
### _Why are the changes needed?_

Implement the GetTables operation for the Hive backend engine, so JDBC clients can list tables and views through the engine.
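
For context, GetTables is the operation behind the standard JDBC metadata call `DatabaseMetaData.getTables`. A minimal client-side sketch of the call this patch enables (the connection URL, port, and credentials are illustrative placeholders, not values taken from this patch):

```scala
import java.sql.DriverManager

// Connect to the engine over a HiveServer2-compatible JDBC URL (placeholder values).
val conn = DriverManager.getConnection("jdbc:hive2://localhost:10009/default", "user", "")
try {
  // Arguments: catalog, schema pattern, table name pattern, table types.
  val rs = conn.getMetaData.getTables(null, "test_schema", "%", Array("TABLE", "VIEW"))
  while (rs.next()) {
    println(s"${rs.getString("TABLE_SCHEM")}.${rs.getString("TABLE_NAME")} (${rs.getString("TABLE_TYPE")})")
  }
} finally {
  conn.close()
}
```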

### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request

Closes #2155 from SteNicholas/KYUUBI-2016.

Closes #2016

7363719 [SteNicholas] [KYUUBI #2016] Hive Backend Engine - GetTables Operation
1f3d691 [SteNicholas] [KYUUBI #2016] Hive Backend Engine - GetTables Operation

Authored-by: SteNicholas <programgeek@163.com>
Signed-off-by: Cheng Pan <chengpan@apache.org>
SteNicholas authored and pan3793 committed Mar 17, 2022
1 parent 31be7a3 commit 449c426
Showing 3 changed files with 112 additions and 2 deletions.
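In summary: a new GetTables operation class for the Hive engine, the hook-up of that class in HiveOperationManager.newGetTablesOperation, and a new "get tables" test in HiveOperationSuite.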
@@ -0,0 +1,42 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kyuubi.engine.hive.operation

import java.util.List

import org.apache.hive.service.cli.operation.Operation

import org.apache.kyuubi.operation.OperationType
import org.apache.kyuubi.session.Session

class GetTables(
    session: Session,
    catalogName: String,
    schemaName: String,
    tableName: String,
    tableTypes: List[String])
  extends HiveOperation(OperationType.GET_TABLES, session) {

  override val internalHiveOperation: Operation =
    delegatedOperationManager.newGetTablesOperation(
      hive,
      catalogName,
      schemaName,
      tableName,
      tableTypes)
}
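
The operation is a thin wrapper: it only asks the delegated Hive OperationManager to build the corresponding GetTablesOperation against the engine's hive session and exposes it as internalHiveOperation, leaving the actual metadata retrieval to Hive's own implementation.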
@@ -65,7 +65,8 @@ class HiveOperationManager() extends OperationManager("HiveOperationManager") {
      schemaName: String,
      tableName: String,
      tableTypes: List[String]): Operation = {
-   throw KyuubiSQLException.featureNotSupported()
+   val operation = new GetTables(session, catalogName, schemaName, tableName, tableTypes)
+   addOperation(operation)
  }

  override def newGetTableTypesOperation(session: Session): Operation = {
@@ -21,7 +21,7 @@ import scala.collection.mutable.ArrayBuffer

import org.apache.kyuubi.engine.hive.HiveSQLEngine
import org.apache.kyuubi.operation.HiveJDBCTestHelper
-import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant.{TABLE_CATALOG, TABLE_SCHEM}
+import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant.{TABLE_CAT, TABLE_CATALOG, TABLE_NAME, TABLE_SCHEM, TABLE_TYPE}

class HiveOperationSuite extends HiveJDBCTestHelper {

@@ -63,6 +63,73 @@ class HiveOperationSuite extends HiveJDBCTestHelper {
    }
  }

test("get tables") {
withDatabases("test_schema") { statement =>
statement.execute("CREATE SCHEMA IF NOT EXISTS test_schema")
statement.execute("CREATE TABLE IF NOT EXISTS test_schema.test_table(a string)")
statement.execute(
"CREATE OR REPLACE VIEW test_schema.test_view AS SELECT * FROM test_schema.test_table")

try {
val meta = statement.getConnection.getMetaData
var resultSet = meta.getTables(null, null, null, null)
val resultSetBuffer = ArrayBuffer[(String, String, String, String)]()
while (resultSet.next()) {
resultSetBuffer += Tuple4(
resultSet.getString(TABLE_CAT),
resultSet.getString(TABLE_SCHEM),
resultSet.getString(TABLE_NAME),
resultSet.getString(TABLE_TYPE))
}
assert(resultSetBuffer.contains(("", "test_schema", "test_table", "TABLE")))
assert(resultSetBuffer.contains(("", "test_schema", "test_view", "VIEW")))

resultSet = meta.getTables("", null, null, null)
resultSetBuffer.clear()
while (resultSet.next()) {
resultSetBuffer += Tuple4(
resultSet.getString(TABLE_CAT),
resultSet.getString(TABLE_SCHEM),
resultSet.getString(TABLE_NAME),
resultSet.getString(TABLE_TYPE))
}
assert(resultSetBuffer.contains(("", "test_schema", "test_table", "TABLE")))
assert(resultSetBuffer.contains(("", "test_schema", "test_view", "VIEW")))

resultSet = meta.getTables(null, "test_schema", null, null)
resultSetBuffer.clear()
while (resultSet.next()) {
resultSetBuffer += Tuple4(
resultSet.getString(TABLE_CAT),
resultSet.getString(TABLE_SCHEM),
resultSet.getString(TABLE_NAME),
resultSet.getString(TABLE_TYPE))
}
assert(resultSetBuffer.contains(("", "test_schema", "test_table", "TABLE")))
assert(resultSetBuffer.contains(("", "test_schema", "test_view", "VIEW")))

resultSet = meta.getTables(null, null, "test_table", null)
while (resultSet.next()) {
assert(resultSet.getString(TABLE_CAT) == "")
assert(resultSet.getString(TABLE_SCHEM) == "test_schema")
assert(resultSet.getString(TABLE_NAME) == "test_table")
assert(resultSet.getString(TABLE_TYPE) == "TABLE")
}

resultSet = meta.getTables(null, null, null, Array("VIEW"))
while (resultSet.next()) {
assert(resultSet.getString(TABLE_CAT) == "")
assert(resultSet.getString(TABLE_SCHEM) == "test_schema")
assert(resultSet.getString(TABLE_NAME) == "test_view")
assert(resultSet.getString(TABLE_TYPE) == "VIEW")
}
} finally {
statement.execute("DROP VIEW test_schema.test_view")
statement.execute("DROP TABLE test_schema.test_table")
}
}
}

test("basic execute statements, create, insert query") {
withJdbcStatement("hive_engine_test") { statement =>
statement.execute("CREATE TABLE hive_engine_test(id int, value string) stored as orc")
