Skip to content

Commit 9bcd08f

Browse files
SteNicholas authored and yaooqinn committed
[KYUUBI #1906] Add GetColumns for trino engine
<!-- Thanks for sending a pull request! Here are some tips for you: 1. If this is your first time, please read our contributor guidelines: https://kyuubi.readthedocs.io/en/latest/community/contributions.html 2. If the PR is related to an issue in https://github.com/apache/incubator-kyuubi/issues, add '[KYUUBI #XXXX]' in your PR title, e.g., '[KYUUBI #XXXX] Your PR title ...'. 3. If the PR is unfinished, add '[WIP]' in your PR title, e.g., '[WIP][KYUUBI #XXXX] Your PR title ...'. --> ### _Why are the changes needed?_ <!-- Please clarify why the changes are needed. For instance, 1. If you add a feature, you can talk about the use case of it. 2. If you fix a bug, you can clarify why it is a bug. --> Add GetColumns for trino engine. ### _How was this patch tested?_ - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible - [ ] Add screenshots for manual tests if appropriate - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before make a pull request Closes #1922 from SteNicholas/KYUUBI-1906. Closes #1906 e7ff09b [SteNicholas] [KYUUBI #1906] Add GetColumns for trino engine Authored-by: SteNicholas <programgeek@163.com> Signed-off-by: Kent Yao <yao@apache.org>
1 parent 2501f9a commit 9bcd08f

File tree

3 files changed

+230
-34
lines changed

3 files changed

+230
-34
lines changed
Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.kyuubi.engine.trino.operation
19+
20+
import scala.collection.mutable.ArrayBuffer
21+
22+
import org.apache.commons.lang3.StringUtils
23+
24+
import org.apache.kyuubi.engine.trino.TrinoStatement
25+
import org.apache.kyuubi.operation.{IterableFetchIterator, OperationType}
26+
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant.{COLUMN_NAME, TABLE_CAT, TABLE_NAME, TABLE_SCHEM}
27+
import org.apache.kyuubi.session.Session
28+
29+
/**
 * A Trino operation that retrieves column metadata by querying Trino's
 * `system.jdbc.columns` table, optionally filtered by catalog, schema,
 * table and column search arguments.
 *
 * @param session     the session this operation belongs to
 * @param catalogName catalog name matched exactly; no filter when null/empty
 * @param schemaName  schema pattern (SQL LIKE, backslash escape); no filter when null/empty
 * @param tableName   table pattern (SQL LIKE, backslash escape); no filter when null/empty
 * @param columnName  column pattern (SQL LIKE, backslash escape); no filter when null/empty
 */
class GetColumns(
    session: Session,
    catalogName: String,
    schemaName: String,
    tableName: String,
    columnName: String)
  extends TrinoOperation(OperationType.GET_COLUMNS, session) {

  // JDBC search-string escape character used in the LIKE ... ESCAPE clauses.
  private val SEARCH_STRING_ESCAPE: String = "\\"

  // Doubles single quotes so a user-supplied value cannot terminate the SQL
  // string literal (which would break the query or permit SQL injection).
  private def sqlLiteral(value: String): String = value.replace("'", "''")

  override protected def runInternal(): Unit = {
    val query = new StringBuilder(
      """
        |SELECT TABLE_CAT, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, DATA_TYPE,
        |TYPE_NAME, COLUMN_SIZE, BUFFER_LENGTH, DECIMAL_DIGITS, NUM_PREC_RADIX,
        |NULLABLE, REMARKS, COLUMN_DEF, SQL_DATA_TYPE, SQL_DATETIME_SUB,
        |CHAR_OCTET_LENGTH, ORDINAL_POSITION, IS_NULLABLE, SCOPE_CATALOG,
        |SCOPE_SCHEMA, SCOPE_TABLE, SOURCE_DATA_TYPE
        |FROM system.jdbc.columns
        |""".stripMargin)

    // Only add predicates for the arguments the client actually supplied.
    val filters = ArrayBuffer[String]()
    if (StringUtils.isNotEmpty(catalogName)) {
      filters += s"$TABLE_CAT = '${sqlLiteral(catalogName)}'"
    }
    if (StringUtils.isNotEmpty(schemaName)) {
      filters += s"$TABLE_SCHEM LIKE '${sqlLiteral(schemaName)}' ESCAPE '$SEARCH_STRING_ESCAPE'"
    }
    if (StringUtils.isNotEmpty(tableName)) {
      filters += s"$TABLE_NAME LIKE '${sqlLiteral(tableName)}' ESCAPE '$SEARCH_STRING_ESCAPE'"
    }
    if (StringUtils.isNotEmpty(columnName)) {
      filters += s"$COLUMN_NAME LIKE '${sqlLiteral(columnName)}' ESCAPE '$SEARCH_STRING_ESCAPE'"
    }

    if (filters.nonEmpty) {
      query.append(" WHERE ")
      query.append(filters.mkString(" AND "))
    }

    try {
      val trinoStatement =
        TrinoStatement(trinoContext, session.sessionManager.getConf, query.toString)
      schema = trinoStatement.getColumns
      val resultSet = trinoStatement.execute()
      iter = new IterableFetchIterator(resultSet)
    } catch onError()
  }
}

externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationManager.scala

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,10 @@ class TrinoOperationManager extends OperationManager("TrinoOperationManager") {
8282
catalogName: String,
8383
schemaName: String,
8484
tableName: String,
85-
columnName: String): Operation = null
85+
columnName: String): Operation = {
86+
val op = new GetColumns(session, catalogName, schemaName, tableName, columnName)
87+
addOperation(op)
88+
}
8689

8790
override def newGetFunctionsOperation(
8891
session: Session,

externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala

Lines changed: 149 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -47,40 +47,42 @@ class TrinoOperationSuite extends WithTrinoEngine with HiveJDBCTestHelper {
4747

4848
override protected def jdbcUrl: String = getJdbcUrl
4949

50+
// Every standard Trino type name the engine is expected to report; shared by
// the type-info and column-metadata tests. Set semantics make ordering moot.
private val standardTypes: Set[String] = Set(
  // numeric
  BIGINT, INTEGER, SMALLINT, TINYINT, DECIMAL, REAL, DOUBLE,
  // boolean, character and binary
  BOOLEAN, CHAR, VARCHAR, VARBINARY,
  // date / time
  DATE, TIME, TIME_WITH_TIME_ZONE, TIMESTAMP, TIMESTAMP_WITH_TIME_ZONE,
  INTERVAL_DAY_TO_SECOND, INTERVAL_YEAR_TO_MONTH,
  // structural
  ARRAY, MAP, ROW,
  // sketches, JSON and miscellaneous
  HYPER_LOG_LOG, P4_HYPER_LOG_LOG, QDIGEST, JSON, IPADDRESS, UUID,
  GEOMETRY, SPHERICAL_GEOGRAPHY, BING_TILE)
81+
5082
test("trino - get type info") {
5183
withJdbcStatement() { statement =>
5284
val typeInfo = statement.getConnection.getMetaData.getTypeInfo
53-
val types: Set[String] = Set(
54-
BIGINT,
55-
INTEGER,
56-
SMALLINT,
57-
TINYINT,
58-
BOOLEAN,
59-
DATE,
60-
DECIMAL,
61-
REAL,
62-
DOUBLE,
63-
HYPER_LOG_LOG,
64-
QDIGEST,
65-
P4_HYPER_LOG_LOG,
66-
INTERVAL_DAY_TO_SECOND,
67-
INTERVAL_YEAR_TO_MONTH,
68-
TIMESTAMP,
69-
TIMESTAMP_WITH_TIME_ZONE,
70-
TIME,
71-
TIME_WITH_TIME_ZONE,
72-
VARBINARY,
73-
VARCHAR,
74-
CHAR,
75-
ROW,
76-
ARRAY,
77-
MAP,
78-
JSON,
79-
IPADDRESS,
80-
UUID,
81-
GEOMETRY,
82-
SPHERICAL_GEOGRAPHY,
83-
BING_TILE,
85+
val expectedTypes = standardTypes ++ Set(
8486
"color",
8587
"KdbTree",
8688
"CodePoints",
@@ -98,10 +100,10 @@ class TrinoOperationSuite extends WithTrinoEngine with HiveJDBCTestHelper {
98100
"Classifier")
99101
val typeInfos: Set[String] = Set()
100102
while (typeInfo.next()) {
101-
assert(types.contains(typeInfo.getString(TYPE_NAME)))
103+
assert(expectedTypes.contains(typeInfo.getString(TYPE_NAME)))
102104
typeInfos += typeInfo.getString(TYPE_NAME)
103105
}
104-
assert(types.size === typeInfos.size)
106+
assert(expectedTypes.size === typeInfos.size)
105107
}
106108
}
107109

@@ -441,6 +443,120 @@ class TrinoOperationSuite extends WithTrinoEngine with HiveJDBCTestHelper {
441443
}
442444
}
443445

446+
// Verifies DatabaseMetaData#getColumns against the Trino memory connector:
// first that information_schema.columns itself is reported with the expected
// column names and types, then that a table created with every standard type
// round-trips through the metadata call.
test("trino - get columns") {
  case class ColumnWithTableAndCatalogAndSchema(
    catalog: String,
    schema: String,
    tableName: String,
    columnName: String,
    typeName: String)

  withJdbcStatement() { statement =>
    val meta = statement.getConnection.getMetaData
    val resultSetBuffer = ArrayBuffer[ColumnWithTableAndCatalogAndSchema]()

    var columns = meta.getColumns(null, null, null, null)
    while (columns.next()) {
      resultSetBuffer +=
        ColumnWithTableAndCatalogAndSchema(
          columns.getString(TABLE_CAT),
          columns.getString(TABLE_SCHEM),
          columns.getString(TABLE_NAME),
          columns.getString(COLUMN_NAME),
          columns.getString(TYPE_NAME))
    }
    // Well-known columns of information_schema.columns that must be reported.
    val expectedInformationSchemaColumns = Seq(
      ("table_catalog", VARCHAR),
      ("table_schema", VARCHAR),
      ("table_name", VARCHAR),
      ("column_name", VARCHAR),
      ("ordinal_position", BIGINT),
      ("column_default", VARCHAR),
      ("is_nullable", VARCHAR),
      ("data_type", VARCHAR))
    expectedInformationSchemaColumns.foreach { case (columnName, typeName) =>
      assert(resultSetBuffer.contains(ColumnWithTableAndCatalogAndSchema(
        "memory",
        "information_schema",
        "columns",
        columnName,
        typeName)))
    }

    // Parameterized types need type arguments to be valid in a DDL statement.
    val columnTypes = standardTypes.map {
      case ARRAY => s"$ARRAY($VARCHAR)"
      case MAP => s"$MAP($VARCHAR, $VARCHAR)"
      case ROW => s"$ROW(c $VARCHAR)"
      case QDIGEST => s"$QDIGEST($VARCHAR)"
      case columnType => columnType
    }
    // Materialize the Set once so column positions below are stable and
    // indexing is not repeated Set-to-Seq conversion inside the loop.
    val orderedColumnTypes = columnTypes.toSeq
    val schema = orderedColumnTypes.zipWithIndex.map {
      case (columnType, position) => s"c$position $columnType"
    }
    statement.execute("CREATE SCHEMA IF NOT EXISTS memory.test_schema")
    statement.execute(
      s"CREATE TABLE IF NOT EXISTS memory.test_schema.test_column(${schema.mkString(",")})")

    try {
      columns = meta.getColumns("memory", "test_schema", "test_column", null)

      var position = 0
      while (columns.next()) {
        assert(columns.getString(TABLE_CAT) === "memory")
        assert(columns.getString(TABLE_SCHEM) === "test_schema")
        assert(columns.getString(TABLE_NAME) === "test_column")
        assert(columns.getString(COLUMN_NAME) === s"c$position")

        // Trino reports defaulted type parameters explicitly in TYPE_NAME.
        val expectType = orderedColumnTypes(position) match {
          case CHAR => s"$CHAR(1)"
          case DECIMAL => s"$DECIMAL(38,0)"
          case TIME => s"$TIME(3)"
          case TIME_WITH_TIME_ZONE => s"$TIME(3) with time zone"
          case TIMESTAMP => s"$TIMESTAMP(3)"
          case TIMESTAMP_WITH_TIME_ZONE => s"$TIMESTAMP(3) with time zone"
          case columnType => columnType
        }
        assert(columns.getString(TYPE_NAME) === expectType)
        position += 1
      }
      assert(position === orderedColumnTypes.size, "all columns should have been verified")
    } finally {
      // Clean up even when an assertion fails so suite reruns start fresh.
      statement.execute("DROP TABLE memory.test_schema.test_column")
      statement.execute("DROP SCHEMA memory.test_schema")
    }
  }
}
559+
444560
test("execute statement - select decimal") {
445561
withJdbcStatement() { statement =>
446562
val resultSet = statement.executeQuery("SELECT DECIMAL '1.2' as col1, DECIMAL '1.23' AS col2")

0 commit comments

Comments
 (0)