From b87d206ffb5da16b7d7b7d401be053b0e78c7a99 Mon Sep 17 00:00:00 2001
From: Muru Muthusamy
Date: Thu, 2 Dec 2021 09:49:06 +0800
Subject: [PATCH] [KYUUBI #1458] Delta lake table columns won't show up in
 DBeaver.

### _Why are the changes needed?_

For Delta Lake tables, the `CatalogTable` returned by the session catalog does not carry the real column list (Delta stores the schema in its transaction log), so `GetColumns` returned no columns and DBeaver showed none. When the table provider is `delta`, resolve the schema through `spark.table(...)` instead.

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/latest/develop_tools/testing.html#running-tests) locally before making a pull request

Closes #1476 from mmuru/delta-table-cols-dbeaver-fix.

Closes #1458

7801c701 [Muru Muthusamy] Reformatted to fix style issue.
4126da47 [Muru Muthusamy] Updated based on review feedback and added ignore case sensitive check.
28a4a12d [Muru Muthusamy] [KYUUBI#1458] Delta lake table columns won't show up in DBeaver.

Authored-by: Muru Muthusamy
Signed-off-by: Cheng Pan
---
 .../kyuubi/engine/spark/shim/CatalogShim_v2_4.scala | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v2_4.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v2_4.scala
index 4f703394c75..cc5a25b0c29 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v2_4.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v2_4.scala
@@ -125,7 +125,13 @@ class CatalogShim_v2_4 extends SparkCatalogShim {
     databases.flatMap { db =>
       val identifiers = catalog.listTables(db, tablePattern, includeLocalTempViews = true)
       catalog.getTablesByName(identifiers).flatMap { t =>
-        t.schema.zipWithIndex.filter(f => columnPattern.matcher(f._1.name).matches())
+        val tableSchema =
+          if (t.provider.getOrElse("").equalsIgnoreCase("delta")) {
+            spark.table(t.identifier.table).schema
+          } else {
+            t.schema
+          }
+        tableSchema.zipWithIndex.filter(f => columnPattern.matcher(f._1.name).matches())
           .map { case (f, i) => toColumnResult(catalogName, t.database, t.identifier.table, f, i) }
       }
     }
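
Illustrative only, not part of the patch: a minimal Scala sketch of the behavior the fix works around, i.e. the schema recorded in the session catalog entry for a Delta table differs from the schema resolved by the analyzer. It assumes a local `SparkSession` with the Delta Lake connector on the classpath; the table name `demo_delta` and the app name are made up for the example.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.TableIdentifier

object DeltaSchemaCheck {
  def main(args: Array[String]): Unit = {
    // Assumed Delta setup: requires the delta-core artifact on the classpath.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("delta-schema-check")
      .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
      .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")
      .getOrCreate()

    // Hypothetical example table.
    spark.sql("CREATE TABLE IF NOT EXISTS demo_delta (id INT, name STRING) USING delta")

    val catalogTable = spark.sessionState.catalog
      .getTablesByName(Seq(TableIdentifier("demo_delta"))).head

    // Schema stored in the catalog entry: does not reflect the real Delta columns.
    println(s"catalog schema:  ${catalogTable.schema.simpleString}")
    // Schema resolved through the analyzer, which the patch falls back to for provider "delta".
    println(s"resolved schema: ${spark.table("demo_delta").schema.simpleString}")

    spark.stop()
  }
}
```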