Skip to content

Commit 152e394

Browse files
pan3793 authored and yaooqinn committed
[KYUUBI #1592] [TEST][ICEBERG][DELTA] Introduce row level operation test for data lake format
### _Why are the changes needed?_ Introduce row level operation test for data lake format and remove redundant tests ### _How was this patch tested?_ - [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible - [ ] Add screenshots for manual tests if appropriate - [x] [Run test](https://kyuubi.readthedocs.io/en/latest/develop_tools/testing.html#running-tests) locally before make a pull request Closes #1592 from pan3793/test. Closes #1592 892feb8 [Cheng Pan] Simplify test 72fd093 [Cheng Pan] Fix import 9d20839 [Cheng Pan] Add row level operation tests for Iceberg and Delta 7730b4d [Cheng Pan] Also test JDBC connection MetaData in Kyuubi server 7a41dfd [Cheng Pan] [TEST] Remove redundant type info test in DeltaMetadataTests Authored-by: Cheng Pan <chengpan@apache.org> Signed-off-by: Kent Yao <yao@apache.org>
1 parent fcc6471 commit 152e394

File tree

6 files changed

+312
-351
lines changed

6 files changed

+312
-351
lines changed

externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkDeltaOperationSuite.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,11 +18,13 @@
1818
package org.apache.kyuubi.engine.spark.operation
1919

2020
import org.apache.kyuubi.engine.spark.WithSparkSQLEngine
21-
import org.apache.kyuubi.operation.DeltaMetadataTests
21+
import org.apache.kyuubi.operation.{DeltaMetadataTests, RowLevelOperationTests}
2222
import org.apache.kyuubi.tags.DeltaTest
2323

2424
@DeltaTest
25-
class SparkDeltaOperationSuite extends WithSparkSQLEngine with DeltaMetadataTests {
25+
class SparkDeltaOperationSuite extends WithSparkSQLEngine
26+
with DeltaMetadataTests
27+
with RowLevelOperationTests {
2628

2729
override protected def jdbcUrl: String = getJdbcUrl
2830

externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkIcebergOperationSuite.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,11 +18,13 @@
1818
package org.apache.kyuubi.engine.spark.operation
1919

2020
import org.apache.kyuubi.engine.spark.WithSparkSQLEngine
21-
import org.apache.kyuubi.operation.IcebergMetadataTests
21+
import org.apache.kyuubi.operation.{IcebergMetadataTests, RowLevelOperationTests}
2222
import org.apache.kyuubi.tags.IcebergTest
2323

2424
@IcebergTest
25-
class SparkIcebergOperationSuite extends WithSparkSQLEngine with IcebergMetadataTests {
25+
class SparkIcebergOperationSuite extends WithSparkSQLEngine
26+
with IcebergMetadataTests
27+
with RowLevelOperationTests {
2628

2729
override protected def jdbcUrl: String = getJdbcUrl
2830

externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala

Lines changed: 2 additions & 189 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,6 @@
1717

1818
package org.apache.kyuubi.engine.spark.operation
1919

20-
import java.sql.{DatabaseMetaData, ResultSet, SQLException, SQLFeatureNotSupportedException}
21-
2220
import scala.collection.JavaConverters._
2321
import scala.util.Random
2422

@@ -32,7 +30,6 @@ import org.apache.spark.kyuubi.SparkContextHelper
3230
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
3331
import org.apache.spark.sql.types._
3432

35-
import org.apache.kyuubi.Utils
3633
import org.apache.kyuubi.engine.spark.WithSparkSQLEngine
3734
import org.apache.kyuubi.engine.spark.shim.SparkCatalogShim
3835
import org.apache.kyuubi.operation.{HiveMetadataTests, SparkQueryTests}
@@ -240,7 +237,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
240237
}
241238
}
242239

243-
test("execute statement - select decimal") {
240+
test("execute statement - select decimal") {
244241
withJdbcStatement() { statement =>
245242
val resultSet = statement.executeQuery("SELECT 1.2BD as col1, 1.23BD AS col2")
246243
assert(resultSet.next())
@@ -256,7 +253,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
256253
}
257254
}
258255

259-
test("execute statement - select column name with dots") {
256+
test("execute statement - select column name with dots") {
260257
withJdbcStatement() { statement =>
261258
val resultSet = statement.executeQuery("select 'tmp.hello'")
262259
assert(resultSet.next())
@@ -305,190 +302,6 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
305302
}
306303
}
307304

308-
test("Hive JDBC Database MetaData API Auditing") {
309-
withJdbcStatement() { statement =>
310-
val metaData = statement.getConnection.getMetaData
311-
Seq(
312-
() => metaData.allProceduresAreCallable(),
313-
() => metaData.getURL,
314-
() => metaData.getUserName,
315-
() => metaData.isReadOnly,
316-
() => metaData.nullsAreSortedHigh,
317-
() => metaData.nullsAreSortedLow,
318-
() => metaData.nullsAreSortedAtStart(),
319-
() => metaData.nullsAreSortedAtEnd(),
320-
() => metaData.usesLocalFiles(),
321-
() => metaData.usesLocalFilePerTable(),
322-
() => metaData.supportsMixedCaseIdentifiers(),
323-
() => metaData.supportsMixedCaseQuotedIdentifiers(),
324-
() => metaData.storesUpperCaseIdentifiers(),
325-
() => metaData.storesUpperCaseQuotedIdentifiers(),
326-
() => metaData.storesLowerCaseIdentifiers(),
327-
() => metaData.storesLowerCaseQuotedIdentifiers(),
328-
() => metaData.storesMixedCaseIdentifiers(),
329-
() => metaData.storesMixedCaseQuotedIdentifiers(),
330-
() => metaData.getSQLKeywords,
331-
() => metaData.nullPlusNonNullIsNull,
332-
() => metaData.supportsConvert,
333-
() => metaData.supportsTableCorrelationNames,
334-
() => metaData.supportsDifferentTableCorrelationNames,
335-
() => metaData.supportsExpressionsInOrderBy(),
336-
() => metaData.supportsOrderByUnrelated,
337-
() => metaData.supportsGroupByUnrelated,
338-
() => metaData.supportsGroupByBeyondSelect,
339-
() => metaData.supportsLikeEscapeClause,
340-
() => metaData.supportsMultipleTransactions,
341-
() => metaData.supportsMinimumSQLGrammar,
342-
() => metaData.supportsCoreSQLGrammar,
343-
() => metaData.supportsExtendedSQLGrammar,
344-
() => metaData.supportsANSI92EntryLevelSQL,
345-
() => metaData.supportsANSI92IntermediateSQL,
346-
() => metaData.supportsANSI92FullSQL,
347-
() => metaData.supportsIntegrityEnhancementFacility,
348-
() => metaData.isCatalogAtStart,
349-
() => metaData.supportsSubqueriesInComparisons,
350-
() => metaData.supportsSubqueriesInExists,
351-
() => metaData.supportsSubqueriesInIns,
352-
() => metaData.supportsSubqueriesInQuantifieds,
353-
// Spark support this, see https://issues.apache.org/jira/browse/SPARK-18455
354-
() => metaData.supportsCorrelatedSubqueries,
355-
() => metaData.supportsOpenCursorsAcrossCommit,
356-
() => metaData.supportsOpenCursorsAcrossRollback,
357-
() => metaData.supportsOpenStatementsAcrossCommit,
358-
() => metaData.supportsOpenStatementsAcrossRollback,
359-
() => metaData.getMaxBinaryLiteralLength,
360-
() => metaData.getMaxCharLiteralLength,
361-
() => metaData.getMaxColumnsInGroupBy,
362-
() => metaData.getMaxColumnsInIndex,
363-
() => metaData.getMaxColumnsInOrderBy,
364-
() => metaData.getMaxColumnsInSelect,
365-
() => metaData.getMaxColumnsInTable,
366-
() => metaData.getMaxConnections,
367-
() => metaData.getMaxCursorNameLength,
368-
() => metaData.getMaxIndexLength,
369-
() => metaData.getMaxSchemaNameLength,
370-
() => metaData.getMaxProcedureNameLength,
371-
() => metaData.getMaxCatalogNameLength,
372-
() => metaData.getMaxRowSize,
373-
() => metaData.doesMaxRowSizeIncludeBlobs,
374-
() => metaData.getMaxStatementLength,
375-
() => metaData.getMaxStatements,
376-
() => metaData.getMaxTableNameLength,
377-
() => metaData.getMaxTablesInSelect,
378-
() => metaData.getMaxUserNameLength,
379-
() => metaData.supportsTransactionIsolationLevel(1),
380-
() => metaData.supportsDataDefinitionAndDataManipulationTransactions,
381-
() => metaData.supportsDataManipulationTransactionsOnly,
382-
() => metaData.dataDefinitionCausesTransactionCommit,
383-
() => metaData.dataDefinitionIgnoredInTransactions,
384-
() => metaData.getColumnPrivileges("", "%", "%", "%"),
385-
() => metaData.getTablePrivileges("", "%", "%"),
386-
() => metaData.getBestRowIdentifier("", "%", "%", 0, true),
387-
() => metaData.getVersionColumns("", "%", "%"),
388-
() => metaData.getExportedKeys("", "default", ""),
389-
() => metaData.supportsResultSetConcurrency(ResultSet.TYPE_FORWARD_ONLY, 2),
390-
() => metaData.ownUpdatesAreVisible(ResultSet.TYPE_FORWARD_ONLY),
391-
() => metaData.ownDeletesAreVisible(ResultSet.TYPE_FORWARD_ONLY),
392-
() => metaData.ownInsertsAreVisible(ResultSet.TYPE_FORWARD_ONLY),
393-
() => metaData.othersUpdatesAreVisible(ResultSet.TYPE_FORWARD_ONLY),
394-
() => metaData.othersDeletesAreVisible(ResultSet.TYPE_FORWARD_ONLY),
395-
() => metaData.othersInsertsAreVisible(ResultSet.TYPE_FORWARD_ONLY),
396-
() => metaData.updatesAreDetected(ResultSet.TYPE_FORWARD_ONLY),
397-
() => metaData.deletesAreDetected(ResultSet.TYPE_FORWARD_ONLY),
398-
() => metaData.insertsAreDetected(ResultSet.TYPE_FORWARD_ONLY),
399-
() => metaData.supportsNamedParameters(),
400-
() => metaData.supportsMultipleOpenResults,
401-
() => metaData.supportsGetGeneratedKeys,
402-
() => metaData.getSuperTypes("", "%", "%"),
403-
() => metaData.getSuperTables("", "%", "%"),
404-
() => metaData.getAttributes("", "%", "%", "%"),
405-
() => metaData.getResultSetHoldability,
406-
() => metaData.locatorsUpdateCopy,
407-
() => metaData.supportsStatementPooling,
408-
() => metaData.getRowIdLifetime,
409-
() => metaData.supportsStoredFunctionsUsingCallSyntax,
410-
() => metaData.autoCommitFailureClosesAllResultSets,
411-
() => metaData.getClientInfoProperties,
412-
() => metaData.getFunctionColumns("", "%", "%", "%"),
413-
() => metaData.getPseudoColumns("", "%", "%", "%"),
414-
() => metaData.generatedKeyAlwaysReturned).foreach { func =>
415-
val e = intercept[SQLFeatureNotSupportedException](func())
416-
assert(e.getMessage === "Method not supported")
417-
}
418-
419-
import org.apache.kyuubi.KYUUBI_VERSION
420-
assert(metaData.allTablesAreSelectable)
421-
assert(metaData.getDatabaseProductName === "Apache Kyuubi (Incubating)")
422-
assert(metaData.getDatabaseProductVersion === KYUUBI_VERSION)
423-
assert(metaData.getDriverName === "Kyuubi Project Hive JDBC Shaded Client")
424-
assert(metaData.getDriverVersion === KYUUBI_VERSION)
425-
assert(metaData.getDatabaseMajorVersion === Utils.majorVersion(KYUUBI_VERSION))
426-
assert(metaData.getDatabaseMinorVersion === Utils.minorVersion(KYUUBI_VERSION))
427-
assert(
428-
metaData.getIdentifierQuoteString === " ",
429-
"This method returns a space \" \" if identifier quoting is not supported")
430-
assert(metaData.getNumericFunctions === "")
431-
assert(metaData.getStringFunctions === "")
432-
assert(metaData.getSystemFunctions === "")
433-
assert(metaData.getTimeDateFunctions === "")
434-
assert(metaData.getSearchStringEscape === "\\")
435-
assert(metaData.getExtraNameCharacters === "")
436-
assert(metaData.supportsAlterTableWithAddColumn())
437-
assert(!metaData.supportsAlterTableWithDropColumn())
438-
assert(metaData.supportsColumnAliasing())
439-
assert(metaData.supportsGroupBy)
440-
assert(!metaData.supportsMultipleResultSets)
441-
assert(!metaData.supportsNonNullableColumns)
442-
assert(metaData.supportsOuterJoins)
443-
assert(metaData.supportsFullOuterJoins)
444-
assert(metaData.supportsLimitedOuterJoins)
445-
assert(metaData.getSchemaTerm === "database")
446-
assert(metaData.getProcedureTerm === "UDF")
447-
assert(metaData.getCatalogTerm === "instance")
448-
assert(metaData.getCatalogSeparator === ".")
449-
assert(metaData.supportsSchemasInDataManipulation)
450-
assert(!metaData.supportsSchemasInProcedureCalls)
451-
assert(metaData.supportsSchemasInTableDefinitions)
452-
assert(!metaData.supportsSchemasInIndexDefinitions)
453-
assert(!metaData.supportsSchemasInPrivilegeDefinitions)
454-
// This is actually supported, but hive jdbc package return false
455-
assert(!metaData.supportsCatalogsInDataManipulation)
456-
assert(!metaData.supportsCatalogsInProcedureCalls)
457-
// This is actually supported, but hive jdbc package return false
458-
assert(!metaData.supportsCatalogsInTableDefinitions)
459-
assert(!metaData.supportsCatalogsInIndexDefinitions)
460-
assert(!metaData.supportsCatalogsInPrivilegeDefinitions)
461-
assert(!metaData.supportsPositionedDelete)
462-
assert(!metaData.supportsPositionedUpdate)
463-
assert(!metaData.supportsSelectForUpdate)
464-
assert(!metaData.supportsStoredProcedures)
465-
// This is actually supported, but hive jdbc package return false
466-
assert(!metaData.supportsUnion)
467-
assert(metaData.supportsUnionAll)
468-
assert(metaData.getMaxColumnNameLength === 128)
469-
assert(metaData.getDefaultTransactionIsolation === java.sql.Connection.TRANSACTION_NONE)
470-
assert(!metaData.supportsTransactions)
471-
assert(!metaData.getProcedureColumns("", "%", "%", "%").next())
472-
intercept[SQLException](metaData.getPrimaryKeys("", "default", ""))
473-
assert(!metaData.getImportedKeys("", "default", "").next())
474-
intercept[SQLException] {
475-
metaData.getCrossReference("", "default", "src", "", "default", "src2")
476-
}
477-
assert(!metaData.getIndexInfo("", "default", "src", true, true).next())
478-
479-
assert(metaData.supportsResultSetType(new Random().nextInt()))
480-
assert(!metaData.supportsBatchUpdates)
481-
assert(!metaData.getUDTs(",", "%", "%", null).next())
482-
assert(!metaData.supportsSavepoints)
483-
assert(!metaData.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT))
484-
assert(metaData.getJDBCMajorVersion === 3)
485-
assert(metaData.getJDBCMinorVersion === 0)
486-
assert(metaData.getSQLStateType === DatabaseMetaData.sqlStateSQL)
487-
assert(metaData.getMaxLogicalLobSize === 0)
488-
assert(!metaData.supportsRefCursors)
489-
}
490-
}
491-
492305
test("get operation status") {
493306
val sql = "select date_sub(date'2011-11-11', '1')"
494307

0 commit comments

Comments (0)