Skip to content

Commit d77882a

Browse files
Nick-0723 authored and yaooqinn committed
[KYUUBI #2207] Fix DayTimeIntervalType/YearMonthIntervalType Column Size
### _Why are the changes needed?_ The data types DayTimeIntervalType and YearMonthIntervalType have a default column size, but it is currently reported as null. ### _How was this patch tested?_ - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible - [ ] Add screenshots for manual tests if appropriate - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before make a pull request Closes #2251 from Nick-0723/fix_interval_columnSize. Closes #2207 1903da2 [Nick Song] fix spark3.3.0 interval type column size Authored-by: Nick Song <chun2184@163.com> Signed-off-by: Kent Yao <yao@apache.org>
1 parent 513ea83 commit d77882a

File tree

2 files changed

+22
-6
lines changed

2 files changed

+22
-6
lines changed

externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,16 @@ object SchemaHelper {
3131
*/
3232
final val TIMESTAMP_NTZ = "TimestampNTZType$"
3333

34+
/**
35+
* Spark 3.2.0 DataType DayTimeIntervalType's class name.
36+
*/
37+
final val DAY_TIME_INTERVAL = "DayTimeIntervalType"
38+
39+
/**
40+
* Spark 3.2.0 DataType YearMonthIntervalType's class name.
41+
*/
42+
final val YEAR_MONTH_INTERVAL = "YearMonthIntervalType"
43+
3444
def toTTypeId(typ: DataType): TTypeId = typ match {
3545
case NullType => TTypeId.NULL_TYPE
3646
case BooleanType => TTypeId.BOOLEAN_TYPE
@@ -47,9 +57,9 @@ object SchemaHelper {
4757
case ntz if ntz.getClass.getSimpleName.equals(TIMESTAMP_NTZ) => TTypeId.TIMESTAMP_TYPE
4858
case BinaryType => TTypeId.BINARY_TYPE
4959
case CalendarIntervalType => TTypeId.STRING_TYPE
50-
case dt if dt.getClass.getSimpleName.equals("DayTimeIntervalType") =>
60+
case dt if dt.getClass.getSimpleName.equals(DAY_TIME_INTERVAL) =>
5161
TTypeId.INTERVAL_DAY_TIME_TYPE
52-
case ym if ym.getClass.getSimpleName.equals("YearMonthIntervalType") =>
62+
case ym if ym.getClass.getSimpleName.equals(YEAR_MONTH_INTERVAL) =>
5363
TTypeId.INTERVAL_YEAR_MONTH_TYPE
5464
case _: ArrayType => TTypeId.ARRAY_TYPE
5565
case _: MapType => TTypeId.MAP_TYPE
@@ -125,7 +135,9 @@ object SchemaHelper {
125135
* For array, map, string, and binaries, the column size is variable, return null as unknown.
126136
*/
127137
def getColumnSize(sparkType: DataType): Option[Int] = sparkType match {
128-
case ntz if ntz.getClass.getSimpleName.equals(TIMESTAMP_NTZ) => Some(ntz.defaultSize)
138+
case dt
139+
if Array(TIMESTAMP_NTZ, DAY_TIME_INTERVAL, YEAR_MONTH_INTERVAL)
140+
.contains(dt.getClass.getSimpleName) => Some(dt.defaultSize)
129141
case dt @ (BooleanType | _: NumericType | DateType | TimestampType |
130142
CalendarIntervalType | NullType) =>
131143
Some(dt.defaultSize)

externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
5757

5858
test("get columns operation") {
5959
val tableName = "spark_get_col_operation"
60-
val schema = new StructType()
60+
var schema = new StructType()
6161
.add("c0", "boolean", nullable = false, "0")
6262
.add("c1", "tinyint", nullable = true, "1")
6363
.add("c2", "smallint", nullable = false, "2")
@@ -80,7 +80,9 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
8080
// since spark3.3.0
8181
if (SPARK_ENGINE_MAJOR_MINOR_VERSION._1 > 3 ||
8282
(SPARK_ENGINE_MAJOR_MINOR_VERSION._1 == 3 && SPARK_ENGINE_MAJOR_MINOR_VERSION._2 >= 3)) {
83-
schema.add("c18", "timestamp_ntz", nullable = true, "18")
83+
schema = schema.add("c18", "timestamp_ntz", nullable = true, "18")
84+
.add("c19", "interval day", nullable = true, "19")
85+
.add("c20", "interval year", nullable = true, "20")
8486
}
8587

8688
val ddl =
@@ -118,7 +120,9 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
118120
STRUCT,
119121
BINARY,
120122
STRUCT,
121-
TIMESTAMP)
123+
TIMESTAMP,
124+
OTHER,
125+
OTHER)
122126

123127
var pos = 0
124128

0 commit comments

Comments (0)