[SPARK-40008][SQL] Support casting of integrals to ANSI intervals #37442
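This change adds `(_: IntegralType, _: AnsiIntervalType)` to both `Cast.canCast` and `Cast.canAnsiCast`, and drops the `assert(it.startField == it.endField)` guards from the integral branches of the day-time and year-month interval casts, so TINYINT, SMALLINT, INT, and BIGINT values can be cast to ANSI intervals, including multi-field ones such as INTERVAL HOUR TO SECOND. The operand is interpreted as a count of the target interval's end-field units; values that do not fit raise CAST_OVERFLOW. New SQL tests and golden-file results cover both ANSI and non-ANSI modes.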

Closed
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -112,6 +112,7 @@ object Cast {
     case (StringType, _: AnsiIntervalType) => true

     case (_: AnsiIntervalType, _: IntegralType | _: DecimalType) => true
+    case (_: IntegralType, _: AnsiIntervalType) => true

     case (_: DayTimeIntervalType, _: DayTimeIntervalType) => true
     case (_: YearMonthIntervalType, _: YearMonthIntervalType) => true
@@ -196,6 +197,7 @@ object Cast {
     case (_: DayTimeIntervalType, _: DayTimeIntervalType) => true
     case (_: YearMonthIntervalType, _: YearMonthIntervalType) => true
     case (_: AnsiIntervalType, _: IntegralType | _: DecimalType) => true
+    case (_: IntegralType, _: AnsiIntervalType) => true

     case (StringType, _: NumericType) => true
     case (BooleanType, _: NumericType) => true
@@ -786,7 +788,6 @@ case class Cast(
     case _: DayTimeIntervalType => buildCast[Long](_, s =>
       IntervalUtils.durationToMicros(IntervalUtils.microsToDuration(s), it.endField))
     case x: IntegralType =>
-      assert(it.startField == it.endField)
       if (x == LongType) {
         b => IntervalUtils.longToDayTimeInterval(
           x.integral.asInstanceOf[Integral[Any]].toLong(b), it.endField)
@@ -804,7 +805,6 @@ case class Cast(
     case _: YearMonthIntervalType => buildCast[Int](_, s =>
       IntervalUtils.periodToMonths(IntervalUtils.monthsToPeriod(s), it.endField))
     case x: IntegralType =>
-      assert(it.startField == it.endField)
       if (x == LongType) {
         b => IntervalUtils.longToYearMonthInterval(
           x.integral.asInstanceOf[Integral[Any]].toLong(b), it.endField)
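To make the intended semantics concrete: the golden files below show that the integral operand is taken as a count of the interval's end-field units (for example, `cast(100Y as interval hour to second)` yields 100 seconds). The following is a minimal, self-contained sketch of that conversion, not Spark's implementation; the unit constants are assumed to mirror Spark's microsecond-based interval representation.

```scala
// A minimal sketch of the cast semantics exercised by the golden files below --
// NOT Spark's code. The integral operand is read as a count of the target
// interval's END field units, then widened to the interval's internal
// representation. Overflow surfaces as java.lang.ArithmeticException, which
// Spark reports as CAST_OVERFLOW.
object IntegralToIntervalSemantics {

  // Year-month intervals are stored as a month count (an Int in Spark).
  def toMonths(value: Long, endField: String): Int = {
    val months = endField match {
      case "YEAR"  => Math.multiplyExact(value, 12L)
      case "MONTH" => value
      case other   => throw new IllegalArgumentException(s"unsupported field: $other")
    }
    Math.toIntExact(months) // overflows for CAST(2147483647 AS INTERVAL YEAR)
  }

  // Day-time intervals are stored as a microsecond count (a Long in Spark).
  def toMicros(value: Long, endField: String): Long = {
    val microsPerUnit = endField match {
      case "DAY"    => 24L * 60 * 60 * 1000 * 1000
      case "HOUR"   => 60L * 60 * 1000 * 1000
      case "MINUTE" => 60L * 1000 * 1000
      case "SECOND" => 1000L * 1000
      case other    => throw new IllegalArgumentException(s"unsupported field: $other")
    }
    // overflows for CAST(-9223372036854775808L AS INTERVAL DAY)
    Math.multiplyExact(value, microsPerUnit)
  }

  def main(args: Array[String]): Unit = {
    assert(toMonths(1L, "YEAR") == 12)             // CAST(1Y AS INTERVAL YEAR)             -> 1-0
    assert(toMonths(-122L, "MONTH") == -122)       // CAST(-122S AS INTERVAL YEAR TO MONTH) -> -10-2
    assert(toMicros(100L, "SECOND") == 100000000L) // CAST(100Y AS INTERVAL HOUR TO SECOND) -> 0 00:01:40
    assert(toMicros(10L, "DAY") == 864000000000L)  // CAST(10 AS INTERVAL DAY)              -> 10 00:00:00
  }
}
```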
14 changes: 13 additions & 1 deletion sql/core/src/test/resources/sql-tests/inputs/cast.sql
@@ -105,7 +105,7 @@ select cast('a' as timestamp_ntz);
 select cast(cast('inf' as double) as timestamp);
 select cast(cast('inf' as float) as timestamp);

--- cast ANSI intervals to numerics
+-- cast ANSI intervals to integrals
 select cast(interval '1' year as tinyint);
 select cast(interval '-10-2' year to month as smallint);
 select cast(interval '1000' month as int);
@@ -117,6 +117,18 @@ select cast(interval '10' day as bigint);
 select cast(interval '-1000' month as tinyint);
 select cast(interval '1000000' second as smallint);

+-- cast integrals to ANSI intervals
+select cast(1Y as interval year);
+select cast(-122S as interval year to month);
+select cast(1000 as interval month);
+select cast(-10L as interval second);
+select cast(100Y as interval hour to second);
+select cast(-1000S as interval day to second);
+select cast(10 as interval day);
+
+select cast(2147483647 as interval year);
+select cast(-9223372036854775808L as interval day);
+
 -- cast ANSI intervals to decimals
 select cast(interval '-1' year as decimal(10, 0));
 select cast(interval '1.000001' second as decimal(10, 6));
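For illustration, a hypothetical `spark-shell` session against a build that includes this patch (expected values paraphrased from the golden files below); `try_cast` is the fallback the `CAST_OVERFLOW` message recommends:

```scala
// Hypothetical spark-shell session; assumes a Spark build containing this patch.
spark.sql("SELECT CAST(100Y AS INTERVAL HOUR TO SECOND)").show(false)
// expected: one row showing 0 00:01:40 -- the operand counts end-field (second) units

// TRY_CAST returns NULL where CAST would raise CAST_OVERFLOW:
spark.sql("SELECT TRY_CAST(2147483647 AS INTERVAL YEAR)").show(false)
// expected: one NULL row instead of org.apache.spark.SparkArithmeticException
```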
74 changes: 74 additions & 0 deletions sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -840,6 +840,80 @@ org.apache.spark.SparkArithmeticException
[CAST_OVERFLOW] The value INTERVAL '1000000' SECOND of the type "INTERVAL SECOND" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
select cast(1Y as interval year)
-- !query schema
struct<CAST(1 AS INTERVAL YEAR):interval year>
-- !query output
1-0


-- !query
select cast(-122S as interval year to month)
-- !query schema
struct<CAST(-122 AS INTERVAL YEAR TO MONTH):interval year to month>
-- !query output
-10-2


-- !query
select cast(1000 as interval month)
-- !query schema
struct<CAST(1000 AS INTERVAL MONTH):interval month>
-- !query output
83-4


-- !query
select cast(-10L as interval second)
-- !query schema
struct<CAST(-10 AS INTERVAL SECOND):interval second>
-- !query output
-0 00:00:10.000000000


-- !query
select cast(100Y as interval hour to second)
-- !query schema
struct<CAST(100 AS INTERVAL HOUR TO SECOND):interval hour to second>
-- !query output
0 00:01:40.000000000


-- !query
select cast(-1000S as interval day to second)
-- !query schema
struct<CAST(-1000 AS INTERVAL DAY TO SECOND):interval day to second>
-- !query output
-0 00:16:40.000000000


-- !query
select cast(10 as interval day)
-- !query schema
struct<CAST(10 AS INTERVAL DAY):interval day>
-- !query output
10 00:00:00.000000000


-- !query
select cast(2147483647 as interval year)
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
[CAST_OVERFLOW] The value 2147483647 of the type "INT" cannot be cast to "INTERVAL YEAR" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
select cast(-9223372036854775808L as interval day)
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
[CAST_OVERFLOW] The value -9223372036854775808L of the type "BIGINT" cannot be cast to "INTERVAL DAY" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
select cast(interval '-1' year as decimal(10, 0))
-- !query schema
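Note that the plain (non-ANSI) golden file below records exactly the same results, including both `CAST_OVERFLOW` errors: the overflow check for casts to ANSI interval types fires even when `spark.sql.ansi.enabled` is `false`.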
74 changes: 74 additions & 0 deletions sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -668,6 +668,80 @@ org.apache.spark.SparkArithmeticException
[CAST_OVERFLOW] The value INTERVAL '1000000' SECOND of the type "INTERVAL SECOND" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
select cast(1Y as interval year)
-- !query schema
struct<CAST(1 AS INTERVAL YEAR):interval year>
-- !query output
1-0


-- !query
select cast(-122S as interval year to month)
-- !query schema
struct<CAST(-122 AS INTERVAL YEAR TO MONTH):interval year to month>
-- !query output
-10-2


-- !query
select cast(1000 as interval month)
-- !query schema
struct<CAST(1000 AS INTERVAL MONTH):interval month>
-- !query output
83-4


-- !query
select cast(-10L as interval second)
-- !query schema
struct<CAST(-10 AS INTERVAL SECOND):interval second>
-- !query output
-0 00:00:10.000000000


-- !query
select cast(100Y as interval hour to second)
-- !query schema
struct<CAST(100 AS INTERVAL HOUR TO SECOND):interval hour to second>
-- !query output
0 00:01:40.000000000


-- !query
select cast(-1000S as interval day to second)
-- !query schema
struct<CAST(-1000 AS INTERVAL DAY TO SECOND):interval day to second>
-- !query output
-0 00:16:40.000000000


-- !query
select cast(10 as interval day)
-- !query schema
struct<CAST(10 AS INTERVAL DAY):interval day>
-- !query output
10 00:00:00.000000000


-- !query
select cast(2147483647 as interval year)
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
[CAST_OVERFLOW] The value 2147483647 of the type "INT" cannot be cast to "INTERVAL YEAR" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
select cast(-9223372036854775808L as interval day)
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
[CAST_OVERFLOW] The value -9223372036854775808L of the type "BIGINT" cannot be cast to "INTERVAL DAY" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
select cast(interval '-1' year as decimal(10, 0))
-- !query schema