diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 89c384ab2c8b8..7a2b4e63e133a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -246,7 +246,7 @@ class Analyzer(
       ResolveLambdaVariables(conf) ::
       ResolveTimeZone(conf) ::
       ResolveRandomSeed ::
-      ResolveBinaryArithmetic(conf) ::
+      ResolveBinaryArithmetic ::
       TypeCoercion.typeCoercionRules(conf) ++
       extendedResolutionRules : _*),
     Batch("Post-Hoc Resolution", Once, postHocResolutionRules: _*),
@@ -268,17 +268,21 @@ class Analyzer(
   /**
    * For [[Add]]:
    * 1. if both side are interval, stays the same;
-   * 2. else if one side is interval, turns it to [[TimeAdd]];
-   * 3. else if one side is date, turns it to [[DateAdd]] ;
-   * 4. else stays the same.
+   * 2. else if one side is date and the other is interval,
+   *    turns it to [[DateAddInterval]];
+   * 3. else if one side is interval, turns it to [[TimeAdd]];
+   * 4. else if one side is date, turns it to [[DateAdd]];
+   * 5. else stays the same.
    *
    * For [[Subtract]]:
    * 1. if both side are interval, stays the same;
-   * 2. else if the right side is an interval, turns it to [[TimeSub]];
-   * 3. else if one side is timestamp, turns it to [[SubtractTimestamps]];
-   * 4. else if the right side is date, turns it to [[DateDiff]]/[[SubtractDates]];
-   * 5. else if the left side is date, turns it to [[DateSub]];
-   * 6. else turns it to stays the same.
+   * 2. else if the left side is date and the right side is interval,
+   *    turns it to [[DateAddInterval(l, -r)]];
+   * 3. else if the right side is an interval, turns it to [[TimeSub]];
+   * 4. else if one side is timestamp, turns it to [[SubtractTimestamps]];
+   * 5. else if the right side is date, turns it to [[DateDiff]]/[[SubtractDates]];
+   * 6. else if the left side is date, turns it to [[DateSub]];
+   * 7. else stays the same.
    *
    * For [[Multiply]]:
    * 1. If one side is interval, turns it to [[MultiplyInterval]];
@@ -288,12 +292,14 @@ class Analyzer(
    * 1. If the left side is interval, turns it to [[DivideInterval]];
    * 2. otherwise, stays the same.
    */
-  case class ResolveBinaryArithmetic(conf: SQLConf) extends Rule[LogicalPlan] {
+  object ResolveBinaryArithmetic extends Rule[LogicalPlan] {
     override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
       case p: LogicalPlan => p.transformExpressionsUp {
         case a @ Add(l, r) if a.childrenResolved => (l.dataType, r.dataType) match {
           case (CalendarIntervalType, CalendarIntervalType) => a
+          case (DateType, CalendarIntervalType) => DateAddInterval(l, r)
           case (_, CalendarIntervalType) => Cast(TimeAdd(l, r), l.dataType)
+          case (CalendarIntervalType, DateType) => DateAddInterval(r, l)
           case (CalendarIntervalType, _) => Cast(TimeAdd(r, l), r.dataType)
           case (DateType, dt) if dt != StringType => DateAdd(l, r)
           case (dt, DateType) if dt != StringType => DateAdd(r, l)
@@ -301,6 +307,7 @@ class Analyzer(
         }
         case s @ Subtract(l, r) if s.childrenResolved => (l.dataType, r.dataType) match {
           case (CalendarIntervalType, CalendarIntervalType) => s
+          case (DateType, CalendarIntervalType) => DateAddInterval(l, UnaryMinus(r))
           case (_, CalendarIntervalType) => Cast(TimeSub(l, r), l.dataType)
           case (TimestampType, _) => SubtractTimestamps(l, r)
           case (_, TimestampType) => SubtractTimestamps(l, r)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 7ffed9d32aaeb..b131b24a49703 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -34,6 +34,7 @@ import org.apache.spark.sql.catalyst.util.{DateTimeUtils, LegacyDateFormats, Tim
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils._
 import org.apache.spark.sql.catalyst.util.LegacyDateFormats.SIMPLE_DATE_FORMAT
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}

@@ -1196,6 +1197,68 @@ case class TimeAdd(start: Expression, interval: Expression, timeZoneId: Option[S
   }
 }

+/**
+ * Adds a date and an interval.
+ *
+ * When ANSI mode is on, the microseconds part of the interval must be 0; otherwise a runtime
+ * [[IllegalArgumentException]] will be raised.
+ * When ANSI mode is off and the microseconds part of the interval is 0, we add the interval
+ * directly to the date for better performance. If the microseconds part is not 0, the date is
+ * first converted to a timestamp and the whole interval is added to that timestamp.
+ */ +case class DateAddInterval( + start: Expression, + interval: Expression, + timeZoneId: Option[String] = None, + ansiEnabled: Boolean = SQLConf.get.ansiEnabled) + extends BinaryExpression with ExpectsInputTypes with TimeZoneAwareExpression { + + override def left: Expression = start + override def right: Expression = interval + + override def toString: String = s"$left + $right" + override def sql: String = s"${left.sql} + ${right.sql}" + override def inputTypes: Seq[AbstractDataType] = Seq(DateType, CalendarIntervalType) + + override def dataType: DataType = DateType + + override def nullSafeEval(start: Any, interval: Any): Any = { + val itvl = interval.asInstanceOf[CalendarInterval] + if (ansiEnabled || itvl.microseconds == 0) { + DateTimeUtils.dateAddInterval(start.asInstanceOf[Int], itvl) + } else { + val startTs = DateTimeUtils.epochDaysToMicros(start.asInstanceOf[Int], zoneId) + val resultTs = DateTimeUtils.timestampAddInterval( + startTs, itvl.months, itvl.days, itvl.microseconds, zoneId) + DateTimeUtils.microsToEpochDays(resultTs, zoneId) + } + } + + override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { + val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") + nullSafeCodeGen(ctx, ev, (sd, i) => if (ansiEnabled) { + s"""${ev.value} = $dtu.dateAddInterval($sd, $i);""" + } else { + val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName) + val startTs = ctx.freshName("startTs") + val resultTs = ctx.freshName("resultTs") + s""" + |if ($i.microseconds == 0) { + | ${ev.value} = $dtu.dateAddInterval($sd, $i); + |} else { + | long $startTs = $dtu.epochDaysToMicros($sd, $zid); + | long $resultTs = + | $dtu.timestampAddInterval($startTs, $i.months, $i.days, $i.microseconds, $zid); + | ${ev.value} = $dtu.microsToEpochDays($resultTs, $zid); + |} + |""".stripMargin + }) + } + + override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = + copy(timeZoneId = Option(timeZoneId)) +} + /** * This is a common function for databases supporting TIMESTAMP WITHOUT TIMEZONE. This function * takes a timestamp which is timezone-agnostic, and interprets it as a timestamp in UTC, and diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala index 42c3eaafff56e..21a478aaf06a6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala @@ -618,6 +618,22 @@ object DateTimeUtils { instantToMicros(resultTimestamp.toInstant) } + /** + * Add the date and the interval's months and days. + * Returns a date value, expressed in days since 1.1.1970. + * + * @throws DateTimeException if the result exceeds the supported date range + * @throws IllegalArgumentException if the interval has `microseconds` part + */ + def dateAddInterval( + start: SQLDate, + interval: CalendarInterval): SQLDate = { + require(interval.microseconds == 0, + "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date") + val ld = LocalDate.ofEpochDay(start).plusMonths(interval.months).plusDays(interval.days) + localDateToDays(ld) + } + /** * Returns number of months between time1 and time2. time1 and time2 are expressed in * microseconds since 1.1.1970. If time1 is later than time2, the result is positive. 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala index 1f6d0e48a14a8..90939f3dd9354 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala @@ -27,6 +27,7 @@ import org.apache.spark.{SparkFunSuite, SparkUpgradeException} import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection import org.apache.spark.sql.catalyst.util.{DateTimeUtils, IntervalUtils, TimestampFormatter} +import org.apache.spark.sql.catalyst.util.DateTimeConstants._ import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ @@ -358,6 +359,40 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { checkConsistencyBetweenInterpretedAndCodegen(DateAdd, DateType, IntegerType) } + test("date add interval") { + val d = Date.valueOf("2016-02-28") + Seq("true", "false") foreach { flag => + withSQLConf((SQLConf.ANSI_ENABLED.key, flag)) { + checkEvaluation( + DateAddInterval(Literal(d), Literal(new CalendarInterval(0, 1, 0))), + DateTimeUtils.fromJavaDate(Date.valueOf("2016-02-29"))) + checkEvaluation( + DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 0))), + DateTimeUtils.fromJavaDate(Date.valueOf("2016-03-29"))) + checkEvaluation(DateAddInterval(Literal(d), Literal.create(null, CalendarIntervalType)), + null) + checkEvaluation(DateAddInterval(Literal.create(null, DateType), + Literal(new CalendarInterval(1, 1, 0))), + null) + } + } + + withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) { + checkExceptionInExpression[IllegalArgumentException]( + DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))), + "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date") + } + + withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) { + checkEvaluation( + DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25))), + DateTimeUtils.fromJavaDate(Date.valueOf("2016-03-29"))) + checkEvaluation( + DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 * MICROS_PER_HOUR))), + DateTimeUtils.fromJavaDate(Date.valueOf("2016-03-30"))) + } + } + test("date_sub") { checkEvaluation( DateSub(Literal(Date.valueOf("2015-01-01")), Literal(1.toByte)), diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala index 492d97ba9d524..ab0ef22bb746f 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala @@ -176,5 +176,9 @@ class ExpressionSQLBuilderSuite extends SparkFunSuite { TimeSub('a, interval), "`a` - INTERVAL '1 hours'" ) + checkSQL( + DateAddInterval('a, interval), + "`a` + INTERVAL '1 hours'" + ) } } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala index bc6f68297cae2..807ec7dafb568 100644 --- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala @@ -30,7 +30,7 @@ import org.apache.spark.sql.catalyst.plans.SQLHelper import org.apache.spark.sql.catalyst.util.DateTimeConstants._ import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._ import org.apache.spark.sql.catalyst.util.DateTimeUtils._ -import org.apache.spark.unsafe.types.UTF8String +import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper { @@ -391,6 +391,14 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper { assert(dateAddMonths(input, -13) === days(1996, 1, 28)) } + test("date add interval with day precision") { + val input = days(1997, 2, 28, 10, 30) + assert(dateAddInterval(input, new CalendarInterval(36, 0, 0)) === days(2000, 2, 28)) + assert(dateAddInterval(input, new CalendarInterval(36, 47, 0)) === days(2000, 4, 15)) + assert(dateAddInterval(input, new CalendarInterval(-13, 0, 0)) === days(1996, 1, 28)) + intercept[IllegalArgumentException](dateAddInterval(input, new CalendarInterval(36, 47, 1))) + } + test("timestamp add months") { val ts1 = date(1997, 2, 28, 10, 30, 0) val ts2 = date(2000, 2, 28, 10, 30, 0, 123000) diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/datetime.sql b/sql/core/src/test/resources/sql-tests/inputs/ansi/datetime.sql new file mode 100644 index 0000000000000..58ecf80637883 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/inputs/ansi/datetime.sql @@ -0,0 +1 @@ +--IMPORT datetime.sql diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out new file mode 100644 index 0000000000000..81a73a6377715 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out @@ -0,0 +1,732 @@ +-- Automatically generated by SQLQueryTestSuite +-- Number of queries: 85 + + +-- !query +select current_date = current_date(), current_timestamp = current_timestamp() +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'current_date'(line 1, pos 22) + +== SQL == +select current_date = current_date(), current_timestamp = current_timestamp() +----------------------^^^ + + +-- !query +select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd') +-- !query schema +struct +-- !query output +NULL 2016-12-31 2016-12-31 + + +-- !query +select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd') +-- !query schema +struct +-- !query output +NULL 2016-12-31 00:12:00 2016-12-31 00:00:00 + + +-- !query +select dayofweek('2007-02-03'), dayofweek('2009-07-30'), dayofweek('2017-05-27'), dayofweek(null), dayofweek('1582-10-15 13:10:15') +-- !query schema +struct +-- !query output +7 5 7 NULL 6 + + +-- !query +create temporary view ttf1 as select * from values + (1, 2), + (2, 3) + as ttf1(current_date, current_timestamp) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'current_date'(line 4, pos 10) + +== SQL == +create temporary view ttf1 as select * from values + (1, 2), + (2, 3) + as ttf1(current_date, current_timestamp) +----------^^^ + + +-- !query +select current_date, current_timestamp from ttf1 +-- !query schema 
+struct<> +-- !query output +org.apache.spark.sql.AnalysisException +Table or view not found: ttf1; line 1 pos 44 + + +-- !query +create temporary view ttf2 as select * from values + (1, 2), + (2, 3) + as ttf2(a, b) +-- !query schema +struct<> +-- !query output + + + +-- !query +select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2 +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'current_date'(line 1, pos 22) + +== SQL == +select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2 +----------------------^^^ + + +-- !query +select a, b from ttf2 order by a, current_date +-- !query schema +struct +-- !query output +1 2 +2 3 + + +-- !query +select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), weekday(null), weekday('1582-10-15 13:10:15') +-- !query schema +struct +-- !query output +5 3 5 NULL 4 + + +-- !query +select year('1500-01-01'), month('1500-01-01'), dayOfYear('1500-01-01') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'year'(line 1, pos 7) + +== SQL == +select year('1500-01-01'), month('1500-01-01'), dayOfYear('1500-01-01') +-------^^^ + + +-- !query +select date '2019-01-01\t' +-- !query schema +struct +-- !query output +2019-01-01 + + +-- !query +select timestamp '2019-01-01\t' +-- !query schema +struct +-- !query output +2019-01-01 00:00:00 + + +-- !query +select timestamp'2011-11-11 11:11:11' + interval '2' day +-- !query schema +struct +-- !query output +2011-11-13 11:11:11 + + +-- !query +select timestamp'2011-11-11 11:11:11' - interval '2' day +-- !query schema +struct +-- !query output +2011-11-09 11:11:11 + + +-- !query +select date'2011-11-11 11:11:11' + interval '2' second +-- !query schema +struct<> +-- !query output +java.lang.IllegalArgumentException +requirement failed: Cannot add hours, minutes or seconds, milliseconds, microseconds to a date + + +-- !query +select date'2011-11-11 11:11:11' - interval '2' second +-- !query schema +struct<> +-- !query output +java.lang.IllegalArgumentException +requirement failed: Cannot add hours, minutes or seconds, milliseconds, microseconds to a date + + +-- !query +select '2011-11-11' - interval '2' day +-- !query schema +struct +-- !query output +2011-11-09 00:00:00 + + +-- !query +select '2011-11-11 11:11:11' - interval '2' second +-- !query schema +struct +-- !query output +2011-11-11 11:11:09 + + +-- !query +select '1' - interval '2' second +-- !query schema +struct +-- !query output +NULL + + +-- !query +select 1 - interval '2' second +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve '1 - INTERVAL '2 seconds'' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7 + + +-- !query +select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678' +-- !query schema +struct +-- !query output +2078 hours 48 minutes 47.654322 seconds + + +-- !query +select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01' +-- !query schema +struct +-- !query output +-2078 hours -48 minutes -47.654322 seconds + + +-- !query +select timestamp'2019-10-06 10:11:12.345678' - null +-- !query schema +struct +-- !query output +NULL + + +-- !query +select null - timestamp'2019-10-06 10:11:12.345678' +-- !query schema +struct +-- !query output +NULL + + +-- !query +select 
date_add('2011-11-11', 1Y) +-- !query schema +struct +-- !query output +2011-11-12 + + +-- !query +select date_add('2011-11-11', 1S) +-- !query schema +struct +-- !query output +2011-11-12 + + +-- !query +select date_add('2011-11-11', 1) +-- !query schema +struct +-- !query output +2011-11-12 + + +-- !query +select date_add('2011-11-11', 1L) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 + + +-- !query +select date_add('2011-11-11', 1.0) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 + + +-- !query +select date_add('2011-11-11', 1E1) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 + + +-- !query +select date_add('2011-11-11', '1') +-- !query schema +struct +-- !query output +2011-11-12 + + +-- !query +select date_add('2011-11-11', '1.2') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +The second argument of 'date_add' function needs to be an integer.; + + +-- !query +select date_add(date'2011-11-11', 1) +-- !query schema +struct +-- !query output +2011-11-12 + + +-- !query +select date_add(timestamp'2011-11-11', 1) +-- !query schema +struct +-- !query output +2011-11-12 + + +-- !query +select date_sub(date'2011-11-11', 1) +-- !query schema +struct +-- !query output +2011-11-10 + + +-- !query +select date_sub(date'2011-11-11', '1') +-- !query schema +struct +-- !query output +2011-11-10 + + +-- !query +select date_sub(date'2011-11-11', '1.2') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +The second argument of 'date_sub' function needs to be an integer.; + + +-- !query +select date_sub(timestamp'2011-11-11', 1) +-- !query schema +struct +-- !query output +2011-11-10 + + +-- !query +select date_sub(null, 1) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select date_sub(date'2011-11-11', null) +-- !query schema +struct +-- !query output +NULL + + +-- !query +select date'2011-11-11' + 1E1 +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 + + +-- !query +select date'2011-11-11' + '1' +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 + + +-- !query +select null + date '2001-09-28' +-- !query schema +struct +-- !query output +NULL + + +-- !query +select date '2001-09-28' + 7Y +-- !query schema +struct +-- !query output +2001-10-05 + + +-- !query +select 7S + date '2001-09-28' +-- !query schema +struct +-- !query output +2001-10-05 + + +-- !query +select 
date '2001-10-01' - 7 +-- !query schema +struct +-- !query output +2001-09-24 + + +-- !query +select date '2001-10-01' - '7' +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve 'date_sub(DATE '2001-10-01', CAST('7' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('7' AS DOUBLE)' is of double type.; line 1 pos 7 + + +-- !query +select date '2001-09-28' + null +-- !query schema +struct +-- !query output +NULL + + +-- !query +select date '2001-09-28' - null +-- !query schema +struct +-- !query output +NULL + + +-- !query +create temp view v as select '1' str +-- !query schema +struct<> +-- !query output + + + +-- !query +select date_add('2011-11-11', str) from v +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), v.`str`)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'v.`str`' is of string type.; line 1 pos 7 + + +-- !query +select date_sub('2011-11-11', str) from v +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.`str`)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'v.`str`' is of string type.; line 1 pos 7 + + +-- !query +select null - date '2019-10-06' +-- !query schema +struct +-- !query output +NULL + + +-- !query +select date '2001-10-01' - date '2001-09-28' +-- !query schema +struct +-- !query output +3 days + + +-- !query +select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.1 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.12 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +2019-10-06 03:11:12.123 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.1234 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +2019-10-06 08:11:12.12345 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.123456 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.123456 + + +-- !query +select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]') +-- 
!query schema +struct +-- !query output +2019-10-06 10:11:12.1234 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.123 + + +-- !query +select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12 + + +-- !query +select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.12 + + +-- !query +select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]') +-- !query schema +struct +-- !query output +2019-10-06 10:11:00 + + +-- !query +select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS") +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.12345 + + +-- !query +select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.1234 + + +-- !query +select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") +-- !query schema +struct +-- !query output +NULL + + +-- !query +select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm") +-- !query schema +struct +-- !query output +0019-10-06 10:11:12.1234 + + +-- !query +select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'") +-- !query schema +struct +-- !query output +2019-10-06 00:00:00 + + +-- !query +select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd") +-- !query schema +struct +-- !query output +2019-10-06 00:00:00 + + +-- !query +select date_format(timestamp '2019-10-06', 'yyyy-MM-dd uuee') +-- !query schema +struct<> +-- !query output +java.lang.IllegalArgumentException +Illegal pattern character: e + + +-- !query +select date_format(timestamp '2019-10-06', 'yyyy-MM-dd uucc') +-- !query schema +struct<> +-- !query output +java.lang.IllegalArgumentException +Illegal pattern character: c + + +-- !query +select date_format(timestamp '2019-10-06', 'yyyy-MM-dd uuuu') +-- !query schema +struct +-- !query output +2019-10-06 Sunday + + +-- !query +select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS") +-- !query schema +struct +-- !query output +2019-10-06 10:11:12.12 + + +-- !query +select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''") +-- !query schema +struct +-- !query output +2019-10-06 10:11:12 + + +-- !query +select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss") +-- !query schema +struct +-- !query output +2019-10-06 10:11:12 + + +-- !query +select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss") +-- !query schema +struct +-- !query output +2019-10-06 10:11:12 diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index d4238c73e39e1..e8201d46924da 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -689,7 +689,7 @@ select interval '2-2' year to month + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 @@ -732,9 +732,10 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query 
schema -struct +struct<> -- !query output -2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 +java.lang.IllegalArgumentException +requirement failed: Cannot add hours, minutes or seconds, milliseconds, microseconds to a date -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out index aec04da4dd54d..2e600850c48b9 100755 --- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out @@ -134,7 +134,7 @@ struct +struct -- !query output 2011-11-11 @@ -142,7 +142,7 @@ struct +struct -- !query output 2011-11-10 diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index 0d758ca1f24ca..84e3fb3c238a4 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -668,7 +668,7 @@ select interval '2-2' year to month + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 @@ -711,7 +711,7 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query schema -struct +struct -- !query output 2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index d5c27ade8e152..7b1fcddfdac7d 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -118,7 +118,7 @@ struct +struct -- !query output 2017-12-13 @@ -231,7 +231,7 @@ struct +struct -- !query output 2017-12-13 @@ -344,6 +344,6 @@ struct +struct -- !query output 2017-12-09
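As a reviewer-facing illustration of the semantics above (not part of the patch): the new `DateAddInterval` expression picks between a day-precision path and a timestamp round trip, depending on ANSI mode and on whether the interval carries a microseconds part. The standalone sketch below mirrors those two paths with plain `java.time`; the object and method names (`DateAddIntervalSketch`, `dateAddIntervalViaTimestamp`) are invented for the example, and the non-ANSI branch only approximates Spark's `epochDaysToMicros`/`timestampAddInterval`/`microsToEpochDays` round trip with a `ZonedDateTime` in a fixed UTC zone.

```scala
import java.time.{LocalDate, ZoneId}

object DateAddIntervalSketch {
  // Day-precision path (ANSI mode, or an interval with no microseconds part):
  // add months and days directly on the date, as DateTimeUtils.dateAddInterval does.
  def dateAddInterval(epochDay: Int, months: Int, days: Int, micros: Long): Int = {
    require(micros == 0,
      "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date")
    LocalDate.ofEpochDay(epochDay).plusMonths(months).plusDays(days).toEpochDay.toInt
  }

  // Non-ANSI fallback for intervals that do have a time part: convert the date to a
  // timestamp in the session time zone, add the whole interval, and truncate back to
  // a date (an approximation of the nullSafeEval branch in the patch).
  def dateAddIntervalViaTimestamp(
      epochDay: Int, months: Int, days: Int, micros: Long, zone: ZoneId): Int = {
    LocalDate.ofEpochDay(epochDay)
      .atStartOfDay(zone)
      .plusMonths(months)
      .plusDays(days)
      .plusNanos(micros * 1000L)
      .toLocalDate
      .toEpochDay
      .toInt
  }

  def main(args: Array[String]): Unit = {
    val d = LocalDate.parse("2016-02-28").toEpochDay.toInt
    // interval '1 month 1 day': pure day-precision arithmetic => 2016-03-29
    println(LocalDate.ofEpochDay(dateAddInterval(d, 1, 1, 0L)))
    // interval '1 month 1 day 25 hours' with ANSI off: timestamp round trip => 2016-03-30
    val microsPerHour = 60L * 60 * 1000 * 1000
    println(LocalDate.ofEpochDay(
      dateAddIntervalViaTimestamp(d, 1, 1, 25 * microsPerHour, ZoneId.of("UTC"))))
  }
}
```

The two printed dates match the new `DateExpressionsSuite` cases above; with ANSI mode on, the second interval would instead fail with "Cannot add hours, minutes or seconds, milliseconds, microseconds to a date", which is what the updated `ansi/interval.sql.out` golden file records.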