From 6d6b6669415dbb61e50fdcb3e25e55aef32b5e60 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Thu, 11 May 2023 21:07:07 +0300 Subject: [PATCH 1/2] Improve the error message for the DATEADD/DIFF functions --- .../main/resources/error/error-classes.json | 5 + .../sql/catalyst/parser/SqlBaseParser.g4 | 4 +- .../sql/catalyst/parser/AstBuilder.scala | 18 +- .../spark/sql/errors/QueryParsingErrors.scala | 14 ++ .../analyzer-results/ansi/date.sql.out | 88 ++++++++ .../analyzer-results/ansi/timestamp.sql.out | 88 ++++++++ .../sql-tests/analyzer-results/date.sql.out | 88 ++++++++ .../analyzer-results/datetime-legacy.sql.out | 176 ++++++++++++++++ .../analyzer-results/timestamp.sql.out | 88 ++++++++ .../timestampNTZ/timestamp-ansi.sql.out | 88 ++++++++ .../timestampNTZ/timestamp.sql.out | 88 ++++++++ .../test/resources/sql-tests/inputs/date.sql | 6 + .../resources/sql-tests/inputs/timestamp.sql | 6 + .../sql-tests/results/ansi/date.sql.out | 96 +++++++++ .../sql-tests/results/ansi/timestamp.sql.out | 96 +++++++++ .../resources/sql-tests/results/date.sql.out | 96 +++++++++ .../sql-tests/results/datetime-legacy.sql.out | 192 ++++++++++++++++++ .../sql-tests/results/timestamp.sql.out | 96 +++++++++ .../timestampNTZ/timestamp-ansi.sql.out | 96 +++++++++ .../results/timestampNTZ/timestamp.sql.out | 96 +++++++++ 20 files changed, 1521 insertions(+), 4 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index dde165e5fa9e9..fa838a6da761e 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -1051,6 +1051,11 @@ "expects a binary value with 16, 24 or 32 bytes, but got bytes." ] }, + "DATETIME_UNIT" : { + "message" : [ + "expects one of the units without quotes YEAR, QUARTER, MONTH, WEEK, DAY, DAYOFYEAR, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, but got the string literal <invalidValue>." + ] + }, "PATTERN" : { "message" : [ "." ] diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 index a112b6e31fefe..d5027ba79c0ea 100644 --- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 +++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 @@ -892,8 +892,8 @@ datetimeUnit primaryExpression : name=(CURRENT_DATE | CURRENT_TIMESTAMP | CURRENT_USER | USER) #currentLike - | name=(TIMESTAMPADD | DATEADD) LEFT_PAREN unit=datetimeUnit COMMA unitsAmount=valueExpression COMMA timestamp=valueExpression RIGHT_PAREN #timestampadd - | name=(TIMESTAMPDIFF | DATEDIFF) LEFT_PAREN unit=datetimeUnit COMMA startTimestamp=valueExpression COMMA endTimestamp=valueExpression RIGHT_PAREN #timestampdiff + | name=(TIMESTAMPADD | DATEADD) LEFT_PAREN (unit=datetimeUnit | invalidUnit=stringLit) COMMA unitsAmount=valueExpression COMMA timestamp=valueExpression RIGHT_PAREN #timestampadd + | name=(TIMESTAMPDIFF | DATEDIFF) LEFT_PAREN (unit=datetimeUnit | invalidUnit=stringLit) COMMA startTimestamp=valueExpression COMMA endTimestamp=valueExpression RIGHT_PAREN #timestampdiff | CASE whenClause+ (ELSE elseExpression=expression)? END #searchedCase | CASE value=expression whenClause+ (ELSE elseExpression=expression)?
END #simpleCase | name=(CAST | TRY_CAST) LEFT_PAREN expression AS dataType RIGHT_PAREN #cast diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index cb06fc31f0e7f..4761836cbad81 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -4943,14 +4943,28 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit * Create a TimestampAdd expression. */ override def visitTimestampadd(ctx: TimestampaddContext): Expression = withOrigin(ctx) { - TimestampAdd(ctx.unit.getText, expression(ctx.unitsAmount), expression(ctx.timestamp)) + if (ctx.invalidUnit != null) { + throw QueryParsingErrors.invalidDatetimeUnitError( + ctx, + ctx.name.getText, + ctx.invalidUnit.getText) + } else { + TimestampAdd(ctx.unit.getText, expression(ctx.unitsAmount), expression(ctx.timestamp)) + } } /** * Create a TimestampDiff expression. */ override def visitTimestampdiff(ctx: TimestampdiffContext): Expression = withOrigin(ctx) { - TimestampDiff(ctx.unit.getText, expression(ctx.startTimestamp), expression(ctx.endTimestamp)) + if (ctx.invalidUnit != null) { + throw QueryParsingErrors.invalidDatetimeUnitError( + ctx, + ctx.name.getText, + ctx.invalidUnit.getText) + } else { + TimestampDiff(ctx.unit.getText, expression(ctx.startTimestamp), expression(ctx.endTimestamp)) + } } /** diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala index 228da52f14a2f..6240bbc07cf70 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala @@ -669,4 +669,18 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { ctx ) } + + def invalidDatetimeUnitError( + ctx: ParserRuleContext, + functionName: String, + invalidValue: String): Throwable = { + new ParseException( + errorClass = "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + messageParameters = Map( + "functionName" -> toSQLId(functionName), + "parameter" -> toSQLId("unit"), + "invalidValue" -> invalidValue), + ctx + ) + } } diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out index 3929e02dd1b46..a2cb07b67ea91 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out @@ -807,6 +807,50 @@ select dateadd(YEAR, 1, date'2022-02-25') [Analyzer test output redacted due to nondeterminism] +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select dateadd('QUARTER', 5, date'2022-02-25') +-- !query analysis 
+org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "dateadd('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query analysis @@ -865,3 +909,47 @@ select datediff(QUARTER, date'2022-02-25', date'2023-05-25') select datediff(YEAR, date'2022-02-25', date'2023-02-25') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 102, + "fragment" : "datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out index 65baeb48e1167..c4858ac10ae74 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out @@ -765,6 +765,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select 
timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -787,3 +831,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out index baa7cc3554087..0501d1b86585a 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out @@ -882,6 +882,50 @@ select dateadd(YEAR, 1, date'2022-02-25') [Analyzer test output redacted due to nondeterminism] +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select dateadd('QUARTER', 5, date'2022-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "dateadd('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query analysis @@ -940,3 +984,47 @@ select datediff(QUARTER, date'2022-02-25', date'2023-05-25') select datediff(YEAR, date'2022-02-25', date'2023-02-25') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query analysis 
+org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 102, + "fragment" : "datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out index 9eb83778af839..a4e5e81933792 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out @@ -882,6 +882,50 @@ select dateadd(YEAR, 1, date'2022-02-25') [Analyzer test output redacted due to nondeterminism] +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select dateadd('QUARTER', 5, date'2022-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "dateadd('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query analysis @@ -942,6 +986,50 @@ select datediff(YEAR, date'2022-02-25', date'2023-02-25') [Analyzer test output redacted due to nondeterminism] +-- !query +select datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 102, + "fragment" : "datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', 
date'2022-02-25', date'2023-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} + + -- !query select timestamp '2019-01-01\t' -- !query analysis @@ -1780,6 +1868,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -1802,3 +1934,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out index 25e7cfcbcbb1f..856c317d13745 100644 --- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out @@ -837,6 +837,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -859,3 +903,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out index 630e49bfaa506..5c42401acc353 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out @@ -782,6 +782,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis 
+org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -806,3 +850,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out index d6e29e72682c6..18d05088b566b 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out @@ -842,6 +842,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, 
timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -866,3 +910,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/inputs/date.sql b/sql/core/src/test/resources/sql-tests/inputs/date.sql index 163855069f0a7..36b23561231fd 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/date.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/date.sql @@ -154,6 +154,9 @@ select dateadd(MONTH, -1, timestamp'2022-02-25 01:02:03'); select dateadd(QUARTER, 5, date'2022-02-25'); select dateadd(YEAR, 1, date'2022-02-25'); +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123'); +select dateadd('QUARTER', 5, date'2022-02-25'); + -- Get the difference between timestamps or dates in the specified units select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001'); select datediff(MILLISECOND, timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455'); @@ -165,3 +168,6 @@ select datediff(WEEK, timestamp'2022-02-25 01:02:03', timestamp'2022-01-28 01:02 select datediff(MONTH, timestamp'2022-02-25 01:02:03', timestamp'2022-01-25 01:02:03'); select datediff(QUARTER, date'2022-02-25', date'2023-05-25'); select datediff(YEAR, date'2022-02-25', date'2023-02-25'); + +select datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455'); +select datediff('YEAR', date'2022-02-25', date'2023-02-25'); diff --git a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql 
b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql index b0d958a24b149..163b734164ed6 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql @@ -150,8 +150,14 @@ select timestampadd(MINUTE, 58, timestamp'2022-02-14 01:02:03'); select timestampadd(YEAR, 1, date'2022-02-15'); select timestampadd(SECOND, -1, date'2022-02-15'); +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03'); +select timestampadd('SECOND', -1, date'2022-02-15'); + -- Get the difference between timestamps in the specified units select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03'); select timestampdiff(MINUTE, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03'); select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15'); select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59'); + +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03'); +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15'); diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out index 83e9e9ba82be2..aee570130f109 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out @@ -1041,6 +1041,54 @@ struct 2023-02-25 00:00:00 +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select dateadd('QUARTER', 5, date'2022-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "dateadd('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query schema @@ -1119,3 +1167,51 @@ select datediff(YEAR, date'2022-02-25', date'2023-02-25') struct -- !query output 1 + + +-- !query +select datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 102, + "fragment" : "datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', 
date'2022-02-25', date'2023-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out index 3afae8ab91aa3..7433fcd52b9c7 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out @@ -1011,6 +1011,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -1041,3 +1089,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git 
a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out index da2c6e7c5d9e4..0c7afd84bc0d3 100644 --- a/sql/core/src/test/resources/sql-tests/results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out @@ -1047,6 +1047,54 @@ struct 2023-02-25 00:00:00 +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select dateadd('QUARTER', 5, date'2022-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "dateadd('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query schema @@ -1125,3 +1173,51 @@ select datediff(YEAR, date'2022-02-25', date'2023-02-25') struct -- !query output 1 + + +-- !query +select datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 102, + "fragment" : "datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index cc7e385585a58..1bc751f627cd4 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -1020,6 +1020,54 @@ struct 2023-02-25 00:00:00 +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : 
"INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select dateadd('QUARTER', 5, date'2022-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "dateadd('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query schema @@ -1100,6 +1148,54 @@ struct 1 +-- !query +select datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 102, + "fragment" : "datediff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} + + -- !query select timestamp '2019-01-01\t' -- !query schema @@ -2054,6 +2150,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- 
!query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -2084,3 +2228,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out index 9047b64e01d05..8a49774f74f4f 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out @@ -1007,6 +1007,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -1037,3 +1085,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : 
"`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out index 53d86dfd51851..88ba5314b8ad5 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out @@ -1002,6 +1002,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -1032,3 +1080,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : 
"INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out index 0c5f1aeb78b6f..545b49445e421 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out @@ -983,6 +983,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -1013,3 +1061,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} From 166758c87c7752b37885c891770d1a484a6b63a1 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Fri, 12 May 2023 08:35:54 +0300 Subject: [PATCH 2/2] Trigger build