diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index dde165e5fa9e9..fa838a6da761e 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1051,6 +1051,11 @@
           "expects a binary value with 16, 24 or 32 bytes, but got <actualLength> bytes."
         ]
       },
+      "DATETIME_UNIT" : {
+        "message" : [
+          "expects one of the units without quotes YEAR, QUARTER, MONTH, WEEK, DAY, DAYOFYEAR, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, but got the string literal <invalidValue>."
+        ]
+      },
       "PATTERN" : {
         "message" : [
           "<value>."
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index 2bc7943034330..591b0839ac74f 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -892,8 +892,8 @@ datetimeUnit

 primaryExpression
     : name=(CURRENT_DATE | CURRENT_TIMESTAMP | CURRENT_USER | USER) #currentLike
-    | name=(TIMESTAMPADD | DATEADD | DATE_ADD) LEFT_PAREN unit=datetimeUnit COMMA unitsAmount=valueExpression COMMA timestamp=valueExpression RIGHT_PAREN #timestampadd
-    | name=(TIMESTAMPDIFF | DATEDIFF | DATE_DIFF) LEFT_PAREN unit=datetimeUnit COMMA startTimestamp=valueExpression COMMA endTimestamp=valueExpression RIGHT_PAREN #timestampdiff
+    | name=(TIMESTAMPADD | DATEADD | DATE_ADD) LEFT_PAREN (unit=datetimeUnit | invalidUnit=stringLit) COMMA unitsAmount=valueExpression COMMA timestamp=valueExpression RIGHT_PAREN #timestampadd
+    | name=(TIMESTAMPDIFF | DATEDIFF | DATE_DIFF) LEFT_PAREN (unit=datetimeUnit | invalidUnit=stringLit) COMMA startTimestamp=valueExpression COMMA endTimestamp=valueExpression RIGHT_PAREN #timestampdiff
     | CASE whenClause+ (ELSE elseExpression=expression)? END #searchedCase
     | CASE value=expression whenClause+ (ELSE elseExpression=expression)? END #simpleCase
     | name=(CAST | TRY_CAST) LEFT_PAREN expression AS dataType RIGHT_PAREN #cast
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index cb06fc31f0e7f..4761836cbad81 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -4943,14 +4943,28 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
    * Create a TimestampAdd expression.
    */
   override def visitTimestampadd(ctx: TimestampaddContext): Expression = withOrigin(ctx) {
-    TimestampAdd(ctx.unit.getText, expression(ctx.unitsAmount), expression(ctx.timestamp))
+    if (ctx.invalidUnit != null) {
+      throw QueryParsingErrors.invalidDatetimeUnitError(
+        ctx,
+        ctx.name.getText,
+        ctx.invalidUnit.getText)
+    } else {
+      TimestampAdd(ctx.unit.getText, expression(ctx.unitsAmount), expression(ctx.timestamp))
+    }
   }

   /**
    * Create a TimestampDiff expression.
    */
   override def visitTimestampdiff(ctx: TimestampdiffContext): Expression = withOrigin(ctx) {
-    TimestampDiff(ctx.unit.getText, expression(ctx.startTimestamp), expression(ctx.endTimestamp))
+    if (ctx.invalidUnit != null) {
+      throw QueryParsingErrors.invalidDatetimeUnitError(
+        ctx,
+        ctx.name.getText,
+        ctx.invalidUnit.getText)
+    } else {
+      TimestampDiff(ctx.unit.getText, expression(ctx.startTimestamp), expression(ctx.endTimestamp))
+    }
   }

   /**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 228da52f14a2f..6240bbc07cf70 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -669,4 +669,18 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase {
       ctx
     )
   }
+
+  def invalidDatetimeUnitError(
+      ctx: ParserRuleContext,
+      functionName: String,
+      invalidValue: String): Throwable = {
+    new ParseException(
+      errorClass = "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+      messageParameters = Map(
+        "functionName" -> toSQLId(functionName),
+        "parameter" -> toSQLId("unit"),
+        "invalidValue" -> invalidValue),
+      ctx
+    )
+  }
 }
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out
index 9f287223a0dde..28fe86d930fa0 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out
@@ -807,6 +807,50 @@ select date_add(YEAR, 1, date'2022-02-25')
 [Analyzer test output redacted due to nondeterminism]


+-- !query
+select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`dateadd`",
+    "invalidValue" : "'MICROSECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 71,
+    "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')"
+  } ]
+}
+
+
+-- !query
+select date_add('QUARTER', 5, date'2022-02-25')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_add`",
+    "invalidValue" : "'QUARTER'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 47,
+    "fragment" : "date_add('QUARTER', 5, date'2022-02-25')"
+  } ]
+}
+
+
 -- !query
 select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001')
 -- !query analysis
@@ -865,3 +909,47 @@ select datediff(QUARTER, date'2022-02-25', date'2023-05-25')
 select date_diff(YEAR, date'2022-02-25', date'2023-02-25')
 -- !query analysis
 [Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_diff`",
+    "invalidValue" :
"'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 103, + "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out index 65baeb48e1167..c4858ac10ae74 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out @@ -765,6 +765,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -787,3 +831,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : 
"INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out index 829d650fac8fe..6f01cc3be078d 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out @@ -882,6 +882,50 @@ select date_add(YEAR, 1, date'2022-02-25') [Analyzer test output redacted due to nondeterminism] +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select date_add('QUARTER', 5, date'2022-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_add`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "date_add('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query analysis @@ -940,3 +984,47 @@ select datediff(QUARTER, date'2022-02-25', date'2023-05-25') select date_diff(YEAR, date'2022-02-25', date'2023-02-25') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_diff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 103, + "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out index 078bf031420b6..ef1261197b696 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out @@ -882,6 +882,50 @@ select date_add(YEAR, 1, date'2022-02-25') [Analyzer test output redacted due to nondeterminism] +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select date_add('QUARTER', 5, date'2022-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_add`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "date_add('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query analysis @@ -942,6 +986,50 @@ select date_diff(YEAR, date'2022-02-25', date'2023-02-25') [Analyzer test output redacted due to nondeterminism] +-- !query +select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_diff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 103, + "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} + + -- !query select timestamp '2019-01-01\t' -- !query analysis @@ -1780,6 +1868,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" 
: 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -1802,3 +1934,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out index 25e7cfcbcbb1f..856c317d13745 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out @@ -837,6 +837,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + 
"queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -859,3 +903,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out index 630e49bfaa506..5c42401acc353 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out @@ -782,6 +782,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -806,3 +850,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select 
timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out index d6e29e72682c6..18d05088b566b 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out @@ -842,6 +842,50 @@ select timestampadd(SECOND, -1, date'2022-02-15') [Analyzer test output redacted due to nondeterminism] +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query analysis @@ -866,3 +910,47 @@ select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15') select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') -- !query analysis [Analyzer test output redacted due to nondeterminism] + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + 
"messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query analysis +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/inputs/date.sql b/sql/core/src/test/resources/sql-tests/inputs/date.sql index c4c7ff734702e..ed16ace0382ef 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/date.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/date.sql @@ -154,6 +154,9 @@ select date_add(MONTH, -1, timestamp'2022-02-25 01:02:03'); select dateadd(QUARTER, 5, date'2022-02-25'); select date_add(YEAR, 1, date'2022-02-25'); +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123'); +select date_add('QUARTER', 5, date'2022-02-25'); + -- Get the difference between timestamps or dates in the specified units select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001'); select date_diff(MILLISECOND, timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455'); @@ -165,3 +168,6 @@ select datediff(WEEK, timestamp'2022-02-25 01:02:03', timestamp'2022-01-28 01:02 select date_diff(MONTH, timestamp'2022-02-25 01:02:03', timestamp'2022-01-25 01:02:03'); select datediff(QUARTER, date'2022-02-25', date'2023-05-25'); select date_diff(YEAR, date'2022-02-25', date'2023-02-25'); + +select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455'); +select datediff('YEAR', date'2022-02-25', date'2023-02-25'); diff --git a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql index b0d958a24b149..163b734164ed6 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql @@ -150,8 +150,14 @@ select timestampadd(MINUTE, 58, timestamp'2022-02-14 01:02:03'); select timestampadd(YEAR, 1, date'2022-02-15'); select timestampadd(SECOND, -1, date'2022-02-15'); +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03'); +select timestampadd('SECOND', -1, date'2022-02-15'); + -- Get the difference between timestamps in the specified units select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03'); select timestampdiff(MINUTE, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03'); select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15'); select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59'); + +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03'); +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15'); diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out 
index a309170d6f625..dee745995ea8d 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out @@ -1041,6 +1041,54 @@ struct 2023-02-25 00:00:00 +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select date_add('QUARTER', 5, date'2022-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_add`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "date_add('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query schema @@ -1119,3 +1167,51 @@ select date_diff(YEAR, date'2022-02-25', date'2023-02-25') struct -- !query output 1 + + +-- !query +select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_diff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 103, + "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out index 3afae8ab91aa3..7433fcd52b9c7 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out @@ -1011,6 +1011,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + 
"invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -1041,3 +1089,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out index c9cd8227083d5..322d916f28295 100644 --- a/sql/core/src/test/resources/sql-tests/results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out @@ -1047,6 +1047,54 @@ struct 2023-02-25 00:00:00 +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select date_add('QUARTER', 5, date'2022-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_add`", + "invalidValue" : "'QUARTER'", + 
"parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "date_add('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query schema @@ -1125,3 +1173,51 @@ select date_diff(YEAR, date'2022-02-25', date'2023-02-25') struct -- !query output 1 + + +-- !query +select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_diff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 103, + "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index be809b83687a6..a824fd96c1166 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -1020,6 +1020,54 @@ struct 2023-02-25 00:00:00 +-- !query +select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`dateadd`", + "invalidValue" : "'MICROSECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')" + } ] +} + + +-- !query +select date_add('QUARTER', 5, date'2022-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_add`", + "invalidValue" : "'QUARTER'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "date_add('QUARTER', 5, date'2022-02-25')" + } ] +} + + -- !query select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', timestamp'2022-02-25 01:02:03.124001') -- !query schema @@ -1100,6 +1148,54 @@ struct 1 +-- !query +select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455') +-- !query schema +struct<> +-- !query output 
+org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`date_diff`", + "invalidValue" : "'MILLISECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 103, + "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', timestamp'2022-02-25 01:02:03.455')" + } ] +} + + +-- !query +select datediff('YEAR', date'2022-02-25', date'2023-02-25') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`datediff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')" + } ] +} + + -- !query select timestamp '2019-01-01\t' -- !query schema @@ -2054,6 +2150,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -2084,3 +2228,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + 
"parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out index 9047b64e01d05..8a49774f74f4f 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out @@ -1007,6 +1007,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -1037,3 +1085,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out index 53d86dfd51851..88ba5314b8ad5 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out @@ -1002,6 +1002,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -1032,3 +1080,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +} diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out index 0c5f1aeb78b6f..545b49445e421 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out @@ -983,6 +983,54 @@ struct 2022-02-14 23:59:59 +-- !query +select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'MONTH'", + "parameter" : "`unit`" + }, + "queryContext" : 
[ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')" + } ] +} + + +-- !query +select timestampadd('SECOND', -1, date'2022-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampadd`", + "invalidValue" : "'SECOND'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')" + } ] +} + + -- !query select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', timestamp'2022-01-14 01:02:03') -- !query schema @@ -1013,3 +1061,51 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59') struct -- !query output -1 + + +-- !query +select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'MINUTE'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03')" + } ] +} + + +-- !query +select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.catalyst.parser.ParseException +{ + "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT", + "sqlState" : "22023", + "messageParameters" : { + "functionName" : "`timestampdiff`", + "invalidValue" : "'YEAR'", + "parameter" : "`unit`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')" + } ] +}
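Usage sketch (not part of the diff; inferred from the new test inputs and golden files above, so the exact messages are those shown in the expected outputs): after this change the unit argument of dateadd/date_add, datediff/date_diff, timestampadd and timestampdiff must be a bare keyword from the datetimeUnit rule, while a quoted string literal is rejected at parse time with error class INVALID_PARAMETER_VALUE.DATETIME_UNIT.

-- accepted: the unit is an unquoted keyword
select timestampadd(SECOND, -1, date'2022-02-15');

-- rejected at parse time with INVALID_PARAMETER_VALUE.DATETIME_UNIT,
-- because the unit is passed as a string literal
select timestampadd('SECOND', -1, date'2022-02-15');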